From 0b6210cd37b68b94252cb798598b12974a20e1c1 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Tue, 21 May 2024 22:56:19 +0200
Subject: Adding upstream version 5.28.2+dfsg1+~cs23.11.12.3.

Signed-off-by: Daniel Baumann
---
 .dockerignore | 8 +
 .editorconfig | 9 +
 .github/ISSUE_TEMPLATE/bug-report.md | 34 +
 .github/ISSUE_TEMPLATE/feature-request.md | 28 +
 .github/PULL_REQUEST_TEMPLATE.md | 53 +
 .github/dependabot.yml | 23 +
 .github/workflows/bench.yml | 43 +
 .github/workflows/codeql.yml | 78 +
 .github/workflows/dependency-review.yml | 27 +
 .github/workflows/fuzz.yml | 39 +
 .github/workflows/lint.yml | 17 +
 .github/workflows/nodejs.yml | 45 +
 .github/workflows/publish-undici-types.yml | 26 +
 .github/workflows/scorecard.yml | 56 +
 .gitignore | 81 +
 .husky/pre-commit | 4 +
 .nojekyll | 0
 .npmignore | 2 +
 .taprc | 7 +
 CNAME | 1 +
 CODE_OF_CONDUCT.md | 6 +
 CONTRIBUTING.md | 201 +
 GOVERNANCE.md | 136 +
 LICENSE | 21 +
 MAINTAINERS.md | 33 +
 README.md | 443 ++
 SECURITY.md | 2 +
 benchmarks/benchmark-http2.js | 306 ++
 benchmarks/benchmark-https.js | 319 ++
 benchmarks/benchmark.js | 300 ++
 benchmarks/server-http2.js | 49 +
 benchmarks/server-https.js | 41 +
 benchmarks/server.js | 33 +
 benchmarks/wait.js | 22 +
 binary-search/.gitignore | 1 +
 binary-search/.travis.yml | 6 +
 binary-search/README.md | 46 +
 binary-search/binary-search.d.ts | 22 +
 binary-search/index.js | 45 +
 binary-search/package.json | 28 +
 binary-search/test.js | 46 +
 build/Dockerfile | 18 +
 build/wasm.js | 101 +
 docs/api/Agent.md | 80 +
 docs/api/BalancedPool.md | 99 +
 docs/api/CacheStorage.md | 30 +
 docs/api/Client.md | 273 ++
 docs/api/Connector.md | 115 +
 docs/api/ContentType.md | 57 +
 docs/api/Cookies.md | 101 +
 docs/api/DiagnosticsChannel.md | 204 +
 docs/api/DispatchInterceptor.md | 60 +
 docs/api/Dispatcher.md | 887 ++++
 docs/api/Errors.md | 47 +
 docs/api/Fetch.md | 27 +
 docs/api/MockAgent.md | 540 +++
 docs/api/MockClient.md | 77 +
 docs/api/MockErrors.md | 12 +
 docs/api/MockPool.md | 547 +++
 docs/api/Pool.md | 84 +
 docs/api/PoolStats.md | 35 +
 docs/api/ProxyAgent.md | 126 +
 docs/api/RetryHandler.md | 108 +
 docs/api/WebSocket.md | 43 +
 docs/api/api-lifecycle.md | 62 +
 docs/assets/lifecycle-diagram.png | Bin 0 -> 47090 bytes
 docs/best-practices/client-certificate.md | 64 +
 docs/best-practices/mocking-request.md | 136 +
 docs/best-practices/proxy.md | 127 +
 docs/best-practices/writing-tests.md | 20 +
 docsify/sidebar.md | 28 +
 examples/ca-fingerprint/index.js | 80 +
 examples/fetch.js | 13 +
 examples/proxy-agent.js | 25 +
 examples/proxy/index.js | 49 +
 examples/proxy/proxy.js | 256 +
 examples/request.js | 18 +
 fastify-busboy/.eslintrc.js | 27 +
 fastify-busboy/.gitattributes | 2 +
 fastify-busboy/.github/dependabot.yml | 13 +
 fastify-busboy/.github/workflows/ci.yml | 22 +
 fastify-busboy/.github/workflows/coverage.yml | 44 +
 fastify-busboy/.github/workflows/linting.yml | 35 +
 fastify-busboy/.gitignore | 152 +
 fastify-busboy/.taprc | 4 +
 fastify-busboy/CHANGELOG.md | 28 +
 fastify-busboy/LICENSE | 19 +
 fastify-busboy/README.md | 271 ++
 fastify-busboy/bench/busboy-form-bench-latin1.js | 32 +
 fastify-busboy/bench/busboy-form-bench-utf8.js | 32 +
 .../bench/createMultipartBufferForEncodingBench.js | 23 +
 .../bench/dicer/dicer-bench-multipart-parser.js | 60 +
 .../dicer/formidable-bench-multipart-parser.js | 71 +
 .../dicer/multipartser-bench-multipart-parser.js | 57 +
 .../dicer/multiparty-bench-multipart-parser.js | 78 +
 .../bench/dicer/parted-bench-multipart-parser.js | 65 +
 fastify-busboy/bench/dicer/parted-multipart.js | 486 ++
.../bench/fastify-busboy-form-bench-latin1.js | 31 + .../bench/fastify-busboy-form-bench-utf8.js | 31 + fastify-busboy/bench/parse-params.js | 21 + .../_results/Busboy_comparison-busboy-Node_12.json | 10 + .../_results/Busboy_comparison-busboy-Node_16.json | 10 + .../Busboy_comparison-fastify-busboy-Node_16.json | 10 + .../benchmarks/busboy/contestants/busboy.js | 40 + .../busboy/contestants/fastify-busboy.js | 41 + fastify-busboy/benchmarks/busboy/data.js | 34 + fastify-busboy/benchmarks/busboy/executioner.js | 50 + fastify-busboy/benchmarks/busboy/regenerate.cmd | 17 + fastify-busboy/benchmarks/busboy/validator.js | 15 + fastify-busboy/benchmarks/common/commonBuilder.js | 46 + .../benchmarks/common/contestantResolver.js | 26 + fastify-busboy/benchmarks/common/executionUtils.js | 18 + fastify-busboy/benchmarks/common/resultUtils.js | 17 + .../benchmarks/common/resultsCombinator.js | 54 + fastify-busboy/benchmarks/package.json | 21 + fastify-busboy/deps/dicer/LICENSE | 19 + fastify-busboy/deps/dicer/lib/Dicer.js | 207 + fastify-busboy/deps/dicer/lib/HeaderParser.js | 100 + fastify-busboy/deps/dicer/lib/PartStream.js | 13 + fastify-busboy/deps/dicer/lib/dicer.d.ts | 164 + fastify-busboy/deps/streamsearch/sbmh.js | 228 + fastify-busboy/lib/main.d.ts | 196 + fastify-busboy/lib/main.js | 85 + fastify-busboy/lib/types/multipart.js | 306 ++ fastify-busboy/lib/types/urlencoded.js | 190 + fastify-busboy/lib/utils/Decoder.js | 54 + fastify-busboy/lib/utils/basename.js | 14 + fastify-busboy/lib/utils/decodeText.js | 114 + fastify-busboy/lib/utils/getLimit.js | 16 + fastify-busboy/lib/utils/parseParams.js | 196 + fastify-busboy/package.json | 86 + fastify-busboy/test/busboy-constructor.test.js | 75 + fastify-busboy/test/decoder.test.js | 98 + fastify-busboy/test/dicer-constructor.test.js | 22 + fastify-busboy/test/dicer-endfinish.test.js | 96 + fastify-busboy/test/dicer-export.test.js | 24 + fastify-busboy/test/dicer-headerparser.test.js | 192 + fastify-busboy/test/dicer-malformed-header.test.js | 29 + .../test/dicer-multipart-extra-trailer.test.js | 82 + .../test/dicer-multipart-nolisteners.test.js | 44 + fastify-busboy/test/dicer-multipart.test.js | 223 + fastify-busboy/test/fixtures/many-noend/original | 31 + fastify-busboy/test/fixtures/many-noend/part1 | 1 + .../test/fixtures/many-noend/part1.header | 1 + fastify-busboy/test/fixtures/many-noend/part2 | 0 .../test/fixtures/many-noend/part2.header | 1 + fastify-busboy/test/fixtures/many-noend/part3 | 0 .../test/fixtures/many-noend/part3.header | 1 + fastify-busboy/test/fixtures/many-noend/part4 | 0 .../test/fixtures/many-noend/part4.header | 1 + fastify-busboy/test/fixtures/many-noend/part5 | 3 + .../test/fixtures/many-noend/part5.header | 1 + fastify-busboy/test/fixtures/many-noend/part6 | 1 + .../test/fixtures/many-noend/part6.header | 1 + .../test/fixtures/many-noend/part7.header | 2 + .../test/fixtures/many-wrongboundary/original | 32 + .../test/fixtures/many-wrongboundary/preamble | 33 + .../fixtures/many-wrongboundary/preamble.error | 1 + fastify-busboy/test/fixtures/many/original | 32 + fastify-busboy/test/fixtures/many/part1 | 1 + fastify-busboy/test/fixtures/many/part1.header | 1 + fastify-busboy/test/fixtures/many/part2 | 0 fastify-busboy/test/fixtures/many/part2.header | 1 + fastify-busboy/test/fixtures/many/part3 | 0 fastify-busboy/test/fixtures/many/part3.header | 1 + fastify-busboy/test/fixtures/many/part4 | 0 fastify-busboy/test/fixtures/many/part4.header | 1 + fastify-busboy/test/fixtures/many/part5 | 3 + 
fastify-busboy/test/fixtures/many/part5.header | 1 + fastify-busboy/test/fixtures/many/part6 | 0 fastify-busboy/test/fixtures/many/part6.header | 2 + fastify-busboy/test/fixtures/many/part7 | 1 + fastify-busboy/test/fixtures/many/part7.header | 1 + fastify-busboy/test/fixtures/nested-full/original | 24 + fastify-busboy/test/fixtures/nested-full/part1 | 1 + .../test/fixtures/nested-full/part1.header | 1 + fastify-busboy/test/fixtures/nested-full/part2 | 12 + .../test/fixtures/nested-full/part2.header | 2 + .../test/fixtures/nested-full/preamble.header | 2 + fastify-busboy/test/fixtures/nested/original | 21 + fastify-busboy/test/fixtures/nested/part1 | 1 + fastify-busboy/test/fixtures/nested/part1.header | 1 + fastify-busboy/test/fixtures/nested/part2 | 12 + fastify-busboy/test/fixtures/nested/part2.header | 2 + fastify-busboy/test/get-limit.test.js | 34 + fastify-busboy/test/multipart-stream-pause.test.js | 82 + fastify-busboy/test/parse-params.test.js | 124 + fastify-busboy/test/streamsearch.test.js | 396 ++ fastify-busboy/test/types-multipart.test.js | 678 +++ fastify-busboy/test/types-urlencoded.test.js | 210 + fastify-busboy/test/types/dicer.test-d.ts | 81 + fastify-busboy/test/types/main.test-d.ts | 241 + fastify-busboy/tsconfig.json | 30 + index-fetch.js | 15 + index.d.ts | 3 + index.html | 35 + index.js | 167 + lib/agent.js | 148 + lib/api/abort-signal.js | 54 + lib/api/api-connect.js | 104 + lib/api/api-pipeline.js | 249 + lib/api/api-request.js | 180 + lib/api/api-stream.js | 220 + lib/api/api-upgrade.js | 105 + lib/api/index.js | 7 + lib/api/readable.js | 322 ++ lib/api/util.js | 46 + lib/balanced-pool.js | 190 + lib/cache/cache.js | 838 ++++ lib/cache/cachestorage.js | 144 + lib/cache/symbols.js | 5 + lib/cache/util.js | 49 + lib/client.js | 2283 +++++++++ lib/compat/dispatcher-weakref.js | 48 + lib/cookies/constants.js | 12 + lib/cookies/index.js | 184 + lib/cookies/parse.js | 317 ++ lib/cookies/util.js | 291 ++ lib/core/connect.js | 189 + lib/core/errors.js | 230 + lib/core/request.js | 499 ++ lib/core/symbols.js | 63 + lib/core/util.js | 511 ++ lib/dispatcher-base.js | 192 + lib/dispatcher.js | 19 + lib/fetch/LICENSE | 21 + lib/fetch/body.js | 605 +++ lib/fetch/constants.js | 151 + lib/fetch/dataURL.js | 627 +++ lib/fetch/file.js | 344 ++ lib/fetch/formdata.js | 265 ++ lib/fetch/global.js | 40 + lib/fetch/headers.js | 589 +++ lib/fetch/index.js | 2145 +++++++++ lib/fetch/request.js | 946 ++++ lib/fetch/response.js | 571 +++ lib/fetch/symbols.js | 10 + lib/fetch/util.js | 1071 +++++ lib/fetch/webidl.js | 646 +++ lib/fileapi/encoding.js | 290 ++ lib/fileapi/filereader.js | 344 ++ lib/fileapi/progressevent.js | 78 + lib/fileapi/symbols.js | 10 + lib/fileapi/util.js | 392 ++ lib/global.js | 32 + lib/handler/DecoratorHandler.js | 35 + lib/handler/RedirectHandler.js | 216 + lib/handler/RetryHandler.js | 336 ++ lib/interceptor/redirectInterceptor.js | 21 + lib/llhttp/constants.d.ts | 199 + lib/llhttp/constants.js | 278 ++ lib/llhttp/utils.d.ts | 4 + lib/llhttp/utils.js | 15 + lib/llhttp/wasm_build_env.txt | 32 + lib/mock/mock-agent.js | 171 + lib/mock/mock-client.js | 59 + lib/mock/mock-errors.js | 17 + lib/mock/mock-interceptor.js | 206 + lib/mock/mock-pool.js | 59 + lib/mock/mock-symbols.js | 23 + lib/mock/mock-utils.js | 351 ++ lib/mock/pending-interceptors-formatter.js | 40 + lib/mock/pluralizer.js | 29 + lib/node/fixed-queue.js | 117 + lib/pool-base.js | 194 + lib/pool-stats.js | 34 + lib/pool.js | 94 + lib/proxy-agent.js | 189 + lib/timers.js | 97 + lib/websocket/connection.js 
| 291 ++ lib/websocket/constants.js | 51 + lib/websocket/events.js | 303 ++ lib/websocket/frame.js | 73 + lib/websocket/receiver.js | 344 ++ lib/websocket/symbols.js | 12 + lib/websocket/util.js | 200 + lib/websocket/websocket.js | 641 +++ llhttp/.dockerignore | 6 + llhttp/.eslintrc.js | 31 + llhttp/.github/workflows/aiohttp.yml | 61 + llhttp/.github/workflows/ci.yaml | 117 + llhttp/.gitignore | 6 + llhttp/.npmrc | 1 + llhttp/CMakeLists.txt | 117 + llhttp/CNAME | 1 + llhttp/CODE_OF_CONDUCT.md | 4 + llhttp/Dockerfile | 13 + llhttp/LICENSE-MIT | 22 + llhttp/Makefile | 93 + llhttp/README.md | 501 ++ llhttp/_config.yml | 1 + llhttp/bench/index.ts | 71 + llhttp/bin/build_wasm.ts | 95 + llhttp/bin/generate.ts | 47 + llhttp/docs/releasing.md | 65 + llhttp/examples/wasm.ts | 248 + llhttp/images/http-loose-none.png | Bin 0 -> 3571702 bytes llhttp/images/http-strict-none.png | Bin 0 -> 4166480 bytes llhttp/libllhttp.pc.in | 10 + llhttp/package-lock.json | 2995 ++++++++++++ llhttp/package.json | 60 + llhttp/src/common.gypi | 46 + llhttp/src/llhttp.gyp | 22 + llhttp/src/llhttp.ts | 7 + llhttp/src/llhttp/c-headers.ts | 106 + llhttp/src/llhttp/constants.ts | 540 +++ llhttp/src/llhttp/http.ts | 1299 ++++++ llhttp/src/llhttp/url.ts | 220 + llhttp/src/llhttp/utils.ts | 27 + llhttp/src/native/api.c | 510 ++ llhttp/src/native/api.h | 355 ++ llhttp/src/native/http.c | 170 + llhttp/test/fixtures/extra.c | 457 ++ llhttp/test/fixtures/index.ts | 116 + llhttp/test/fuzzers/fuzz_parser.c | 45 + llhttp/test/md-test.ts | 269 ++ llhttp/test/request/connection.md | 732 +++ llhttp/test/request/content-length.md | 482 ++ llhttp/test/request/finish.md | 69 + llhttp/test/request/invalid.md | 607 +++ llhttp/test/request/lenient-headers.md | 145 + llhttp/test/request/lenient-version.md | 23 + llhttp/test/request/method.md | 450 ++ llhttp/test/request/pausing.md | 381 ++ llhttp/test/request/pipelining.md | 66 + llhttp/test/request/sample.md | 629 +++ llhttp/test/request/transfer-encoding.md | 1187 +++++ llhttp/test/request/uri.md | 243 + llhttp/test/response/connection.md | 647 +++ llhttp/test/response/content-length.md | 158 + llhttp/test/response/finish.md | 23 + llhttp/test/response/invalid.md | 285 ++ llhttp/test/response/lenient-version.md | 20 + llhttp/test/response/pausing.md | 330 ++ llhttp/test/response/pipelining.md | 60 + llhttp/test/response/sample.md | 653 +++ llhttp/test/response/transfer-encoding.md | 410 ++ llhttp/test/url.md | 261 ++ llhttp/tsconfig.json | 15 + llhttp/tslint.json | 14 + llparse-builder/.gitignore | 3 + llparse-builder/.travis.yml | 4 + llparse-builder/README.md | 32 + llparse-builder/package-lock.json | 1466 ++++++ llparse-builder/package.json | 48 + llparse-builder/src/builder.ts | 147 + llparse-builder/src/code/and.ts | 7 + llparse-builder/src/code/base.ts | 16 + llparse-builder/src/code/creator.ts | 184 + llparse-builder/src/code/field-value.ts | 9 + llparse-builder/src/code/field.ts | 10 + llparse-builder/src/code/index.ts | 15 + llparse-builder/src/code/is-equal.ts | 7 + llparse-builder/src/code/load.ts | 7 + llparse-builder/src/code/match.ts | 7 + llparse-builder/src/code/mul-add.ts | 28 + llparse-builder/src/code/or.ts | 7 + llparse-builder/src/code/span.ts | 5 + llparse-builder/src/code/store.ts | 7 + llparse-builder/src/code/test.ts | 7 + llparse-builder/src/code/update.ts | 7 + llparse-builder/src/code/value.ts | 7 + llparse-builder/src/edge.ts | 54 + llparse-builder/src/loop-checker/index.ts | 205 + llparse-builder/src/loop-checker/lattice.ts | 115 + 
llparse-builder/src/node/base.ts | 96 + llparse-builder/src/node/consume.ts | 19 + llparse-builder/src/node/error.ts | 24 + llparse-builder/src/node/index.ts | 8 + llparse-builder/src/node/invoke.ts | 39 + llparse-builder/src/node/match.ts | 162 + llparse-builder/src/node/pause.ts | 25 + llparse-builder/src/node/span-end.ts | 19 + llparse-builder/src/node/span-start.ts | 16 + llparse-builder/src/property.ts | 12 + llparse-builder/src/reachability.ts | 31 + llparse-builder/src/span-allocator.ts | 182 + llparse-builder/src/span.ts | 57 + llparse-builder/src/transform/base.ts | 12 + llparse-builder/src/transform/creator.ts | 28 + llparse-builder/src/transform/index.ts | 3 + llparse-builder/src/transform/to-lower-unsafe.ts | 7 + llparse-builder/src/transform/to-lower.ts | 7 + llparse-builder/src/utils.ts | 19 + llparse-builder/test/builder-test.ts | 94 + llparse-builder/test/loop-checker-test.ts | 118 + llparse-builder/test/span-allocator-test.ts | 146 + llparse-builder/tsconfig.json | 15 + llparse-builder/tslint.json | 14 + llparse-frontend/.gitignore | 2 + llparse-frontend/.travis.yml | 6 + llparse-frontend/README.md | 30 + llparse-frontend/package-lock.json | 1516 ++++++ llparse-frontend/package.json | 43 + llparse-frontend/src/code/and.ts | 8 + llparse-frontend/src/code/base.ts | 8 + llparse-frontend/src/code/external.ts | 7 + llparse-frontend/src/code/field-value.ts | 13 + llparse-frontend/src/code/field.ts | 8 + llparse-frontend/src/code/index.ts | 15 + llparse-frontend/src/code/is-equal.ts | 9 + llparse-frontend/src/code/load.ts | 7 + llparse-frontend/src/code/match.ts | 7 + llparse-frontend/src/code/mul-add.ts | 26 + llparse-frontend/src/code/or.ts | 8 + llparse-frontend/src/code/span.ts | 7 + llparse-frontend/src/code/store.ts | 7 + llparse-frontend/src/code/test.ts | 8 + llparse-frontend/src/code/update.ts | 8 + llparse-frontend/src/code/value.ts | 7 + llparse-frontend/src/container/index.ts | 84 + llparse-frontend/src/container/wrap.ts | 15 + llparse-frontend/src/enumerator.ts | 23 + llparse-frontend/src/frontend.ts | 513 ++ llparse-frontend/src/implementation/code.ts | 16 + llparse-frontend/src/implementation/full.ts | 9 + llparse-frontend/src/implementation/index.ts | 4 + llparse-frontend/src/implementation/node.ts | 15 + llparse-frontend/src/implementation/transform.ts | 9 + llparse-frontend/src/namespace/frontend.ts | 5 + llparse-frontend/src/node/base.ts | 46 + llparse-frontend/src/node/consume.ts | 8 + llparse-frontend/src/node/empty.ts | 4 + llparse-frontend/src/node/error.ts | 9 + llparse-frontend/src/node/index.ts | 13 + llparse-frontend/src/node/invoke.ts | 39 + llparse-frontend/src/node/match.ts | 11 + llparse-frontend/src/node/pause.ts | 4 + llparse-frontend/src/node/sequence.ts | 44 + llparse-frontend/src/node/single.ts | 46 + llparse-frontend/src/node/slot.ts | 20 + llparse-frontend/src/node/span-end.ts | 12 + llparse-frontend/src/node/span-start.ts | 12 + llparse-frontend/src/node/table-lookup.ts | 43 + llparse-frontend/src/peephole.ts | 52 + llparse-frontend/src/span-field.ts | 8 + llparse-frontend/src/transform/base.ts | 4 + llparse-frontend/src/transform/id.ts | 7 + llparse-frontend/src/transform/index.ts | 4 + llparse-frontend/src/transform/to-lower-unsafe.ts | 7 + llparse-frontend/src/transform/to-lower.ts | 7 + llparse-frontend/src/trie/empty.ts | 9 + llparse-frontend/src/trie/index.ts | 136 + llparse-frontend/src/trie/node.ts | 2 + llparse-frontend/src/trie/sequence.ts | 9 + llparse-frontend/src/trie/single.ts | 16 + llparse-frontend/src/utils/identifier.ts | 
32 + llparse-frontend/src/utils/index.ts | 19 + llparse-frontend/src/wrap.ts | 3 + llparse-frontend/test/container-test.ts | 46 + .../test/fixtures/a-implementation/code/and.ts | 8 + .../test/fixtures/a-implementation/code/base.ts | 6 + .../test/fixtures/a-implementation/code/index.ts | 15 + .../fixtures/a-implementation/code/is-equal.ts | 8 + .../test/fixtures/a-implementation/code/load.ts | 8 + .../test/fixtures/a-implementation/code/match.ts | 8 + .../test/fixtures/a-implementation/code/mul-add.ts | 8 + .../test/fixtures/a-implementation/code/or.ts | 8 + .../test/fixtures/a-implementation/code/span.ts | 8 + .../test/fixtures/a-implementation/code/store.ts | 8 + .../test/fixtures/a-implementation/code/test.ts | 8 + .../test/fixtures/a-implementation/code/update.ts | 8 + .../test/fixtures/a-implementation/code/value.ts | 8 + .../test/fixtures/a-implementation/index.ts | 5 + .../test/fixtures/a-implementation/node/base.ts | 38 + .../test/fixtures/a-implementation/node/consume.ts | 8 + .../test/fixtures/a-implementation/node/empty.ts | 8 + .../test/fixtures/a-implementation/node/error.ts | 10 + .../test/fixtures/a-implementation/node/index.ts | 15 + .../test/fixtures/a-implementation/node/invoke.ts | 8 + .../test/fixtures/a-implementation/node/pause.ts | 8 + .../fixtures/a-implementation/node/sequence.ts | 8 + .../test/fixtures/a-implementation/node/single.ts | 18 + .../fixtures/a-implementation/node/span-end.ts | 8 + .../fixtures/a-implementation/node/span-start.ts | 8 + .../fixtures/a-implementation/node/table-lookup.ts | 8 + .../fixtures/a-implementation/transform/base.ts | 6 + .../test/fixtures/a-implementation/transform/id.ts | 8 + .../fixtures/a-implementation/transform/index.ts | 5 + .../a-implementation/transform/to-lower-unsafe.ts | 8 + .../a-implementation/transform/to-lower.ts | 8 + .../test/fixtures/implementation/code/and.ts | 8 + .../test/fixtures/implementation/code/base.ts | 6 + .../test/fixtures/implementation/code/index.ts | 15 + .../test/fixtures/implementation/code/is-equal.ts | 8 + .../test/fixtures/implementation/code/load.ts | 8 + .../test/fixtures/implementation/code/match.ts | 8 + .../test/fixtures/implementation/code/mul-add.ts | 8 + .../test/fixtures/implementation/code/or.ts | 8 + .../test/fixtures/implementation/code/span.ts | 8 + .../test/fixtures/implementation/code/store.ts | 8 + .../test/fixtures/implementation/code/test.ts | 8 + .../test/fixtures/implementation/code/update.ts | 8 + .../test/fixtures/implementation/code/value.ts | 8 + .../test/fixtures/implementation/index.ts | 5 + .../test/fixtures/implementation/node/base.ts | 39 + .../test/fixtures/implementation/node/consume.ts | 8 + .../test/fixtures/implementation/node/empty.ts | 8 + .../test/fixtures/implementation/node/error.ts | 10 + .../test/fixtures/implementation/node/index.ts | 15 + .../test/fixtures/implementation/node/invoke.ts | 8 + .../test/fixtures/implementation/node/pause.ts | 8 + .../test/fixtures/implementation/node/sequence.ts | 15 + .../test/fixtures/implementation/node/single.ts | 22 + .../test/fixtures/implementation/node/span-end.ts | 8 + .../fixtures/implementation/node/span-start.ts | 8 + .../fixtures/implementation/node/table-lookup.ts | 8 + .../test/fixtures/implementation/transform/base.ts | 6 + .../test/fixtures/implementation/transform/id.ts | 8 + .../fixtures/implementation/transform/index.ts | 5 + .../implementation/transform/to-lower-unsafe.ts | 8 + .../fixtures/implementation/transform/to-lower.ts | 8 + llparse-frontend/test/frontend-test.ts | 187 + 
llparse-frontend/tsconfig.json | 15 + llparse-frontend/tslint.json | 16 + llparse/.gitignore | 4 + llparse/.travis.yml | 6 + llparse/CNAME | 1 + llparse/CODE_OF_CONDUCT.md | 4 + llparse/LICENSE-MIT | 22 + llparse/README.md | 86 + llparse/_config.yml | 1 + llparse/examples/http/.gitignore | 6 + llparse/examples/http/Makefile | 11 + llparse/examples/http/index.ts | 51 + llparse/examples/http/main.c | 48 + llparse/package-lock.json | 1802 +++++++ llparse/package.json | 49 + llparse/src/api.ts | 47 + llparse/src/compiler/header-builder.ts | 80 + llparse/src/compiler/index.ts | 88 + llparse/src/implementation/c/code/and.ts | 11 + llparse/src/implementation/c/code/base.ts | 12 + llparse/src/implementation/c/code/external.ts | 19 + llparse/src/implementation/c/code/field.ts | 28 + llparse/src/implementation/c/code/index.ts | 27 + llparse/src/implementation/c/code/is-equal.ts | 10 + llparse/src/implementation/c/code/load.ts | 10 + llparse/src/implementation/c/code/mul-add.ts | 67 + llparse/src/implementation/c/code/or.ts | 11 + llparse/src/implementation/c/code/store.ts | 11 + llparse/src/implementation/c/code/test.ts | 11 + llparse/src/implementation/c/code/update.ts | 11 + llparse/src/implementation/c/compilation.ts | 336 ++ llparse/src/implementation/c/constants.ts | 45 + .../src/implementation/c/helpers/match-sequence.ts | 75 + llparse/src/implementation/c/index.ts | 199 + llparse/src/implementation/c/node/base.ts | 77 + llparse/src/implementation/c/node/consume.ts | 48 + llparse/src/implementation/c/node/empty.ts | 16 + llparse/src/implementation/c/node/error.ts | 33 + llparse/src/implementation/c/node/index.ts | 27 + llparse/src/implementation/c/node/invoke.ts | 44 + llparse/src/implementation/c/node/pause.ts | 19 + llparse/src/implementation/c/node/sequence.ts | 55 + llparse/src/implementation/c/node/single.ts | 47 + llparse/src/implementation/c/node/span-end.ts | 56 + llparse/src/implementation/c/node/span-start.ts | 26 + llparse/src/implementation/c/node/table-lookup.ts | 196 + llparse/src/implementation/c/transform/base.ts | 10 + llparse/src/implementation/c/transform/id.ts | 11 + llparse/src/implementation/c/transform/index.ts | 11 + .../implementation/c/transform/to-lower-unsafe.ts | 10 + llparse/src/implementation/c/transform/to-lower.ts | 11 + llparse/test/code-test.ts | 168 + llparse/test/compiler-test.ts | 289 ++ llparse/test/consume-test.ts | 69 + llparse/test/fixtures/extra.c | 84 + llparse/test/fixtures/index.ts | 52 + llparse/test/resumption-test.ts | 55 + llparse/test/span-test.ts | 107 + llparse/test/transform-test.ts | 41 + llparse/tsconfig.json | 15 + llparse/tslint.json | 16 + package.json | 167 + scripts/generate-pem.js | 3 + scripts/generate-undici-types-package-json.js | 28 + scripts/verifyVersion.js | 15 + test/abort-controller.js | 238 + test/abort-event-emitter.js | 259 + test/agent.js | 782 ++++ test/async_hooks.js | 206 + test/autoselectfamily.js | 198 + test/balanced-pool.js | 566 +++ test/ca-fingerprint.js | 126 + test/client-abort.js | 213 + test/client-connect.js | 308 ++ test/client-dispatch.js | 815 ++++ test/client-errors.js | 1285 +++++ test/client-head-reset-override.js | 62 + test/client-idempotent-body.js | 43 + test/client-keep-alive.js | 359 ++ test/client-node-max-header-size.js | 23 + test/client-pipeline.js | 1042 +++++ test/client-pipelining.js | 752 +++ test/client-post.js | 73 + test/client-reconnect.js | 54 + test/client-request.js | 997 ++++ test/client-stream.js | 847 ++++ test/client-timeout.js | 197 + test/client-unref.js | 47 + 
test/client-upgrade.js | 452 ++ test/client-write-max-listeners.js | 51 + test/client.js | 2096 +++++++++ test/close-and-destroy.js | 344 ++ test/connect-abort.js | 28 + test/connect-errconnect.js | 32 + test/connect-timeout.js | 68 + test/content-length.js | 445 ++ test/cookie/cookies.js | 616 +++ test/cookie/global-headers.js | 70 + test/diagnostics-channel/connect-error.js | 61 + test/diagnostics-channel/error.js | 52 + test/diagnostics-channel/get.js | 141 + test/diagnostics-channel/post-stream.js | 149 + test/diagnostics-channel/post.js | 147 + test/dispatcher.js | 22 + test/errors.js | 81 + test/esm-wrapper.js | 19 + test/fetch/407-statuscode-window-null.js | 20 + test/fetch/abort.js | 82 + test/fetch/abort2.js | 60 + test/fetch/about-uri.js | 21 + test/fetch/blob-uri.js | 100 + test/fetch/bundle.js | 41 + test/fetch/client-error-stack-trace.js | 21 + test/fetch/client-fetch.js | 688 +++ test/fetch/client-node-max-header-size.js | 29 + test/fetch/content-length.js | 29 + test/fetch/cookies.js | 69 + test/fetch/data-uri.js | 214 + test/fetch/encoding.js | 58 + test/fetch/fetch-leak.js | 44 + test/fetch/fetch-timeouts.js | 56 + test/fetch/file.js | 190 + test/fetch/formdata.js | 401 ++ test/fetch/general.js | 30 + test/fetch/headers.js | 743 +++ test/fetch/http2.js | 415 ++ test/fetch/integrity.js | 150 + test/fetch/issue-1447.js | 46 + test/fetch/issue-2009.js | 28 + test/fetch/issue-2021.js | 32 + test/fetch/issue-2171.js | 25 + test/fetch/issue-2242.js | 8 + test/fetch/issue-2318.js | 25 + test/fetch/issue-node-46525.js | 28 + test/fetch/iterators.js | 140 + .../fetch/jsdom-abortcontroller-1910-1464495619.js | 26 + test/fetch/redirect-cross-origin-header.js | 48 + test/fetch/redirect.js | 50 + test/fetch/relative-url.js | 110 + test/fetch/request.js | 514 ++ test/fetch/resource-timing.js | 72 + test/fetch/response-json.js | 113 + test/fetch/response.js | 257 + test/fetch/user-agent.js | 32 + test/fetch/util.js | 281 ++ test/fixed-queue.js | 38 + test/fixtures/ca.pem | 16 + test/fixtures/cert.pem | 18 + test/fixtures/client-ca-crt.pem | 17 + test/fixtures/client-crt-2048.pem | 22 + test/fixtures/client-crt.pem | 17 + test/fixtures/client-key-2048.pem | 27 + test/fixtures/client-key.pem | 27 + test/fixtures/key.pem | 15 + test/fuzzing/client/client-fuzz-body.js | 28 + test/fuzzing/client/client-fuzz-headers.js | 27 + test/fuzzing/client/client-fuzz-options.js | 38 + test/fuzzing/client/index.js | 7 + test/fuzzing/fuzz.js | 66 + test/fuzzing/server/index.js | 6 + test/fuzzing/server/server-fuzz-append-data.js | 7 + test/fuzzing/server/server-fuzz-split-data.js | 17 + test/gc.js | 98 + test/get-head-body.js | 184 + test/headers-as-array.js | 131 + test/headers-crlf.js | 36 + test/http-100.js | 141 + test/http-req-destroy.js | 69 + test/http2-alpn.js | 277 ++ test/http2.js | 1191 +++++ test/https.js | 74 + test/imports/undici-import.ts | 5 + test/inflight-and-close.js | 31 + test/invalid-headers.js | 108 + test/issue-1670.js | 12 + test/issue-1903.js | 78 + test/issue-2065.js | 71 + test/issue-2078.js | 30 + test/issue-2349.js | 53 + test/issue-803.js | 47 + test/issue-810.js | 135 + test/jest/instanceof-error.test.js | 44 + test/jest/interceptor.test.js | 197 + test/jest/issue-1757.test.js | 61 + test/jest/mock-agent.test.js | 46 + test/jest/mock-scope.test.js | 32 + test/jest/test.js | 36 + test/max-headers.js | 41 + test/max-response-size.js | 105 + test/mock-agent.js | 2637 +++++++++++ test/mock-client.js | 446 ++ test/mock-errors.js | 32 + test/mock-interceptor-unused-assertions.js 
| 219 + test/mock-interceptor.js | 258 + test/mock-pool.js | 369 ++ test/mock-scope.js | 73 + test/mock-utils.js | 160 + test/no-strict-content-length.js | 349 ++ test/node-fetch/LICENSE | 22 + test/node-fetch/headers.js | 282 ++ test/node-fetch/main.js | 1661 +++++++ test/node-fetch/mock.js | 112 + test/node-fetch/request.js | 281 ++ test/node-fetch/response.js | 251 + test/node-fetch/utils/chai-timeout.js | 15 + test/node-fetch/utils/dummy.txt | 1 + test/node-fetch/utils/read-stream.js | 9 + test/node-fetch/utils/server.js | 467 ++ test/parser-issues.js | 114 + test/pipeline-pipelining.js | 108 + test/pool.js | 1101 +++++ test/promises.js | 280 ++ test/proxy-agent.js | 720 +++ test/proxy.js | 132 + test/readable.test.js | 23 + test/redirect-pipeline.js | 50 + test/redirect-relative.js | 22 + test/redirect-request.js | 420 ++ test/redirect-stream.js | 423 ++ test/redirect-upgrade.js | 34 + test/request-crlf.js | 32 + test/request-timeout.js | 820 ++++ test/request-timeout2.js | 48 + test/request.js | 248 + test/retry-handler.js | 622 +++ test/socket-back-pressure.js | 54 + test/socket-timeout.js | 100 + test/stream-compat.js | 75 + test/tls-client-cert.js | 70 + test/tls-session-reuse.js | 185 + test/tls.js | 188 + test/trailers.js | 57 + test/types/agent.test-d.ts | 110 + test/types/api.test-d.ts | 28 + test/types/balanced-pool.test-d.ts | 113 + test/types/cache-storage.test-d.ts | 39 + test/types/client.test-d.ts | 185 + test/types/connector.test-d.ts | 38 + test/types/diagnostics-channel.test-d.ts | 72 + test/types/dispatcher.events.test-d.ts | 45 + test/types/dispatcher.test-d.ts | 123 + test/types/errors.test-d.ts | 115 + test/types/fetch.test-d.ts | 173 + test/types/formdata.test-d.ts | 27 + test/types/global-dispatcher.test-d.ts | 12 + test/types/header.test-d.ts | 16 + test/types/index.test-d.ts | 23 + test/types/interceptor.test-d.ts | 5 + test/types/mock-agent.test-d.ts | 75 + test/types/mock-client.test-d.ts | 43 + test/types/mock-errors.test-d.ts | 19 + test/types/mock-interceptor.test-d.ts | 80 + test/types/mock-pool.test-d.ts | 42 + test/types/pool.test-d.ts | 112 + test/types/proxy-agent.test-d.ts | 43 + test/types/readable.test-d.ts | 34 + test/unix.js | 141 + test/util.js | 123 + test/utils/async-iterators.js | 25 + test/utils/esm-wrapper.mjs | 102 + test/utils/formdata.js | 49 + test/utils/redirecting-servers.js | 265 ++ test/utils/stream.js | 48 + test/validations.js | 63 + test/webidl/converters.js | 202 + test/webidl/helpers.js | 75 + test/webidl/util.js | 106 + test/websocket/close.js | 130 + test/websocket/constructor.js | 48 + test/websocket/custom-headers.js | 30 + test/websocket/diagnostics-channel.js | 71 + test/websocket/events.js | 204 + test/websocket/fragments.js | 40 + test/websocket/frame.js | 24 + test/websocket/opening-handshake.js | 215 + test/websocket/ping-pong.js | 46 + test/websocket/receive.js | 60 + test/websocket/send.js | 216 + test/websocket/websocketinit.js | 45 + test/wpt/runner/runner.mjs | 356 ++ test/wpt/runner/util.mjs | 172 + test/wpt/runner/worker.mjs | 164 + test/wpt/server/routes/network-partition-key.mjs | 111 + test/wpt/server/routes/redirect.mjs | 104 + test/wpt/server/server.mjs | 397 ++ test/wpt/server/websocket.mjs | 46 + test/wpt/start-FileAPI.mjs | 26 + test/wpt/start-cacheStorage.mjs | 26 + test/wpt/start-fetch.mjs | 31 + test/wpt/start-mimesniff.mjs | 31 + test/wpt/start-websockets.mjs | 47 + test/wpt/start-xhr.mjs | 12 + test/wpt/status/FileAPI.status.json | 75 + test/wpt/status/fetch.status.json | 457 ++ 
test/wpt/status/mimesniff.status.json | 7 + .../service-workers/cache-storage.status.json | 24 + test/wpt/status/websockets.status.json | 115 + test/wpt/status/xhr/formdata.status.json | 1 + test/wpt/tests/.azure-pipelines.yml | 595 +++ test/wpt/tests/.gitattributes | 1 + test/wpt/tests/.gitignore | 52 + test/wpt/tests/.mailmap | 9 + test/wpt/tests/.taskcluster.yml | 82 + test/wpt/tests/CODEOWNERS | 6 + test/wpt/tests/CODE_OF_CONDUCT.md | 138 + test/wpt/tests/CONTRIBUTING.md | 11 + .../FileAPI/Blob-methods-from-detached-frame.html | 59 + .../BlobURL/cross-partition.tentative.https.html | 276 ++ .../tests/FileAPI/BlobURL/support/file_test2.txt | 0 test/wpt/tests/FileAPI/BlobURL/test2-manual.html | 62 + .../progress_event_bubbles_cancelable.html | 33 + .../FileAPI/FileReader/support/file_test1.txt | 0 .../FileAPI/FileReader/test_errors-manual.html | 72 + .../FileReader/test_notreadableerrors-manual.html | 42 + .../FileReader/test_securityerrors-manual.html | 40 + test/wpt/tests/FileAPI/FileReader/workers.html | 27 + test/wpt/tests/FileAPI/FileReaderSync.worker.js | 56 + test/wpt/tests/FileAPI/META.yml | 6 + .../tests/FileAPI/blob/Blob-array-buffer.any.js | 45 + .../FileAPI/blob/Blob-constructor-dom.window.js | 53 + .../FileAPI/blob/Blob-constructor-endings.html | 104 + .../wpt/tests/FileAPI/blob/Blob-constructor.any.js | 468 ++ .../tests/FileAPI/blob/Blob-in-worker.worker.js | 9 + .../tests/FileAPI/blob/Blob-slice-overflow.any.js | 32 + test/wpt/tests/FileAPI/blob/Blob-slice.any.js | 231 + .../tests/FileAPI/blob/Blob-stream-byob-crash.html | 11 + .../FileAPI/blob/Blob-stream-sync-xhr-crash.html | 13 + test/wpt/tests/FileAPI/blob/Blob-stream.any.js | 83 + test/wpt/tests/FileAPI/blob/Blob-text.any.js | 64 + .../FileAPI/file/File-constructor-endings.html | 104 + .../wpt/tests/FileAPI/file/File-constructor.any.js | 155 + .../file/Worker-read-file-constructor.worker.js | 15 + .../FileAPI/file/resources/echo-content-escaped.py | 26 + .../FileAPI/file/send-file-form-controls.html | 113 + .../FileAPI/file/send-file-form-iso-2022-jp.html | 65 + .../FileAPI/file/send-file-form-punctuation.html | 226 + .../tests/FileAPI/file/send-file-form-utf-8.html | 62 + .../FileAPI/file/send-file-form-windows-1252.html | 62 + .../file/send-file-form-x-user-defined.html | 63 + test/wpt/tests/FileAPI/file/send-file-form.html | 25 + .../file/send-file-formdata-controls.any.js | 69 + .../file/send-file-formdata-punctuation.any.js | 144 + .../FileAPI/file/send-file-formdata-utf-8.any.js | 33 + .../tests/FileAPI/file/send-file-formdata.any.js | 8 + test/wpt/tests/FileAPI/fileReader.any.js | 59 + .../tests/FileAPI/filelist-section/filelist.html | 57 + .../filelist_multiple_selected_files-manual.html | 64 + .../filelist_selected_file-manual.html | 64 + .../FileAPI/filelist-section/support/upload.txt | 1 + .../FileAPI/filelist-section/support/upload.zip | Bin 0 -> 220 bytes test/wpt/tests/FileAPI/historical.https.html | 65 + test/wpt/tests/FileAPI/idlharness-manual.html | 45 + test/wpt/tests/FileAPI/idlharness.any.js | 19 + test/wpt/tests/FileAPI/idlharness.html | 37 + test/wpt/tests/FileAPI/idlharness.worker.js | 17 + test/wpt/tests/FileAPI/progress-manual.html | 49 + .../Determining-Encoding.any.js | 81 + .../FileReader-event-handler-attributes.any.js | 17 + .../FileReader-multiple-reads.any.js | 81 + .../reading-data-section/filereader_abort.any.js | 38 + .../reading-data-section/filereader_error.any.js | 19 + .../reading-data-section/filereader_events.any.js | 19 + .../filereader_file-manual.html | 69 + 
.../filereader_file_img-manual.html | 47 + .../filereader_readAsArrayBuffer.any.js | 23 + .../filereader_readAsBinaryString.any.js | 23 + .../filereader_readAsDataURL.any.js | 54 + .../filereader_readAsText.any.js | 36 + .../filereader_readystate.any.js | 19 + .../reading-data-section/filereader_result.any.js | 82 + .../reading-data-section/support/blue-100x100.png | Bin 0 -> 227 bytes test/wpt/tests/FileAPI/support/Blob.js | 70 + .../support/document-domain-setter.sub.html | 7 + test/wpt/tests/FileAPI/support/empty-document.html | 3 + .../FileAPI/support/historical-serviceworker.js | 5 + test/wpt/tests/FileAPI/support/incumbent.sub.html | 22 + .../tests/FileAPI/support/send-file-form-helper.js | 282 ++ .../FileAPI/support/send-file-formdata-helper.js | 99 + test/wpt/tests/FileAPI/support/upload.txt | 1 + test/wpt/tests/FileAPI/support/url-origin.html | 6 + test/wpt/tests/FileAPI/unicode.html | 46 + .../tests/FileAPI/url/cross-global-revoke.sub.html | 62 + .../url/multi-global-origin-serialization.sub.html | 26 + .../tests/FileAPI/url/resources/create-helper.html | 7 + .../tests/FileAPI/url/resources/create-helper.js | 4 + .../wpt/tests/FileAPI/url/resources/fetch-tests.js | 71 + .../tests/FileAPI/url/resources/revoke-helper.html | 7 + .../tests/FileAPI/url/resources/revoke-helper.js | 9 + test/wpt/tests/FileAPI/url/sandboxed-iframe.html | 32 + test/wpt/tests/FileAPI/url/unicode-origin.sub.html | 23 + test/wpt/tests/FileAPI/url/url-charset.window.js | 34 + test/wpt/tests/FileAPI/url/url-format.any.js | 70 + .../tests/FileAPI/url/url-in-tags-revoke.window.js | 115 + test/wpt/tests/FileAPI/url/url-in-tags.window.js | 48 + test/wpt/tests/FileAPI/url/url-lifetime.html | 56 + test/wpt/tests/FileAPI/url/url-reload.window.js | 36 + test/wpt/tests/FileAPI/url/url-with-fetch.any.js | 72 + test/wpt/tests/FileAPI/url/url-with-xhr.any.js | 68 + .../url/url_createobjecturl_file-manual.html | 45 + .../url/url_createobjecturl_file_img-manual.html | 28 + .../FileAPI/url/url_xmlhttprequest_img-ref.html | 12 + .../tests/FileAPI/url/url_xmlhttprequest_img.html | 27 + test/wpt/tests/LICENSE.md | 11 + test/wpt/tests/README.md | 124 + test/wpt/tests/common/CustomCorsResponse.py | 30 + test/wpt/tests/common/META.yml | 3 + test/wpt/tests/common/PrefixedLocalStorage.js | 116 + .../tests/common/PrefixedLocalStorage.js.headers | 1 + test/wpt/tests/common/PrefixedPostMessage.js | 100 + .../tests/common/PrefixedPostMessage.js.headers | 1 + test/wpt/tests/common/README.md | 10 + test/wpt/tests/common/__init__.py | 0 test/wpt/tests/common/arrays.js | 31 + test/wpt/tests/common/blank-with-cors.html | 0 test/wpt/tests/common/blank-with-cors.html.headers | 1 + test/wpt/tests/common/blank.html | 0 test/wpt/tests/common/custom-cors-response.js | 32 + test/wpt/tests/common/dispatcher/README.md | 228 + test/wpt/tests/common/dispatcher/dispatcher.js | 256 + test/wpt/tests/common/dispatcher/dispatcher.py | 53 + .../common/dispatcher/executor-service-worker.js | 24 + .../wpt/tests/common/dispatcher/executor-worker.js | 12 + test/wpt/tests/common/dispatcher/executor.html | 15 + .../tests/common/dispatcher/remote-executor.html | 12 + test/wpt/tests/common/domain-setter.sub.html | 8 + test/wpt/tests/common/dummy.xhtml | 2 + test/wpt/tests/common/dummy.xml | 1 + test/wpt/tests/common/echo.py | 6 + test/wpt/tests/common/gc.js | 52 + test/wpt/tests/common/get-host-info.sub.js | 63 + test/wpt/tests/common/get-host-info.sub.js.headers | 1 + test/wpt/tests/common/media.js | 61 + test/wpt/tests/common/media.js.headers | 1 + 
test/wpt/tests/common/object-association.js | 74 + .../wpt/tests/common/object-association.js.headers | 1 + .../wpt/tests/common/performance-timeline-utils.js | 56 + .../common/performance-timeline-utils.js.headers | 1 + test/wpt/tests/common/proxy-all.sub.pac | 3 + test/wpt/tests/common/redirect-opt-in.py | 20 + test/wpt/tests/common/redirect.py | 19 + test/wpt/tests/common/refresh.py | 11 + test/wpt/tests/common/reftest-wait.js | 39 + test/wpt/tests/common/reftest-wait.js.headers | 1 + test/wpt/tests/common/rendering-utils.js | 19 + test/wpt/tests/common/sab.js | 21 + test/wpt/tests/common/security-features/README.md | 460 ++ .../wpt/tests/common/security-features/__init__.py | 0 .../security-features/resources/common.sub.js | 1311 ++++++ .../resources/common.sub.js.headers | 1 + .../common/security-features/scope/__init__.py | 0 .../common/security-features/scope/document.py | 36 + .../scope/template/document.html.template | 30 + .../scope/template/worker.js.template | 29 + .../tests/common/security-features/scope/util.py | 43 + .../tests/common/security-features/scope/worker.py | 44 + .../security-features/subresource/__init__.py | 0 .../common/security-features/subresource/audio.py | 18 + .../security-features/subresource/document.py | 12 + .../common/security-features/subresource/empty.py | 14 + .../common/security-features/subresource/font.py | 76 + .../common/security-features/subresource/image.py | 116 + .../security-features/subresource/referrer.py | 4 + .../common/security-features/subresource/script.py | 14 + .../security-features/subresource/shared-worker.py | 13 + .../security-features/subresource/static-import.py | 61 + .../security-features/subresource/stylesheet.py | 61 + .../security-features/subresource/subresource.py | 199 + .../common/security-features/subresource/svg.py | 37 + .../subresource/template/document.html.template | 16 + .../subresource/template/font.css.template | 9 + .../subresource/template/image.css.template | 3 + .../subresource/template/script.js.template | 3 + .../subresource/template/shared-worker.js.template | 5 + .../subresource/template/static-import.js.template | 1 + .../subresource/template/svg.css.template | 3 + .../subresource/template/svg.embedded.template | 5 + .../subresource/template/worker.js.template | 3 + .../common/security-features/subresource/video.py | 17 + .../common/security-features/subresource/worker.py | 13 + .../common/security-features/subresource/xhr.py | 16 + .../tools/format_spec_src_json.py | 24 + .../common/security-features/tools/generate.py | 462 ++ .../common/security-features/tools/spec.src.json | 533 +++ .../security-features/tools/spec_validator.py | 251 + .../tools/template/disclaimer.template | 1 + .../tools/template/spec_json.js.template | 1 + .../tools/template/test.debug.html.template | 26 + .../tools/template/test.release.html.template | 22 + .../tests/common/security-features/tools/util.py | 228 + test/wpt/tests/common/security-features/types.md | 62 + test/wpt/tests/common/slow-redirect.py | 29 + test/wpt/tests/common/slow.py | 6 + test/wpt/tests/common/square.png | Bin 0 -> 18299 bytes test/wpt/tests/common/stringifiers.js | 57 + test/wpt/tests/common/stringifiers.js.headers | 1 + test/wpt/tests/common/subset-tests-by-key.js | 83 + test/wpt/tests/common/subset-tests.js | 60 + .../common/test-setting-immutable-prototype.js | 67 + .../test-setting-immutable-prototype.js.headers | 1 + test/wpt/tests/common/text-plain.txt | 4 + .../common/third_party/reftest-analyzer.xhtml | 934 ++++ 
test/wpt/tests/common/utils.js | 98 + test/wpt/tests/common/utils.js.headers | 1 + test/wpt/tests/common/window-name-setter.html | 12 + test/wpt/tests/common/worklet-reftest.js | 50 + test/wpt/tests/common/worklet-reftest.js.headers | 1 + test/wpt/tests/fetch/META.yml | 7 + test/wpt/tests/fetch/README.md | 6 + test/wpt/tests/fetch/api/abort/cache.https.any.js | 47 + .../tests/fetch/api/abort/destroyed-context.html | 27 + test/wpt/tests/fetch/api/abort/general.any.js | 572 +++ test/wpt/tests/fetch/api/abort/keepalive.html | 85 + test/wpt/tests/fetch/api/abort/request.any.js | 85 + .../api/abort/serviceworker-intercepted.https.html | 212 + .../wpt/tests/fetch/api/basic/accept-header.any.js | 34 + .../fetch/api/basic/block-mime-as-script.html | 43 + .../tests/fetch/api/basic/conditional-get.any.js | 38 + .../fetch/api/basic/error-after-response.any.js | 24 + .../fetch/api/basic/header-value-combining.any.js | 15 + .../fetch/api/basic/header-value-null-byte.any.js | 5 + test/wpt/tests/fetch/api/basic/historical.any.js | 17 + .../fetch/api/basic/http-response-code.any.js | 14 + .../wpt/tests/fetch/api/basic/integrity.sub.any.js | 87 + test/wpt/tests/fetch/api/basic/keepalive.any.js | 43 + .../tests/fetch/api/basic/mediasource.window.js | 5 + .../tests/fetch/api/basic/mode-no-cors.sub.any.js | 29 + .../tests/fetch/api/basic/mode-same-origin.any.js | 28 + test/wpt/tests/fetch/api/basic/referrer.any.js | 29 + .../api/basic/request-forbidden-headers.any.js | 100 + test/wpt/tests/fetch/api/basic/request-head.any.js | 6 + .../fetch/api/basic/request-headers-case.any.js | 13 + .../api/basic/request-headers-nonascii.any.js | 29 + .../tests/fetch/api/basic/request-headers.any.js | 82 + .../basic/request-referrer-redirected-worker.html | 17 + .../tests/fetch/api/basic/request-referrer.any.js | 24 + .../tests/fetch/api/basic/request-upload.any.js | 135 + .../tests/fetch/api/basic/request-upload.h2.any.js | 186 + .../fetch/api/basic/response-null-body.any.js | 38 + .../tests/fetch/api/basic/response-url.sub.any.js | 16 + test/wpt/tests/fetch/api/basic/scheme-about.any.js | 26 + .../tests/fetch/api/basic/scheme-blob.sub.any.js | 125 + test/wpt/tests/fetch/api/basic/scheme-data.any.js | 43 + .../tests/fetch/api/basic/scheme-others.sub.any.js | 31 + test/wpt/tests/fetch/api/basic/status.h2.any.js | 17 + .../tests/fetch/api/basic/stream-response.any.js | 40 + .../fetch/api/basic/stream-safe-creation.any.js | 54 + test/wpt/tests/fetch/api/basic/text-utf8.any.js | 74 + test/wpt/tests/fetch/api/body/cloned-any.js | 50 + test/wpt/tests/fetch/api/body/formdata.any.js | 14 + test/wpt/tests/fetch/api/body/mime-type.any.js | 127 + test/wpt/tests/fetch/api/cors/cors-basic.any.js | 43 + .../fetch/api/cors/cors-cookies-redirect.any.js | 49 + test/wpt/tests/fetch/api/cors/cors-cookies.any.js | 56 + .../fetch/api/cors/cors-expose-star.sub.any.js | 41 + .../tests/fetch/api/cors/cors-filtering.sub.any.js | 69 + .../wpt/tests/fetch/api/cors/cors-keepalive.any.js | 118 + .../api/cors/cors-multiple-origins.sub.any.js | 22 + .../tests/fetch/api/cors/cors-no-preflight.any.js | 41 + test/wpt/tests/fetch/api/cors/cors-origin.any.js | 51 + .../fetch/api/cors/cors-preflight-cache.any.js | 46 + .../cors/cors-preflight-not-cors-safelisted.any.js | 19 + .../fetch/api/cors/cors-preflight-redirect.any.js | 37 + .../fetch/api/cors/cors-preflight-referrer.any.js | 51 + .../cors/cors-preflight-response-validation.any.js | 33 + .../fetch/api/cors/cors-preflight-star.any.js | 86 + .../fetch/api/cors/cors-preflight-status.any.js | 37 + 
.../wpt/tests/fetch/api/cors/cors-preflight.any.js | 62 + .../api/cors/cors-redirect-credentials.any.js | 52 + .../fetch/api/cors/cors-redirect-preflight.any.js | 46 + test/wpt/tests/fetch/api/cors/cors-redirect.any.js | 42 + test/wpt/tests/fetch/api/cors/data-url-iframe.html | 58 + .../fetch/api/cors/data-url-shared-worker.html | 53 + test/wpt/tests/fetch/api/cors/data-url-worker.html | 50 + .../fetch/api/cors/resources/corspreflight.js | 58 + .../api/cors/resources/not-cors-safelisted.json | 13 + .../wpt/tests/fetch/api/cors/sandboxed-iframe.html | 14 + .../fetch/api/crashtests/body-window-destroy.html | 11 + test/wpt/tests/fetch/api/crashtests/request.html | 8 + .../api/credentials/authentication-basic.any.js | 17 + .../credentials/authentication-redirection.any.js | 29 + .../wpt/tests/fetch/api/credentials/cookies.any.js | 49 + .../fetch/api/headers/header-setcookie.any.js | 266 ++ .../api/headers/header-values-normalize.any.js | 72 + .../tests/fetch/api/headers/header-values.any.js | 63 + .../tests/fetch/api/headers/headers-basic.any.js | 275 ++ .../tests/fetch/api/headers/headers-casing.any.js | 54 + .../tests/fetch/api/headers/headers-combine.any.js | 66 + .../tests/fetch/api/headers/headers-errors.any.js | 96 + .../tests/fetch/api/headers/headers-no-cors.any.js | 59 + .../fetch/api/headers/headers-normalize.any.js | 56 + .../tests/fetch/api/headers/headers-record.any.js | 357 ++ .../fetch/api/headers/headers-structure.any.js | 20 + test/wpt/tests/fetch/api/idlharness.any.js | 21 + .../fetch/api/policies/csp-blocked-worker.html | 16 + test/wpt/tests/fetch/api/policies/csp-blocked.html | 15 + .../fetch/api/policies/csp-blocked.html.headers | 1 + test/wpt/tests/fetch/api/policies/csp-blocked.js | 13 + .../fetch/api/policies/csp-blocked.js.headers | 1 + test/wpt/tests/fetch/api/policies/nested-policy.js | 1 + .../fetch/api/policies/nested-policy.js.headers | 1 + .../referrer-no-referrer-service-worker.https.html | 18 + .../api/policies/referrer-no-referrer-worker.html | 17 + .../fetch/api/policies/referrer-no-referrer.html | 15 + .../api/policies/referrer-no-referrer.html.headers | 1 + .../fetch/api/policies/referrer-no-referrer.js | 19 + .../api/policies/referrer-no-referrer.js.headers | 1 + .../referrer-origin-service-worker.https.html | 18 + ...gin-when-cross-origin-service-worker.https.html | 17 + .../referrer-origin-when-cross-origin-worker.html | 16 + .../referrer-origin-when-cross-origin.html | 16 + .../referrer-origin-when-cross-origin.html.headers | 1 + .../policies/referrer-origin-when-cross-origin.js | 21 + .../referrer-origin-when-cross-origin.js.headers | 1 + .../fetch/api/policies/referrer-origin-worker.html | 17 + .../tests/fetch/api/policies/referrer-origin.html | 16 + .../api/policies/referrer-origin.html.headers | 1 + .../tests/fetch/api/policies/referrer-origin.js | 30 + .../fetch/api/policies/referrer-origin.js.headers | 1 + .../referrer-unsafe-url-service-worker.https.html | 18 + .../api/policies/referrer-unsafe-url-worker.html | 17 + .../fetch/api/policies/referrer-unsafe-url.html | 16 + .../api/policies/referrer-unsafe-url.html.headers | 1 + .../fetch/api/policies/referrer-unsafe-url.js | 21 + .../api/policies/referrer-unsafe-url.js.headers | 1 + .../redirect-back-to-original-origin.any.js | 38 + .../tests/fetch/api/redirect/redirect-count.any.js | 51 + .../api/redirect/redirect-empty-location.any.js | 21 + .../fetch/api/redirect/redirect-keepalive.any.js | 94 + .../redirect-location-escape.tentative.any.js | 46 + .../fetch/api/redirect/redirect-location.any.js | 
73 + .../fetch/api/redirect/redirect-method.any.js | 112 + .../tests/fetch/api/redirect/redirect-mode.any.js | 59 + .../fetch/api/redirect/redirect-origin.any.js | 68 + .../api/redirect/redirect-referrer-override.any.js | 104 + .../fetch/api/redirect/redirect-referrer.any.js | 66 + .../fetch/api/redirect/redirect-schemes.any.js | 19 + .../fetch/api/redirect/redirect-to-dataurl.any.js | 28 + .../fetch/api/redirect/redirect-upload.h2.any.js | 33 + .../destination/fetch-destination-frame.https.html | 51 + .../fetch-destination-iframe.https.html | 51 + .../fetch-destination-no-load-event.https.html | 124 + .../fetch-destination-prefetch.https.html | 46 + .../fetch-destination-worker.https.html | 60 + .../destination/fetch-destination.https.html | 435 ++ .../fetch/api/request/destination/resources/dummy | 0 .../api/request/destination/resources/dummy.es | 0 .../request/destination/resources/dummy.es.headers | 1 + .../api/request/destination/resources/dummy.html | 0 .../api/request/destination/resources/dummy.png | Bin 0 -> 18299 bytes .../api/request/destination/resources/dummy.ttf | Bin 0 -> 2528 bytes .../request/destination/resources/dummy_audio.mp3 | Bin 0 -> 20498 bytes .../request/destination/resources/dummy_audio.oga | Bin 0 -> 18541 bytes .../request/destination/resources/dummy_video.mp4 | Bin 0 -> 67369 bytes .../request/destination/resources/dummy_video.ogv | Bin 0 -> 94372 bytes .../request/destination/resources/dummy_video.webm | Bin 0 -> 96902 bytes .../request/destination/resources/empty.https.html | 0 .../resources/fetch-destination-worker-frame.js | 20 + .../resources/fetch-destination-worker-iframe.js | 20 + .../fetch-destination-worker-no-load-event.js | 20 + .../resources/fetch-destination-worker.js | 12 + .../api/request/destination/resources/importer.js | 1 + .../fetch/api/request/forbidden-method.any.js | 13 + .../construct-in-detached-frame.window.js | 11 + .../api/request/multi-globals/current/current.html | 3 + .../request/multi-globals/incumbent/incumbent.html | 14 + .../api/request/multi-globals/url-parsing.html | 27 + .../fetch/api/request/request-bad-port.any.js | 92 + .../request-cache-default-conditional.any.js | 170 + .../fetch/api/request/request-cache-default.any.js | 39 + .../api/request/request-cache-force-cache.any.js | 67 + .../api/request/request-cache-no-cache.any.js | 25 + .../api/request/request-cache-no-store.any.js | 37 + .../request/request-cache-only-if-cached.any.js | 66 + .../fetch/api/request/request-cache-reload.any.js | 51 + test/wpt/tests/fetch/api/request/request-cache.js | 223 + .../tests/fetch/api/request/request-clone.sub.html | 63 + .../fetch/api/request/request-consume-empty.any.js | 101 + .../tests/fetch/api/request/request-consume.any.js | 145 + .../fetch/api/request/request-disturbed.any.js | 109 + .../tests/fetch/api/request/request-error.any.js | 56 + test/wpt/tests/fetch/api/request/request-error.js | 57 + .../tests/fetch/api/request/request-headers.any.js | 178 + .../fetch/api/request/request-init-001.sub.html | 112 + .../fetch/api/request/request-init-002.any.js | 60 + .../fetch/api/request/request-init-003.sub.html | 84 + .../api/request/request-init-contenttype.any.js | 141 + .../fetch/api/request/request-init-priority.any.js | 26 + .../fetch/api/request/request-init-stream.any.js | 147 + .../fetch/api/request/request-keepalive-quota.html | 97 + .../fetch/api/request/request-keepalive.any.js | 17 + .../request/request-reset-attributes.https.html | 96 + .../fetch/api/request/request-structure.any.js | 143 + 
.../wpt/tests/fetch/api/request/resources/cache.py | 67 + .../tests/fetch/api/request/resources/hello.txt | 1 + .../resources/request-reset-attributes-worker.js | 19 + test/wpt/tests/fetch/api/request/url-encoding.html | 25 + .../tests/fetch/api/resources/authentication.py | 14 + .../fetch/api/resources/bad-chunk-encoding.py | 13 + test/wpt/tests/fetch/api/resources/basic.html | 5 + test/wpt/tests/fetch/api/resources/cache.py | 18 + test/wpt/tests/fetch/api/resources/clean-stash.py | 6 + test/wpt/tests/fetch/api/resources/cors-top.txt | 1 + .../tests/fetch/api/resources/cors-top.txt.headers | 1 + test/wpt/tests/fetch/api/resources/data.json | 1 + .../api/resources/dump-authorization-header.py | 14 + .../tests/fetch/api/resources/echo-content.h2.py | 7 + test/wpt/tests/fetch/api/resources/echo-content.py | 12 + test/wpt/tests/fetch/api/resources/empty.txt | 0 .../fetch/api/resources/infinite-slow-response.py | 35 + .../tests/fetch/api/resources/inspect-headers.py | 24 + .../tests/fetch/api/resources/keepalive-helper.js | 99 + .../fetch/api/resources/keepalive-iframe.html | 21 + .../api/resources/keepalive-redirect-iframe.html | 23 + .../api/resources/keepalive-redirect-window.html | 42 + test/wpt/tests/fetch/api/resources/method.py | 18 + test/wpt/tests/fetch/api/resources/preflight.py | 78 + .../fetch/api/resources/redirect-empty-location.py | 3 + test/wpt/tests/fetch/api/resources/redirect.h2.py | 14 + test/wpt/tests/fetch/api/resources/redirect.py | 73 + .../fetch/api/resources/sandboxed-iframe.html | 34 + .../fetch/api/resources/script-with-header.py | 7 + test/wpt/tests/fetch/api/resources/stash-put.py | 19 + test/wpt/tests/fetch/api/resources/stash-take.py | 9 + test/wpt/tests/fetch/api/resources/status.py | 11 + .../fetch/api/resources/sw-intercept-abort.js | 19 + test/wpt/tests/fetch/api/resources/sw-intercept.js | 10 + test/wpt/tests/fetch/api/resources/top.txt | 1 + test/wpt/tests/fetch/api/resources/trickle.py | 15 + test/wpt/tests/fetch/api/resources/utils.js | 105 + test/wpt/tests/fetch/api/response/json.any.js | 14 + .../api/response/many-empty-chunks-crash.html | 14 + .../response/multi-globals/current/current.html | 3 + .../multi-globals/incumbent/incumbent.html | 16 + .../response/multi-globals/relevant/relevant.html | 2 + .../api/response/multi-globals/url-parsing.html | 27 + .../response/response-body-read-task-handling.html | 86 + .../api/response/response-cancel-stream.any.js | 64 + .../api/response/response-clone-iframe.window.js | 32 + .../tests/fetch/api/response/response-clone.any.js | 140 + .../api/response/response-consume-empty.any.js | 99 + .../api/response/response-consume-stream.any.js | 61 + .../tests/fetch/api/response/response-consume.html | 317 ++ .../api/response/response-error-from-stream.any.js | 59 + .../tests/fetch/api/response/response-error.any.js | 27 + .../fetch/api/response/response-from-stream.any.js | 23 + .../fetch/api/response/response-init-001.any.js | 64 + .../fetch/api/response/response-init-002.any.js | 61 + .../api/response/response-init-contenttype.any.js | 125 + .../api/response/response-static-error.any.js | 34 + .../fetch/api/response/response-static-json.any.js | 96 + .../api/response/response-static-redirect.any.js | 40 + .../api/response/response-stream-bad-chunk.any.js | 24 + .../response/response-stream-disturbed-1.any.js | 44 + .../response/response-stream-disturbed-2.any.js | 35 + .../response/response-stream-disturbed-3.any.js | 36 + .../response/response-stream-disturbed-4.any.js | 35 + 
.../response/response-stream-disturbed-5.any.js | 19 + .../response/response-stream-disturbed-6.any.js | 76 + .../response-stream-disturbed-by-pipe.any.js | 17 + .../api/response/response-stream-disturbed-util.js | 17 + .../response-stream-with-broken-then.any.js | 117 + .../connection-pool/network-partition-key.html | 264 ++ .../network-partition-about-blank-checker.html | 35 + .../resources/network-partition-checker.html | 30 + .../network-partition-iframe-checker.html | 22 + .../resources/network-partition-key.js | 47 + .../resources/network-partition-key.py | 130 + .../network-partition-worker-checker.html | 24 + .../resources/network-partition-worker.js | 15 + .../fetch/content-encoding/bad-gzip-body.any.js | 22 + .../tests/fetch/content-encoding/gzip-body.any.js | 16 + .../content-encoding/resources/bad-gzip-body.py | 3 + .../content-encoding/resources/foo.octetstream.gz | Bin 0 -> 64 bytes .../resources/foo.octetstream.gz.headers | 2 + .../fetch/content-encoding/resources/foo.text.gz | Bin 0 -> 57 bytes .../content-encoding/resources/foo.text.gz.headers | 2 + .../api-and-duplicate-headers.any.js | 23 + .../tests/fetch/content-length/content-length.html | 14 + .../content-length/content-length.html.headers | 1 + .../tests/fetch/content-length/parsing.window.js | 18 + .../content-length/resources/content-length.py | 10 + .../content-length/resources/content-lengths.json | 142 + .../resources/identical-duplicates.asis | 9 + .../tests/fetch/content-length/too-long.window.js | 4 + test/wpt/tests/fetch/content-type/README.md | 20 + .../fetch/content-type/multipart-malformed.any.js | 22 + .../tests/fetch/content-type/multipart.window.js | 33 + .../fetch/content-type/resources/content-type.py | 18 + .../content-type/resources/content-types.json | 122 + .../resources/script-content-types.json | 92 + .../tests/fetch/content-type/response.window.js | 72 + test/wpt/tests/fetch/content-type/script.window.js | 48 + test/wpt/tests/fetch/corb/README.md | 67 + .../corb/img-html-correctly-labeled.sub-ref.html | 4 + .../fetch/corb/img-html-correctly-labeled.sub.html | 11 + .../img-mime-types-coverage.tentative.sub.html | 85 + ...slabeled-as-html-nosniff.tentative.sub-ref.html | 4 + ...g-mislabeled-as-html-nosniff.tentative.sub.html | 11 + .../corb/img-png-mislabeled-as-html.sub-ref.html | 4 + .../fetch/corb/img-png-mislabeled-as-html.sub.html | 10 + .../img-svg-doctype-html-mimetype-empty.sub.html | 7 + .../img-svg-doctype-html-mimetype-svg.sub.html | 11 + .../tests/fetch/corb/img-svg-invalid.sub-ref.html | 5 + .../fetch/corb/img-svg-labeled-as-dash.sub.html | 6 + .../fetch/corb/img-svg-labeled-as-svg-xml.sub.html | 6 + .../wpt/tests/fetch/corb/img-svg-xml-decl.sub.html | 6 + test/wpt/tests/fetch/corb/img-svg.sub-ref.html | 5 + ...g-mislabeled-as-html-nosniff.tentative.sub.html | 24 + .../resources/css-mislabeled-as-html-nosniff.css | 1 + .../css-mislabeled-as-html-nosniff.css.headers | 2 + .../corb/resources/css-mislabeled-as-html.css | 1 + .../resources/css-mislabeled-as-html.css.headers | 1 + .../resources/css-with-json-parser-breaker.css | 3 + .../fetch/corb/resources/empty-labeled-as-png.png | 0 .../resources/empty-labeled-as-png.png.headers | 1 + .../corb/resources/html-correctly-labeled.html | 10 + .../resources/html-correctly-labeled.html.headers | 1 + .../tests/fetch/corb/resources/html-js-polyglot.js | 9 + .../corb/resources/html-js-polyglot.js.headers | 1 + .../fetch/corb/resources/html-js-polyglot2.js | 10 + .../corb/resources/html-js-polyglot2.js.headers | 1 + 
.../resources/js-mislabeled-as-html-nosniff.js | 1 + .../js-mislabeled-as-html-nosniff.js.headers | 2 + .../fetch/corb/resources/js-mislabeled-as-html.js | 1 + .../resources/js-mislabeled-as-html.js.headers | 1 + .../fetch/corb/resources/png-correctly-labeled.png | Bin 0 -> 1010 bytes .../resources/png-correctly-labeled.png.headers | 1 + .../resources/png-mislabeled-as-html-nosniff.png | Bin 0 -> 1010 bytes .../png-mislabeled-as-html-nosniff.png.headers | 2 + .../corb/resources/png-mislabeled-as-html.png | Bin 0 -> 1010 bytes .../resources/png-mislabeled-as-html.png.headers | 1 + .../fetch/corb/resources/response_block_probe.js | 1 + .../corb/resources/response_block_probe.js.headers | 1 + .../fetch/corb/resources/sniffable-resource.py | 11 + ...t-posts-html-containing-blob-url-to-parent.html | 16 + .../resources/svg-doctype-html-mimetype-empty.svg | 4 + .../svg-doctype-html-mimetype-empty.svg.headers | 1 + .../resources/svg-doctype-html-mimetype-svg.svg | 4 + .../svg-doctype-html-mimetype-svg.svg.headers | 1 + .../fetch/corb/resources/svg-labeled-as-dash.svg | 3 + .../corb/resources/svg-labeled-as-dash.svg.headers | 1 + .../corb/resources/svg-labeled-as-svg-xml.svg | 3 + .../resources/svg-labeled-as-svg-xml.svg.headers | 1 + .../tests/fetch/corb/resources/svg-xml-decl.svg | 4 + test/wpt/tests/fetch/corb/resources/svg.svg | 3 + .../wpt/tests/fetch/corb/resources/svg.svg.headers | 1 + .../fetch/corb/response_block.tentative.https.html | 50 + ...cript-html-correctly-labeled.tentative.sub.html | 32 + .../fetch/corb/script-html-js-polyglot.sub.html | 32 + .../script-html-via-cross-origin-blob-url.sub.html | 38 + .../script-js-mislabeled-as-html-nosniff.sub.html | 33 + .../corb/script-js-mislabeled-as-html.sub.html | 25 + ...rce-with-json-parser-breaker.tentative.sub.html | 85 + ...urce-with-nonsniffable-types.tentative.sub.html | 84 + .../style-css-mislabeled-as-html-nosniff.sub.html | 42 + .../corb/style-css-mislabeled-as-html.sub.html | 36 + .../style-css-with-json-parser-breaker.sub.html | 38 + .../corb/style-html-correctly-labeled.sub.html | 41 + .../fetch-in-iframe.html | 67 + .../cross-origin-resource-policy/fetch.any.js | 76 + .../fetch.https.any.js | 56 + .../cross-origin-resource-policy/iframe-loads.html | 46 + .../cross-origin-resource-policy/image-loads.html | 54 + .../resources/green.png | Bin 0 -> 87 bytes .../resources/hello.py | 6 + .../resources/iframe.py | 5 + .../resources/iframeFetch.html | 19 + .../resources/image.py | 22 + .../resources/redirect.py | 6 + .../resources/script.py | 6 + .../scheme-restriction.any.js | 7 + .../scheme-restriction.https.window.js | 13 + .../cross-origin-resource-policy/script-loads.html | 52 + .../cross-origin-resource-policy/syntax.any.js | 19 + test/wpt/tests/fetch/data-urls/README.md | 11 + test/wpt/tests/fetch/data-urls/base64.any.js | 18 + test/wpt/tests/fetch/data-urls/navigate.window.js | 75 + test/wpt/tests/fetch/data-urls/processing.any.js | 22 + .../tests/fetch/data-urls/resources/base64.json | 82 + .../tests/fetch/data-urls/resources/data-urls.json | 214 + test/wpt/tests/fetch/fetch-later/META.yml | 3 + test/wpt/tests/fetch/fetch-later/README.md | 3 + .../fetch-later/basic.tentative.https.window.js | 13 + .../tests/fetch/fetch-later/non-secure.window.js | 8 + .../sendondiscard.tentative.https.window.js | 28 + test/wpt/tests/fetch/h1-parsing/README.md | 5 + test/wpt/tests/fetch/h1-parsing/lone-cr.window.js | 23 + .../resources-with-0x00-in-header.window.js | 31 + .../wpt/tests/fetch/h1-parsing/resources/README.md | 6 + 
.../resources/blue-with-0x00-in-a-header.asis | Bin 0 -> 546 bytes .../resources/document-with-0x00-in-header.py | 4 + .../tests/fetch/h1-parsing/resources/message.py | 3 + .../resources/script-with-0x00-in-header.py | 4 + .../fetch/h1-parsing/resources/status-code.py | 6 + .../tests/fetch/h1-parsing/status-code.window.js | 98 + test/wpt/tests/fetch/http-cache/304-update.any.js | 146 + test/wpt/tests/fetch/http-cache/README.md | 72 + .../http-cache/basic-auth-cache-test-ref.html | 6 + .../fetch/http-cache/basic-auth-cache-test.html | 27 + test/wpt/tests/fetch/http-cache/cache-mode.any.js | 61 + test/wpt/tests/fetch/http-cache/cc-request.any.js | 202 + .../fetch/http-cache/credentials.tentative.any.js | 62 + test/wpt/tests/fetch/http-cache/freshness.any.js | 215 + test/wpt/tests/fetch/http-cache/heuristic.any.js | 93 + test/wpt/tests/fetch/http-cache/http-cache.js | 274 ++ test/wpt/tests/fetch/http-cache/invalidate.any.js | 235 + test/wpt/tests/fetch/http-cache/partial.any.js | 208 + test/wpt/tests/fetch/http-cache/post-patch.any.js | 46 + .../tests/fetch/http-cache/resources/http-cache.py | 124 + .../fetch/http-cache/resources/securedimage.py | 19 + .../resources/split-cache-popup-with-iframe.html | 34 + .../http-cache/resources/split-cache-popup.html | 28 + test/wpt/tests/fetch/http-cache/split-cache.html | 158 + test/wpt/tests/fetch/http-cache/status.any.js | 60 + test/wpt/tests/fetch/http-cache/vary.any.js | 313 ++ .../canvas-remote-read-remote-image-redirect.html | 28 + test/wpt/tests/fetch/metadata/META.yml | 4 + test/wpt/tests/fetch/metadata/README.md | 9 + .../tests/fetch/metadata/audio-worklet.https.html | 20 + .../fetch/metadata/embed.https.sub.tentative.html | 63 + .../metadata/fetch-preflight.https.sub.any.js | 29 + .../tests/fetch/metadata/fetch.https.sub.any.js | 58 + .../generated/appcache-manifest.https.sub.html | 341 ++ .../metadata/generated/audioworklet.https.sub.html | 271 ++ .../css-font-face.https.sub.tentative.html | 230 + .../generated/css-font-face.sub.tentative.html | 196 + .../generated/css-images.https.sub.tentative.html | 1384 ++++++ .../generated/css-images.sub.tentative.html | 1099 +++++ .../metadata/generated/element-a.https.sub.html | 482 ++ .../fetch/metadata/generated/element-a.sub.html | 342 ++ .../metadata/generated/element-area.https.sub.html | 482 ++ .../fetch/metadata/generated/element-area.sub.html | 342 ++ .../generated/element-audio.https.sub.html | 325 ++ .../metadata/generated/element-audio.sub.html | 229 + .../generated/element-embed.https.sub.html | 224 + .../metadata/generated/element-embed.sub.html | 190 + .../generated/element-frame.https.sub.html | 309 ++ .../metadata/generated/element-frame.sub.html | 250 + .../generated/element-iframe.https.sub.html | 309 ++ .../metadata/generated/element-iframe.sub.html | 250 + .../element-img-environment-change.https.sub.html | 357 ++ .../element-img-environment-change.sub.html | 270 ++ .../metadata/generated/element-img.https.sub.html | 645 +++ .../fetch/metadata/generated/element-img.sub.html | 456 ++ .../generated/element-input-image.https.sub.html | 229 + .../generated/element-input-image.sub.html | 184 + .../generated/element-link-icon.https.sub.html | 371 ++ .../metadata/generated/element-link-icon.sub.html | 279 ++ .../element-link-prefetch.https.optional.sub.html | 559 +++ .../element-link-prefetch.optional.sub.html | 275 ++ .../element-meta-refresh.https.optional.sub.html | 276 ++ .../element-meta-refresh.optional.sub.html | 225 + .../generated/element-picture.https.sub.html | 997 ++++ 
.../metadata/generated/element-picture.sub.html | 721 +++ .../generated/element-script.https.sub.html | 593 +++ .../metadata/generated/element-script.sub.html | 488 ++ .../generated/element-video-poster.https.sub.html | 243 + .../generated/element-video-poster.sub.html | 198 + .../generated/element-video.https.sub.html | 325 ++ .../metadata/generated/element-video.sub.html | 229 + .../fetch-via-serviceworker.https.sub.html | 683 +++ .../fetch/metadata/generated/fetch.https.sub.html | 302 ++ .../tests/fetch/metadata/generated/fetch.sub.html | 220 + .../generated/form-submission.https.sub.html | 522 +++ .../metadata/generated/form-submission.sub.html | 400 ++ .../metadata/generated/header-link.https.sub.html | 529 +++ .../generated/header-link.https.sub.tentative.html | 51 + .../fetch/metadata/generated/header-link.sub.html | 460 ++ .../header-refresh.https.optional.sub.html | 273 ++ .../generated/header-refresh.optional.sub.html | 222 + .../script-module-import-dynamic.https.sub.html | 254 + .../script-module-import-dynamic.sub.html | 214 + .../script-module-import-static.https.sub.html | 288 ++ .../generated/script-module-import-static.sub.html | 246 + .../generated/serviceworker.https.sub.html | 170 + .../metadata/generated/svg-image.https.sub.html | 367 ++ .../fetch/metadata/generated/svg-image.sub.html | 265 ++ .../generated/window-history.https.sub.html | 237 + .../metadata/generated/window-history.sub.html | 360 ++ .../generated/window-location.https.sub.html | 1184 +++++ .../metadata/generated/window-location.sub.html | 894 ++++ .../worker-dedicated-constructor.https.sub.html | 118 + .../worker-dedicated-constructor.sub.html | 204 + .../worker-dedicated-importscripts.https.sub.html | 268 ++ .../worker-dedicated-importscripts.sub.html | 228 + .../tests/fetch/metadata/navigation.https.sub.html | 23 + .../wpt/tests/fetch/metadata/object.https.sub.html | 62 + .../tests/fetch/metadata/paint-worklet.https.html | 19 + .../wpt/tests/fetch/metadata/portal.https.sub.html | 50 + .../tests/fetch/metadata/preload.https.sub.html | 50 + ...tiple-redirect-https-downgrade-upgrade.sub.html | 18 + .../redirect/redirect-http-upgrade.sub.html | 17 + .../redirect/redirect-https-downgrade.sub.html | 17 + .../wpt/tests/fetch/metadata/report.https.sub.html | 33 + .../metadata/report.https.sub.html.sub.headers | 3 + .../metadata/resources/appcache-iframe.sub.html | 15 + .../fetch/metadata/resources/dedicatedWorker.js | 1 + .../tests/fetch/metadata/resources/echo-as-json.py | 29 + .../fetch/metadata/resources/echo-as-script.py | 14 + .../fetch/metadata/resources/es-module.sub.js | 1 + .../fetch-via-serviceworker--fallback--sw.js | 3 + .../fetch-via-serviceworker--respondWith--sw.js | 3 + .../resources/fetch-via-serviceworker-frame.html | 3 + .../tests/fetch/metadata/resources/header-link.py | 15 + test/wpt/tests/fetch/metadata/resources/helper.js | 42 + .../tests/fetch/metadata/resources/helper.sub.js | 67 + .../fetch/metadata/resources/message-opener.html | 17 + .../fetch/metadata/resources/post-to-owner.py | 36 + .../fetch/metadata/resources/record-header.py | 145 + .../fetch/metadata/resources/record-headers.py | 73 + .../metadata/resources/redirectTestHelper.sub.js | 167 + .../resources/serviceworker-accessors-frame.html | 3 + .../resources/serviceworker-accessors.sw.js | 14 + .../tests/fetch/metadata/resources/sharedWorker.js | 9 + .../metadata/resources/unload-with-beacon.html | 12 + .../fetch/metadata/resources/xslt-test.sub.xml | 12 + .../serviceworker-accessors.https.sub.html | 51 + 
.../fetch/metadata/sharedworker.https.sub.html | 40 + test/wpt/tests/fetch/metadata/style.https.sub.html | 86 + test/wpt/tests/fetch/metadata/tools/README.md | 126 + .../fetch/metadata/tools/fetch-metadata.conf.yml | 806 ++++ test/wpt/tests/fetch/metadata/tools/generate.py | 195 + .../templates/appcache-manifest.sub.https.html | 63 + .../tools/templates/audioworklet.https.sub.html | 53 + .../tools/templates/css-font-face.sub.html | 60 + .../metadata/tools/templates/css-images.sub.html | 137 + .../metadata/tools/templates/element-a.sub.html | 72 + .../metadata/tools/templates/element-area.sub.html | 72 + .../tools/templates/element-audio.sub.html | 51 + .../tools/templates/element-embed.sub.html | 54 + .../tools/templates/element-frame.sub.html | 62 + .../tools/templates/element-iframe.sub.html | 62 + .../element-img-environment-change.sub.html | 78 + .../metadata/tools/templates/element-img.sub.html | 52 + .../tools/templates/element-input-image.sub.html | 48 + .../tools/templates/element-link-icon.sub.html | 75 + .../element-link-prefetch.optional.sub.html | 71 + .../element-meta-refresh.optional.sub.html | 60 + .../tools/templates/element-picture.sub.html | 101 + .../tools/templates/element-script.sub.html | 54 + .../tools/templates/element-video-poster.sub.html | 62 + .../tools/templates/element-video.sub.html | 51 + .../fetch-via-serviceworker.https.sub.html | 88 + .../fetch/metadata/tools/templates/fetch.sub.html | 42 + .../tools/templates/form-submission.sub.html | 87 + .../metadata/tools/templates/header-link.sub.html | 56 + .../templates/header-refresh.optional.sub.html | 59 + .../script-module-import-dynamic.sub.html | 35 + .../templates/script-module-import-static.sub.html | 53 + .../tools/templates/serviceworker.https.sub.html | 72 + .../metadata/tools/templates/svg-image.sub.html | 75 + .../tools/templates/window-history.sub.html | 134 + .../tools/templates/window-location.sub.html | 128 + .../worker-dedicated-constructor.sub.html | 49 + .../worker-dedicated-importscripts.sub.html | 54 + test/wpt/tests/fetch/metadata/track.https.sub.html | 119 + .../fetch/metadata/trailing-dot.https.sub.any.js | 30 + .../wpt/tests/fetch/metadata/unload.https.sub.html | 64 + .../fetch/metadata/window-open.https.sub.html | 199 + .../wpt/tests/fetch/metadata/worker.https.sub.html | 24 + test/wpt/tests/fetch/metadata/xslt.https.sub.html | 25 + test/wpt/tests/fetch/nosniff/image.html | 39 + test/wpt/tests/fetch/nosniff/importscripts.html | 14 + test/wpt/tests/fetch/nosniff/importscripts.js | 28 + .../tests/fetch/nosniff/parsing-nosniff.window.js | 27 + test/wpt/tests/fetch/nosniff/resources/css.py | 23 + test/wpt/tests/fetch/nosniff/resources/image.py | 24 + test/wpt/tests/fetch/nosniff/resources/js.py | 17 + test/wpt/tests/fetch/nosniff/resources/nosniff.py | 11 + test/wpt/tests/fetch/nosniff/resources/worker.py | 16 + .../nosniff/resources/x-content-type-options.json | 62 + test/wpt/tests/fetch/nosniff/script.html | 43 + test/wpt/tests/fetch/nosniff/stylesheet.html | 60 + test/wpt/tests/fetch/nosniff/worker.html | 28 + test/wpt/tests/fetch/orb/resources/data.json | 3 + .../tests/fetch/orb/resources/data_non_ascii.json | 1 + test/wpt/tests/fetch/orb/resources/empty.json | 1 + test/wpt/tests/fetch/orb/resources/font.ttf | Bin 0 -> 2528 bytes test/wpt/tests/fetch/orb/resources/image.png | Bin 0 -> 1010 bytes .../resources/js-unlabeled-utf16-without-bom.json | Bin 0 -> 70 bytes test/wpt/tests/fetch/orb/resources/js-unlabeled.js | 1 + .../fetch/orb/resources/png-mislabeled-as-html.png | Bin 0 -> 1010 bytes 
.../resources/png-mislabeled-as-html.png.headers | 1 + .../tests/fetch/orb/resources/png-unlabeled.png | Bin 0 -> 1010 bytes .../fetch/orb/resources/script-asm-js-invalid.js | 4 + .../fetch/orb/resources/script-asm-js-valid.js | 4 + .../tests/fetch/orb/resources/script-iso-8559-1.js | 4 + .../tests/fetch/orb/resources/script-utf16-bom.js | Bin 0 -> 92 bytes .../orb/resources/script-utf16-without-bom.js | Bin 0 -> 90 bytes test/wpt/tests/fetch/orb/resources/script.js | 4 + test/wpt/tests/fetch/orb/resources/sound.mp3 | Bin 0 -> 539 bytes test/wpt/tests/fetch/orb/resources/text.txt | 1 + test/wpt/tests/fetch/orb/resources/utils.js | 18 + .../tentative/compressed-image-sniffing.sub.html | 20 + .../fetch/orb/tentative/content-range.sub.any.js | 31 + .../img-mime-types-coverage.tentative.sub.html | 126 + .../img-png-mislabeled-as-html.sub-ref.html | 5 + .../tentative/img-png-mislabeled-as-html.sub.html | 7 + .../orb/tentative/img-png-unlabeled.sub-ref.html | 5 + .../fetch/orb/tentative/img-png-unlabeled.sub.html | 7 + .../fetch/orb/tentative/known-mime-type.sub.any.js | 86 + .../tests/fetch/orb/tentative/nosniff.sub.any.js | 59 + .../tentative/script-js-unlabeled-gziped.sub.html | 24 + .../fetch/orb/tentative/script-unlabeled.sub.html | 24 + .../script-utf16-without-bom-hint-charset.sub.html | 22 + .../tests/fetch/orb/tentative/status.sub.any.js | 33 + test/wpt/tests/fetch/orb/tentative/status.sub.html | 17 + .../orb/tentative/unknown-mime-type.sub.any.js | 28 + test/wpt/tests/fetch/origin/assorted.window.js | 211 + .../fetch/origin/resources/redirect-and-stash.py | 38 + .../fetch/origin/resources/referrer-policy.py | 7 + .../tests/fetch/private-network-access/META.yml | 7 + .../tests/fetch/private-network-access/README.md | 10 + ...no-preflight-required.tentative.https.window.js | 91 + ...ame-subresource-fetch.tentative.https.window.js | 330 ++ .../fenced-frame.tentative.https.window.js | 150 + ...-from-treat-as-public.tentative.https.window.js | 80 + .../fetch.tentative.https.window.js | 271 ++ .../fetch.tentative.window.js | 183 + .../iframe.tentative.https.window.js | 266 ++ .../iframe.tentative.window.js | 110 + .../mixed-content-fetch.tentative.https.window.js | 277 ++ .../nested-worker.tentative.https.window.js | 36 + .../nested-worker.tentative.window.js | 36 + .../preflight-cache.https.tentative.window.js | 88 + .../redirect.tentative.https.window.js | 640 +++ .../private-network-access/resources/executor.html | 9 + .../resources/fenced-frame-fetcher.https.html | 25 + .../fenced-frame-fetcher.https.html.headers | 1 + ...ed-frame-local-network-access-target.https.html | 8 + .../fenced-frame-local-network-access.https.html | 14 + ...d-frame-local-network-access.https.html.headers | 1 + .../private-network-access/resources/fetcher.html | 21 + .../private-network-access/resources/fetcher.js | 20 + .../private-network-access/resources/iframed.html | 7 + .../private-network-access/resources/iframer.html | 9 + .../private-network-access/resources/preflight.py | 175 + .../resources/service-worker-bridge.html | 155 + .../resources/service-worker.js | 18 + .../resources/shared-fetcher.js | 23 + .../resources/shared-worker-blob-fetcher.html | 50 + .../resources/shared-worker-fetcher.html | 19 + .../resources/socket-opener.html | 15 + .../resources/support.sub.js | 759 +++ .../resources/worker-blob-fetcher.html | 45 + .../resources/worker-fetcher.html | 18 + .../resources/worker-fetcher.js | 11 + .../resources/xhr-sender.html | 33 + ...rker-background-fetch.tentative.https.window.js | 142 + 
.../service-worker-fetch.tentative.https.window.js | 235 + ...service-worker-update.tentative.https.window.js | 106 + .../service-worker.tentative.https.window.js | 84 + ...red-worker-blob-fetch.tentative.https.window.js | 168 + .../shared-worker-blob-fetch.tentative.window.js | 173 + .../shared-worker-fetch.tentative.https.window.js | 167 + .../shared-worker-fetch.tentative.window.js | 154 + .../shared-worker.tentative.https.window.js | 34 + .../shared-worker.tentative.window.js | 34 + .../websocket.tentative.https.window.js | 40 + .../websocket.tentative.window.js | 40 + .../worker-blob-fetch.tentative.window.js | 155 + .../worker-fetch.tentative.https.window.js | 151 + .../worker-fetch.tentative.window.js | 154 + .../worker.tentative.https.window.js | 37 + .../worker.tentative.window.js | 37 + ...-from-treat-as-public.tentative.https.window.js | 83 + .../xhr.https.tentative.window.js | 142 + .../private-network-access/xhr.tentative.window.js | 195 + test/wpt/tests/fetch/range/blob.any.js | 233 + test/wpt/tests/fetch/range/data.any.js | 29 + test/wpt/tests/fetch/range/general.any.js | 140 + test/wpt/tests/fetch/range/general.window.js | 29 + .../fetch/range/non-matching-range-response.html | 34 + test/wpt/tests/fetch/range/resources/basic.html | 1 + test/wpt/tests/fetch/range/resources/long-wav.py | 134 + .../tests/fetch/range/resources/partial-script.py | 29 + .../tests/fetch/range/resources/partial-text.py | 53 + test/wpt/tests/fetch/range/resources/range-sw.js | 218 + test/wpt/tests/fetch/range/resources/stash-take.py | 7 + test/wpt/tests/fetch/range/resources/utils.js | 36 + .../fetch/range/resources/video-with-range.py | 43 + test/wpt/tests/fetch/range/sw.https.window.js | 228 + .../redirect-navigate/302-found-post-handler.py | 15 + .../fetch/redirect-navigate/302-found-post.html | 20 + .../fetch/redirect-navigate/preserve-fragment.html | 202 + .../redirect-navigate/resources/destination.html | 28 + test/wpt/tests/fetch/redirects/data.window.js | 25 + .../fetch/redirects/subresource-fragments.html | 39 + test/wpt/tests/fetch/security/1xx-response.any.js | 28 + ...g-markup-mitigation-data-url.tentative.sub.html | 229 + .../dangling-markup-mitigation.tentative.html | 147 + .../embedded-credentials.tentative.sub.html | 89 + .../redirect-to-url-with-credentials.https.html | 68 + .../support/embedded-credential-window.sub.html | 19 + .../stale-while-revalidate/fetch-sw.https.html | 65 + .../fetch/stale-while-revalidate/fetch.any.js | 32 + .../stale-while-revalidate/resources/stale-css.py | 28 + .../resources/stale-image.py | 40 + .../resources/stale-script.py | 32 + .../revalidate-not-blocked-by-csp.html | 69 + .../fetch/stale-while-revalidate/stale-css.html | 51 + .../fetch/stale-while-revalidate/stale-image.html | 55 + .../fetch/stale-while-revalidate/stale-script.html | 59 + .../fetch/stale-while-revalidate/sw-intercept.js | 14 + .../tests/interfaces/ANGLE_instanced_arrays.idl | 12 + test/wpt/tests/interfaces/CSP.idl | 56 + test/wpt/tests/interfaces/DOM-Parsing.idl | 26 + test/wpt/tests/interfaces/EXT_blend_minmax.idl | 10 + .../tests/interfaces/EXT_color_buffer_float.idl | 8 + .../interfaces/EXT_color_buffer_half_float.idl | 12 + .../tests/interfaces/EXT_disjoint_timer_query.idl | 30 + .../interfaces/EXT_disjoint_timer_query_webgl2.idl | 14 + test/wpt/tests/interfaces/EXT_float_blend.idl | 8 + test/wpt/tests/interfaces/EXT_frag_depth.idl | 8 + test/wpt/tests/interfaces/EXT_sRGB.idl | 12 + .../tests/interfaces/EXT_shader_texture_lod.idl | 8 + 
.../interfaces/EXT_texture_compression_bptc.idl | 12 + .../interfaces/EXT_texture_compression_rgtc.idl | 12 + .../interfaces/EXT_texture_filter_anisotropic.idl | 10 + test/wpt/tests/interfaces/EXT_texture_norm16.idl | 16 + test/wpt/tests/interfaces/FedCM.idl | 67 + test/wpt/tests/interfaces/FileAPI.idl | 100 + test/wpt/tests/interfaces/IndexedDB.idl | 226 + .../interfaces/KHR_parallel_shader_compile.idl | 9 + test/wpt/tests/interfaces/META.yml | 2 + .../tests/interfaces/OES_draw_buffers_indexed.idl | 26 + .../tests/interfaces/OES_element_index_uint.idl | 8 + .../wpt/tests/interfaces/OES_fbo_render_mipmap.idl | 8 + .../tests/interfaces/OES_standard_derivatives.idl | 9 + test/wpt/tests/interfaces/OES_texture_float.idl | 7 + .../tests/interfaces/OES_texture_float_linear.idl | 7 + .../tests/interfaces/OES_texture_half_float.idl | 9 + .../interfaces/OES_texture_half_float_linear.idl | 7 + .../tests/interfaces/OES_vertex_array_object.idl | 18 + test/wpt/tests/interfaces/OVR_multiview2.idl | 14 + test/wpt/tests/interfaces/README.md | 3 + test/wpt/tests/interfaces/SVG.idl | 693 +++ .../WEBGL_blend_equation_advanced_coherent.idl | 23 + .../tests/interfaces/WEBGL_clip_cull_distance.idl | 20 + .../tests/interfaces/WEBGL_color_buffer_float.idl | 11 + .../interfaces/WEBGL_compressed_texture_astc.idl | 41 + .../interfaces/WEBGL_compressed_texture_etc.idl | 19 + .../interfaces/WEBGL_compressed_texture_etc1.idl | 10 + .../interfaces/WEBGL_compressed_texture_pvrtc.idl | 13 + .../interfaces/WEBGL_compressed_texture_s3tc.idl | 13 + .../WEBGL_compressed_texture_s3tc_srgb.idl | 13 + .../tests/interfaces/WEBGL_debug_renderer_info.idl | 12 + test/wpt/tests/interfaces/WEBGL_debug_shaders.idl | 11 + test/wpt/tests/interfaces/WEBGL_depth_texture.idl | 9 + test/wpt/tests/interfaces/WEBGL_draw_buffers.idl | 46 + ...GL_draw_instanced_base_vertex_base_instance.idl | 14 + test/wpt/tests/interfaces/WEBGL_lose_context.idl | 10 + test/wpt/tests/interfaces/WEBGL_multi_draw.idl | 32 + ...ti_draw_instanced_base_vertex_base_instance.idl | 26 + .../tests/interfaces/WEBGL_provoking_vertex.idl | 13 + test/wpt/tests/interfaces/WebCryptoAPI.idl | 237 + test/wpt/tests/interfaces/accelerometer.idl | 40 + test/wpt/tests/interfaces/ambient-light.idl | 14 + test/wpt/tests/interfaces/anchors.idl | 37 + .../tests/interfaces/attribution-reporting-api.idl | 26 + test/wpt/tests/interfaces/audio-output.idl | 17 + test/wpt/tests/interfaces/autoplay-detection.idl | 19 + test/wpt/tests/interfaces/background-fetch.idl | 89 + test/wpt/tests/interfaces/background-sync.idl | 30 + test/wpt/tests/interfaces/badging.idl | 15 + test/wpt/tests/interfaces/battery-status.idl | 21 + test/wpt/tests/interfaces/beacon.idl | 8 + .../tests/interfaces/capture-handle-identity.idl | 27 + .../interfaces/captured-mouse-events.tentative.idl | 25 + test/wpt/tests/interfaces/clipboard-apis.idl | 51 + test/wpt/tests/interfaces/close-watcher.idl | 19 + test/wpt/tests/interfaces/compat.idl | 13 + test/wpt/tests/interfaces/compression.idl | 22 + test/wpt/tests/interfaces/compute-pressure.idl | 37 + test/wpt/tests/interfaces/console.idl | 34 + test/wpt/tests/interfaces/contact-picker.idl | 44 + test/wpt/tests/interfaces/content-index.idl | 46 + test/wpt/tests/interfaces/cookie-store.idl | 110 + .../wpt/tests/interfaces/credential-management.idl | 105 + .../tests/interfaces/csp-embedded-enforcement.idl | 8 + test/wpt/tests/interfaces/csp-next.idl | 21 + test/wpt/tests/interfaces/css-anchor-position.idl | 11 + .../wpt/tests/interfaces/css-animation-worklet.idl | 37 + 
test/wpt/tests/interfaces/css-animations-2.idl | 9 + test/wpt/tests/interfaces/css-animations.idl | 47 + test/wpt/tests/interfaces/css-cascade-6.idl | 10 + test/wpt/tests/interfaces/css-cascade.idl | 14 + test/wpt/tests/interfaces/css-color-5.idl | 12 + test/wpt/tests/interfaces/css-conditional.idl | 27 + test/wpt/tests/interfaces/css-contain-3.idl | 10 + test/wpt/tests/interfaces/css-contain.idl | 13 + test/wpt/tests/interfaces/css-counter-styles.idl | 23 + test/wpt/tests/interfaces/css-font-loading.idl | 134 + test/wpt/tests/interfaces/css-fonts.idl | 36 + test/wpt/tests/interfaces/css-highlight-api.idl | 27 + test/wpt/tests/interfaces/css-images-4.idl | 8 + test/wpt/tests/interfaces/css-layout-api.idl | 144 + test/wpt/tests/interfaces/css-masking.idl | 20 + test/wpt/tests/interfaces/css-nav.idl | 48 + test/wpt/tests/interfaces/css-nesting.idl | 10 + test/wpt/tests/interfaces/css-paint-api.idl | 39 + test/wpt/tests/interfaces/css-parser-api.idl | 76 + .../tests/interfaces/css-properties-values-api.idl | 23 + test/wpt/tests/interfaces/css-pseudo.idl | 16 + test/wpt/tests/interfaces/css-regions.idl | 29 + test/wpt/tests/interfaces/css-shadow-parts.idl | 8 + test/wpt/tests/interfaces/css-toggle.tentative.idl | 51 + test/wpt/tests/interfaces/css-transitions-2.idl | 9 + test/wpt/tests/interfaces/css-transitions.idl | 25 + test/wpt/tests/interfaces/css-typed-om.idl | 423 ++ test/wpt/tests/interfaces/css-view-transitions.idl | 18 + test/wpt/tests/interfaces/cssom-view.idl | 200 + test/wpt/tests/interfaces/cssom.idl | 169 + .../tests/interfaces/custom-state-pseudo-class.idl | 14 + test/wpt/tests/interfaces/datacue.idl | 12 + .../wpt/tests/interfaces/deprecation-reporting.idl | 15 + test/wpt/tests/interfaces/device-memory.idl | 14 + test/wpt/tests/interfaces/device-posture.idl | 20 + test/wpt/tests/interfaces/digital-goods.idl | 44 + .../interfaces/document-picture-in-picture.idl | 34 + test/wpt/tests/interfaces/dom.idl | 646 +++ test/wpt/tests/interfaces/edit-context.idl | 111 + test/wpt/tests/interfaces/element-timing.idl | 22 + test/wpt/tests/interfaces/encoding.idl | 59 + test/wpt/tests/interfaces/encrypted-media.idl | 125 + test/wpt/tests/interfaces/entries-api.idl | 71 + test/wpt/tests/interfaces/event-timing.idl | 29 + test/wpt/tests/interfaces/eyedropper-api.idl | 18 + test/wpt/tests/interfaces/fenced-frame.idl | 57 + test/wpt/tests/interfaces/fetch.idl | 117 + test/wpt/tests/interfaces/fido.idl | 47 + test/wpt/tests/interfaces/file-system-access.idl | 72 + test/wpt/tests/interfaces/filter-effects.idl | 341 ++ test/wpt/tests/interfaces/font-metrics-api.idl | 42 + test/wpt/tests/interfaces/fs.idl | 97 + test/wpt/tests/interfaces/fullscreen.idl | 35 + test/wpt/tests/interfaces/gamepad-extensions.idl | 71 + test/wpt/tests/interfaces/gamepad.idl | 49 + test/wpt/tests/interfaces/generic-sensor.idl | 60 + test/wpt/tests/interfaces/geolocation-sensor.idl | 47 + test/wpt/tests/interfaces/geolocation.idl | 65 + test/wpt/tests/interfaces/geometry.idl | 290 ++ .../interfaces/get-installed-related-apps.idl | 16 + test/wpt/tests/interfaces/gpc-spec.idl | 10 + test/wpt/tests/interfaces/gyroscope.idl | 24 + test/wpt/tests/interfaces/hr-time.idl | 19 + test/wpt/tests/interfaces/html-media-capture.idl | 8 + test/wpt/tests/interfaces/html.idl | 2725 +++++++++++ test/wpt/tests/interfaces/idle-detection.idl | 31 + test/wpt/tests/interfaces/image-capture.idl | 160 + test/wpt/tests/interfaces/image-resource.idl | 11 + test/wpt/tests/interfaces/ink-enhancement.idl | 32 + 
.../tests/interfaces/input-device-capabilities.idl | 24 + test/wpt/tests/interfaces/input-events.idl | 14 + .../wpt/tests/interfaces/intersection-observer.idl | 46 + .../tests/interfaces/intervention-reporting.idl | 14 + test/wpt/tests/interfaces/is-input-pending.idl | 16 + test/wpt/tests/interfaces/js-self-profiling.idl | 44 + test/wpt/tests/interfaces/keyboard-lock.idl | 14 + test/wpt/tests/interfaces/keyboard-map.idl | 15 + .../tests/interfaces/largest-contentful-paint.idl | 15 + test/wpt/tests/interfaces/layout-instability.idl | 20 + test/wpt/tests/interfaces/local-font-access.idl | 24 + test/wpt/tests/interfaces/longtasks.idl | 19 + test/wpt/tests/interfaces/magnetometer.idl | 46 + test/wpt/tests/interfaces/manifest-incubations.idl | 24 + test/wpt/tests/interfaces/mathml-core.idl | 9 + test/wpt/tests/interfaces/media-capabilities.idl | 115 + .../tests/interfaces/media-playback-quality.idl | 18 + test/wpt/tests/interfaces/media-source.idl | 91 + .../tests/interfaces/mediacapture-automation.idl | 36 + .../tests/interfaces/mediacapture-fromelement.idl | 17 + .../interfaces/mediacapture-handle-actions.idl | 31 + test/wpt/tests/interfaces/mediacapture-region.idl | 15 + test/wpt/tests/interfaces/mediacapture-streams.idl | 248 + .../tests/interfaces/mediacapture-transform.idl | 23 + .../wpt/tests/interfaces/mediacapture-viewport.idl | 14 + test/wpt/tests/interfaces/mediasession.idl | 84 + .../wpt/tests/interfaces/mediastream-recording.idl | 62 + test/wpt/tests/interfaces/model-element.idl | 9 + test/wpt/tests/interfaces/mst-content-hint.idl | 18 + test/wpt/tests/interfaces/navigation-timing.idl | 71 + test/wpt/tests/interfaces/netinfo.idl | 43 + test/wpt/tests/interfaces/notifications.idl | 101 + test/wpt/tests/interfaces/orientation-event.idl | 78 + test/wpt/tests/interfaces/orientation-sensor.idl | 35 + test/wpt/tests/interfaces/page-lifecycle.idl | 19 + test/wpt/tests/interfaces/paint-timing.idl | 7 + test/wpt/tests/interfaces/parakeet.tentative.idl | 32 + test/wpt/tests/interfaces/payment-handler.idl | 131 + test/wpt/tests/interfaces/payment-request.idl | 112 + .../interfaces/performance-measure-memory.idl | 30 + test/wpt/tests/interfaces/performance-timeline.idl | 49 + .../tests/interfaces/periodic-background-sync.idl | 34 + test/wpt/tests/interfaces/permissions-policy.idl | 29 + test/wpt/tests/interfaces/permissions-request.idl | 8 + test/wpt/tests/interfaces/permissions-revoke.idl | 8 + test/wpt/tests/interfaces/permissions.idl | 41 + test/wpt/tests/interfaces/picture-in-picture.idl | 41 + test/wpt/tests/interfaces/pointerevents.idl | 64 + test/wpt/tests/interfaces/pointerlock.idl | 28 + test/wpt/tests/interfaces/portals.idl | 50 + test/wpt/tests/interfaces/prefer-current-tab.idl | 8 + .../wpt/tests/interfaces/prerendering-revamped.idl | 15 + test/wpt/tests/interfaces/presentation-api.idl | 95 + .../tests/interfaces/private-click-measurement.idl | 8 + test/wpt/tests/interfaces/proximity.idl | 18 + test/wpt/tests/interfaces/push-api.idl | 93 + test/wpt/tests/interfaces/raw-camera-access.idl | 18 + test/wpt/tests/interfaces/real-world-meshing.idl | 21 + test/wpt/tests/interfaces/referrer-policy.idl | 16 + test/wpt/tests/interfaces/remote-playback.idl | 32 + test/wpt/tests/interfaces/reporting.idl | 39 + .../tests/interfaces/requestStorageAccessFor.idl | 12 + test/wpt/tests/interfaces/requestidlecallback.idl | 20 + test/wpt/tests/interfaces/resize-observer.idl | 37 + test/wpt/tests/interfaces/resource-timing.idl | 42 + test/wpt/tests/interfaces/sanitizer-api.idl | 38 + 
.../tests/interfaces/sanitizer-api.tentative.idl | 17 + test/wpt/tests/interfaces/savedata.idl | 10 + test/wpt/tests/interfaces/scheduling-apis.idl | 63 + test/wpt/tests/interfaces/screen-capture.idl | 85 + test/wpt/tests/interfaces/screen-orientation.idl | 35 + test/wpt/tests/interfaces/screen-wake-lock.idl | 24 + test/wpt/tests/interfaces/scroll-animations.idl | 46 + .../tests/interfaces/scroll-to-text-fragment.idl | 12 + .../interfaces/secure-payment-confirmation.idl | 52 + test/wpt/tests/interfaces/selection-api.idl | 46 + test/wpt/tests/interfaces/serial.idl | 85 + test/wpt/tests/interfaces/server-timing.idl | 17 + test/wpt/tests/interfaces/service-workers.idl | 240 + test/wpt/tests/interfaces/shape-detection-api.idl | 69 + test/wpt/tests/interfaces/shared-storage.idl | 80 + test/wpt/tests/interfaces/speech-api.idl | 202 + test/wpt/tests/interfaces/storage-access.idl | 9 + test/wpt/tests/interfaces/storage-buckets.idl | 53 + .../tests/interfaces/storage-buckets.tentative.idl | 36 + test/wpt/tests/interfaces/storage.idl | 25 + test/wpt/tests/interfaces/streams.idl | 222 + test/wpt/tests/interfaces/sub-apps.tentative.idl | 17 + test/wpt/tests/interfaces/svg-animations.idl | 68 + test/wpt/tests/interfaces/testutils.idl | 9 + test/wpt/tests/interfaces/text-detection-api.idl | 18 + test/wpt/tests/interfaces/touch-events.idl | 79 + test/wpt/tests/interfaces/trust-token-api.idl | 26 + test/wpt/tests/interfaces/trusted-types.idl | 71 + test/wpt/tests/interfaces/turtledove.idl | 120 + test/wpt/tests/interfaces/ua-client-hints.idl | 45 + test/wpt/tests/interfaces/uievents.idl | 248 + test/wpt/tests/interfaces/url.idl | 46 + test/wpt/tests/interfaces/urlpattern.idl | 59 + test/wpt/tests/interfaces/user-timing.idl | 34 + test/wpt/tests/interfaces/vibration.idl | 10 + test/wpt/tests/interfaces/video-rvfc.idl | 27 + test/wpt/tests/interfaces/virtual-keyboard.idl | 21 + .../interfaces/virtual-keyboard.tentative.idl | 15 + test/wpt/tests/interfaces/wai-aria.idl | 59 + test/wpt/tests/interfaces/wasm-js-api.idl | 110 + test/wpt/tests/interfaces/wasm-web-api.idl | 11 + test/wpt/tests/interfaces/web-animations-2.idl | 112 + test/wpt/tests/interfaces/web-animations.idl | 149 + test/wpt/tests/interfaces/web-app-launch.idl | 19 + test/wpt/tests/interfaces/web-bluetooth.idl | 252 + test/wpt/tests/interfaces/web-locks.idl | 50 + test/wpt/tests/interfaces/web-nfc.idl | 81 + test/wpt/tests/interfaces/web-otp.idl | 21 + test/wpt/tests/interfaces/web-share.idl | 16 + test/wpt/tests/interfaces/webaudio.idl | 674 +++ test/wpt/tests/interfaces/webauthn.idl | 350 ++ .../webcodecs-aac-codec-registration.idl | 17 + .../webcodecs-av1-codec-registration.idl | 20 + .../webcodecs-avc-codec-registration.idl | 25 + .../webcodecs-flac-codec-registration.idl | 13 + .../webcodecs-hevc-codec-registration.idl | 17 + .../webcodecs-opus-codec-registration.idl | 22 + .../webcodecs-vp9-codec-registration.idl | 12 + test/wpt/tests/interfaces/webcodecs.idl | 501 ++ .../tests/interfaces/webcrypto-secure-curves.idl | 8 + test/wpt/tests/interfaces/webdriver.idl | 9 + test/wpt/tests/interfaces/webgl1.idl | 745 +++ test/wpt/tests/interfaces/webgl2.idl | 582 +++ test/wpt/tests/interfaces/webgpu.idl | 1293 +++++ test/wpt/tests/interfaces/webhid.idl | 127 + test/wpt/tests/interfaces/webidl.idl | 48 + test/wpt/tests/interfaces/webmidi.idl | 91 + test/wpt/tests/interfaces/webnn.idl | 544 +++ .../tests/interfaces/webrtc-encoded-transform.idl | 128 + test/wpt/tests/interfaces/webrtc-ice.idl | 24 + test/wpt/tests/interfaces/webrtc-identity.idl | 97 + 
test/wpt/tests/interfaces/webrtc-priority.idl | 24 + test/wpt/tests/interfaces/webrtc-stats.idl | 288 ++ test/wpt/tests/interfaces/webrtc-svc.idl | 8 + test/wpt/tests/interfaces/webrtc.idl | 627 +++ test/wpt/tests/interfaces/websockets.idl | 48 + test/wpt/tests/interfaces/webtransport.idl | 145 + test/wpt/tests/interfaces/webusb.idl | 249 + test/wpt/tests/interfaces/webvr.tentative.idl | 204 + test/wpt/tests/interfaces/webvtt.idl | 40 + test/wpt/tests/interfaces/webxr-ar-module.idl | 29 + test/wpt/tests/interfaces/webxr-depth-sensing.idl | 57 + test/wpt/tests/interfaces/webxr-dom-overlays.idl | 31 + .../wpt/tests/interfaces/webxr-gamepads-module.idl | 8 + test/wpt/tests/interfaces/webxr-hand-input.idl | 66 + test/wpt/tests/interfaces/webxr-hit-test.idl | 69 + .../tests/interfaces/webxr-lighting-estimation.idl | 39 + test/wpt/tests/interfaces/webxr.idl | 295 ++ test/wpt/tests/interfaces/webxrlayers.idl | 221 + .../tests/interfaces/window-controls-overlay.idl | 28 + test/wpt/tests/interfaces/window-management.idl | 42 + test/wpt/tests/interfaces/xhr.idl | 99 + test/wpt/tests/lint.ignore | 707 +++ test/wpt/tests/mimesniff/META.yml | 3 + test/wpt/tests/mimesniff/README.md | 4 + .../tests/mimesniff/media/media-sniff.window.js | 32 + test/wpt/tests/mimesniff/media/resources/flac.flac | Bin 0 -> 8493 bytes .../mimesniff/media/resources/make-vectors.sh | 10 + .../tests/mimesniff/media/resources/mp3-raw.mp3 | Bin 0 -> 417 bytes .../mimesniff/media/resources/mp3-with-id3.mp3 | Bin 0 -> 644 bytes test/wpt/tests/mimesniff/media/resources/mp4.mp4 | Bin 0 -> 1231 bytes test/wpt/tests/mimesniff/media/resources/ogg.ogg | Bin 0 -> 3594 bytes test/wpt/tests/mimesniff/media/resources/wav.wav | Bin 0 -> 486 bytes test/wpt/tests/mimesniff/media/resources/webm.webm | Bin 0 -> 877 bytes test/wpt/tests/mimesniff/mime-types/README.md | 47 + .../mime-types/charset-parameter.window.js | 61 + test/wpt/tests/mimesniff/mime-types/parsing.any.js | 57 + .../mime-types/resources/generated-mime-types.json | 3526 ++++++++++++++ .../mime-types/resources/generated-mime-types.py | 48 + .../mimesniff/mime-types/resources/mime-charset.py | 19 + .../mime-types/resources/mime-groups.json | 159 + .../mimesniff/mime-types/resources/mime-types.json | 397 ++ test/wpt/tests/resources/.htaccess | 2 + test/wpt/tests/resources/META.yml | 2 + .../resources/SVGAnimationTestCase-testharness.js | 102 + test/wpt/tests/resources/accesskey.js | 34 + test/wpt/tests/resources/blank.html | 16 + test/wpt/tests/resources/channel.sub.js | 1097 +++++ test/wpt/tests/resources/check-layout-th.js | 252 + test/wpt/tests/resources/check-layout.js | 245 + test/wpt/tests/resources/chromium/README.md | 7 + .../resources/chromium/contacts_manager_mock.js | 90 + .../resources/chromium/content-index-helpers.js | 9 + .../chromium/enable-hyperlink-auditing.js | 2 + test/wpt/tests/resources/chromium/fake-hid.js | 297 ++ test/wpt/tests/resources/chromium/fake-serial.js | 443 ++ .../resources/chromium/generic_sensor_mocks.js | 519 ++ .../chromium/generic_sensor_mocks.js.headers | 1 + .../resources/chromium/mock-barcodedetection.js | 136 + .../chromium/mock-barcodedetection.js.headers | 1 + .../chromium/mock-battery-monitor.headers | 1 + .../resources/chromium/mock-battery-monitor.js | 61 + .../resources/chromium/mock-direct-sockets.js | 94 + .../tests/resources/chromium/mock-facedetection.js | 130 + .../chromium/mock-facedetection.js.headers | 1 + .../resources/chromium/mock-idle-detection.js | 80 + .../tests/resources/chromium/mock-imagecapture.js | 309 ++ 
.../resources/chromium/mock-managed-config.js | 91 + .../resources/chromium/mock-pressure-service.js | 134 + .../chromium/mock-pressure-service.js.headers | 1 + test/wpt/tests/resources/chromium/mock-subapps.js | 89 + .../tests/resources/chromium/mock-textdetection.js | 92 + .../chromium/mock-textdetection.js.headers | 1 + test/wpt/tests/resources/chromium/nfc-mock.js | 437 ++ .../tests/resources/chromium/web-bluetooth-test.js | 629 +++ .../chromium/web-bluetooth-test.js.headers | 1 + .../tests/resources/chromium/webusb-child-test.js | 47 + .../chromium/webusb-child-test.js.headers | 1 + test/wpt/tests/resources/chromium/webusb-test.js | 583 +++ .../resources/chromium/webusb-test.js.headers | 1 + .../resources/chromium/webxr-test-math-helper.js | 298 ++ .../chromium/webxr-test-math-helper.js.headers | 1 + test/wpt/tests/resources/chromium/webxr-test.js | 2125 +++++++++ .../tests/resources/chromium/webxr-test.js.headers | 1 + .../resources/declarative-shadow-dom-polyfill.js | 25 + test/wpt/tests/resources/idlharness-shadowrealm.js | 61 + test/wpt/tests/resources/idlharness.js | 3554 ++++++++++++++ test/wpt/tests/resources/idlharness.js.headers | 2 + test/wpt/tests/resources/readme.md | 14 + test/wpt/tests/resources/sriharness.js | 226 + test/wpt/tests/resources/test-only-api.js | 31 + test/wpt/tests/resources/test-only-api.js.headers | 2 + test/wpt/tests/resources/test-only-api.m.js | 5 + .../wpt/tests/resources/test-only-api.m.js.headers | 2 + test/wpt/tests/resources/test/README.md | 83 + test/wpt/tests/resources/test/conftest.py | 269 ++ test/wpt/tests/resources/test/harness.html | 26 + test/wpt/tests/resources/test/idl-helper.js | 24 + .../wpt/tests/resources/test/nested-testharness.js | 80 + test/wpt/tests/resources/test/requirements.txt | 1 + .../test/tests/functional/abortsignal.html | 49 + .../test/tests/functional/add_cleanup.html | 91 + .../test/tests/functional/add_cleanup_async.html | 85 + .../functional/add_cleanup_async_bad_return.html | 50 + .../functional/add_cleanup_async_rejection.html | 94 + .../add_cleanup_async_rejection_after_load.html | 52 + .../functional/add_cleanup_async_timeout.html | 57 + .../tests/functional/add_cleanup_bad_return.html | 61 + .../test/tests/functional/add_cleanup_count.html | 39 + .../test/tests/functional/add_cleanup_err.html | 45 + .../tests/functional/add_cleanup_err_multi.html | 52 + .../tests/functional/add_cleanup_sync_queue.html | 55 + .../test/tests/functional/api-tests-1.html | 991 ++++ .../test/tests/functional/api-tests-2.html | 62 + .../test/tests/functional/api-tests-3.html | 34 + .../test/tests/functional/assert-array-equals.html | 162 + .../test/tests/functional/assert-throws-dom.html | 55 + .../test/tests/functional/force_timeout.html | 60 + .../test/tests/functional/generate-callback.html | 153 + .../IdlDictionary/test_partial_interface_of.html | 89 + .../IdlInterface/test_exposed_wildcard.html | 233 + .../IdlInterface/test_immutable_prototype.html | 298 ++ .../IdlInterface/test_interface_mixin.html | 131 + .../IdlInterface/test_partial_interface_of.html | 187 + .../IdlInterface/test_primary_interface_of.html | 116 + .../IdlInterface/test_to_json_operation.html | 177 + .../idlharness/IdlNamespace/test_attribute.html | 100 + .../idlharness/IdlNamespace/test_operation.html | 242 + .../IdlNamespace/test_partial_namespace.html | 125 + .../test/tests/functional/iframe-callback.html | 116 + .../functional/iframe-consolidate-errors.html | 50 + .../tests/functional/iframe-consolidate-tests.html | 85 + 
.../test/tests/functional/iframe-msg.html | 84 + .../test/tests/functional/log-insertion.html | 46 + .../resources/test/tests/functional/no-title.html | 146 + .../resources/test/tests/functional/order.html | 36 + .../test/tests/functional/promise-async.html | 172 + .../test/tests/functional/promise-with-sync.html | 79 + .../resources/test/tests/functional/promise.html | 219 + .../resources/test/tests/functional/queue.html | 130 + .../test/tests/functional/setup-function-worker.js | 14 + .../tests/functional/setup-worker-service.html | 86 + .../tests/functional/single-page-test-fail.html | 28 + .../functional/single-page-test-no-assertions.html | 25 + .../tests/functional/single-page-test-no-body.html | 26 + .../tests/functional/single-page-test-pass.html | 28 + .../resources/test/tests/functional/step_wait.html | 57 + .../test/tests/functional/step_wait_func.html | 49 + .../functional/task-scheduling-promise-test.html | 241 + .../tests/functional/task-scheduling-test.html | 141 + .../functional/uncaught-exception-handle.html | 33 + .../functional/uncaught-exception-ignore.html | 35 + .../worker-dedicated-uncaught-allow.html | 45 + .../worker-dedicated-uncaught-single.html | 56 + .../tests/functional/worker-dedicated.sub.html | 88 + .../test/tests/functional/worker-error.js | 8 + .../test/tests/functional/worker-service.html | 115 + .../test/tests/functional/worker-shared.html | 73 + .../test/tests/functional/worker-uncaught-allow.js | 19 + .../tests/functional/worker-uncaught-single.js | 8 + .../resources/test/tests/functional/worker.js | 34 + .../test/tests/unit/IdlArray/is_json_type.html | 192 + .../get_reverse_inheritance_stack.html | 47 + .../IdlDictionary/test_partial_dictionary.html | 39 + .../test/tests/unit/IdlInterface/constructors.html | 26 + .../IdlInterface/default_to_json_operation.html | 114 + .../IdlInterface/do_member_unscopable_asserts.html | 56 + .../unit/IdlInterface/get_interface_object.html | 22 + .../IdlInterface/get_interface_object_owner.html | 21 + .../unit/IdlInterface/get_legacy_namespace.html | 20 + .../unit/IdlInterface/get_qualified_name.html | 20 + .../get_reverse_inheritance_stack.html | 49 + .../has_default_to_json_regular_operation.html | 47 + .../has_to_json_regular_operation.html | 31 + .../IdlInterface/should_have_interface_object.html | 30 + .../test_primary_interface_of_undefined.html | 29 + .../is_to_json_regular_operation.html | 42 + .../tests/unit/IdlInterfaceMember/toString.html | 36 + .../test/tests/unit/assert_implements.html | 43 + .../tests/unit/assert_implements_optional.html | 43 + .../test/tests/unit/assert_object_equals.html | 152 + .../tests/unit/async-test-return-restrictions.html | 135 + .../wpt/tests/resources/test/tests/unit/basic.html | 48 + .../tests/unit/exceptional-cases-timeouts.html | 120 + .../test/tests/unit/exceptional-cases.html | 392 ++ .../resources/test/tests/unit/format-value.html | 123 + .../wpt/tests/resources/test/tests/unit/helpers.js | 21 + .../tests/resources/test/tests/unit/late-test.html | 54 + .../test/tests/unit/promise_setup-timeout.html | 28 + .../resources/test/tests/unit/promise_setup.html | 333 ++ .../resources/test/tests/unit/single_test.html | 94 + .../test/tests/unit/test-return-restrictions.html | 156 + .../test/tests/unit/throwing-assertions.html | 268 ++ .../test/tests/unit/unpaired-surrogates.html | 143 + test/wpt/tests/resources/test/tox.ini | 13 + test/wpt/tests/resources/test/wptserver.py | 58 + test/wpt/tests/resources/testdriver-actions.js | 599 +++ test/wpt/tests/resources/testdriver-vendor.js | 0 
.../tests/resources/testdriver-vendor.js.headers | 2 + test/wpt/tests/resources/testdriver.js | 958 ++++ test/wpt/tests/resources/testdriver.js.headers | 2 + test/wpt/tests/resources/testharness.js | 4933 ++++++++++++++++++++ test/wpt/tests/resources/testharness.js.headers | 2 + test/wpt/tests/resources/testharnessreport.js | 57 + .../tests/resources/testharnessreport.js.headers | 2 + test/wpt/tests/resources/webidl2/build.sh | 12 + test/wpt/tests/resources/webidl2/lib/README.md | 4 + test/wpt/tests/resources/webidl2/lib/VERSION.md | 1 + test/wpt/tests/resources/webidl2/lib/webidl2.js | 3824 +++++++++++++++ .../tests/resources/webidl2/lib/webidl2.js.headers | 1 + test/wpt/tests/service-workers/META.yml | 6 + .../tests/service-workers/cache-storage/META.yml | 3 + .../cache-storage/cache-abort.https.any.js | 81 + .../cache-storage/cache-add.https.any.js | 368 ++ .../cache-storage/cache-delete.https.any.js | 164 + ...e-keys-attributes-for-service-worker.https.html | 75 + .../cache-storage/cache-keys.https.any.js | 212 + .../cache-storage/cache-match.https.any.js | 437 ++ .../cache-storage/cache-matchAll.https.any.js | 244 + .../cache-storage/cache-put.https.any.js | 411 ++ .../cache-storage-buckets.https.any.js | 64 + .../cache-storage/cache-storage-keys.https.any.js | 35 + .../cache-storage/cache-storage-match.https.any.js | 245 + .../cache-storage/cache-storage.https.any.js | 239 + .../cache-storage/common.https.window.js | 44 + .../crashtests/cache-response-clone.https.html | 17 + .../cache-storage/credentials.https.html | 46 + .../cross-partition.https.tentative.html | 269 ++ .../cache-storage/resources/blank.html | 2 + .../cache-keys-attributes-for-service-worker.js | 22 + .../cache-storage/resources/common-worker.js | 15 + .../resources/credentials-iframe.html | 38 + .../cache-storage/resources/credentials-worker.js | 59 + .../cache-storage/resources/fetch-status.py | 2 + .../cache-storage/resources/iframe.html | 18 + .../cache-storage/resources/simple.txt | 1 + .../cache-storage/resources/test-helpers.js | 272 ++ .../cache-storage/resources/vary.py | 25 + .../cache-storage/sandboxed-iframes.https.html | 66 + .../tests/service-workers/idlharness.https.any.js | 53 + .../Service-Worker-Allowed-header.https.html | 88 + .../ServiceWorkerGlobalScope/close.https.html | 11 + ...extendable-message-event-constructor.https.html | 10 + .../extendable-message-event.https.html | 226 + .../fetch-on-the-right-interface.https.any.js | 14 + .../isSecureContext.https.html | 32 + .../isSecureContext.serviceworker.js | 5 + .../postmessage.https.html | 83 + .../registration-attribute.https.html | 107 + .../resources/close-worker.js | 5 + .../resources/error-worker.js | 12 + .../extendable-message-event-constructor-worker.js | 197 + .../extendable-message-event-loopback-worker.js | 36 + .../extendable-message-event-ping-worker.js | 23 + .../extendable-message-event-pong-worker.js | 18 + .../resources/extendable-message-event-utils.js | 78 + .../resources/extendable-message-event-worker.js | 5 + .../resources/postmessage-loopback-worker.js | 15 + .../resources/postmessage-ping-worker.js | 15 + .../resources/postmessage-pong-worker.js | 4 + .../registration-attribute-newer-worker.js | 33 + .../resources/registration-attribute-worker.js | 139 + .../resources/unregister-controlling-worker.html | 0 .../resources/unregister-worker.js | 25 + .../resources/update-worker.js | 22 + .../resources/update-worker.py | 16 + .../service-worker-error-event.https.html | 31 + 
.../ServiceWorkerGlobalScope/unregister.https.html | 139 + .../ServiceWorkerGlobalScope/update.https.html | 48 + .../about-blank-replacement.https.html | 181 + ...ate-event-after-install-state-change.https.html | 33 + .../activation-after-registration.https.html | 28 + .../service-worker/activation.https.html | 168 + .../service-worker/active.https.html | 50 + .../claim-affect-other-registration.https.html | 136 + .../service-worker/claim-fetch.https.html | 90 + .../claim-not-using-registration.https.html | 131 + .../claim-shared-worker-fetch.https.html | 71 + .../claim-using-registration.https.html | 103 + .../service-worker/claim-with-redirect.https.html | 59 + .../service-worker/claim-worker-fetch.https.html | 83 + .../service-worker/client-id.https.html | 60 + .../service-worker/client-navigate.https.html | 107 + .../client-url-of-blob-url-worker.https.html | 29 + .../clients-get-client-types.https.html | 108 + .../clients-get-cross-origin.https.html | 69 + .../clients-get-resultingClientId.https.html | 177 + .../service-worker/clients-get.https.html | 154 + .../clients-matchall-blob-url-worker.https.html | 85 + .../clients-matchall-client-types.https.html | 92 + .../clients-matchall-exact-controller.https.html | 67 + .../clients-matchall-frozen.https.html | 64 + ...lients-matchall-include-uncontrolled.https.html | 117 + .../clients-matchall-on-evaluation.https.html | 24 + .../clients-matchall-order.https.html | 427 ++ .../service-worker/clients-matchall.https.html | 50 + ...ntrolled-dedicatedworker-postMessage.https.html | 44 + .../controlled-iframe-postMessage.https.html | 67 + .../controller-on-disconnect.https.html | 40 + .../service-worker/controller-on-load.https.html | 46 + .../service-worker/controller-on-reload.https.html | 58 + ...ntroller-with-no-fetch-event-handler.https.html | 56 + .../service-worker/credentials.https.html | 100 + .../service-worker/data-iframe.html | 25 + .../service-worker/data-transfer-files.https.html | 41 + ...d-worker-service-worker-interception.https.html | 40 + .../service-worker/detached-context.https.html | 124 + ...embed-and-object-are-not-intercepted.https.html | 104 + .../extendable-event-async-waituntil.https.html | 120 + .../extendable-event-waituntil.https.html | 140 + .../service-worker/fetch-audio-tainting.https.html | 47 + .../fetch-canvas-tainting-double-write.https.html | 57 + .../fetch-canvas-tainting-image-cache.https.html | 16 + .../fetch-canvas-tainting-image.https.html | 16 + .../fetch-canvas-tainting-video-cache.https.html | 17 + ...as-tainting-video-with-range-request.https.html | 92 + .../fetch-canvas-tainting-video.https.html | 17 + .../fetch-cors-exposed-header-names.https.html | 30 + .../service-worker/fetch-cors-xhr.https.html | 49 + .../service-worker/fetch-csp.https.html | 138 + .../service-worker/fetch-error.https.html | 29 + .../fetch-event-add-async.https.html | 11 + ...h-event-after-navigation-within-page.https.html | 71 + .../fetch-event-async-respond-with.https.html | 73 + .../service-worker/fetch-event-handled.https.html | 86 + ...s-history-backward-navigation-manual.https.html | 8 + ...is-history-forward-navigation-manual.https.html | 8 + ...t-is-reload-iframe-navigation-manual.https.html | 31 + ...ch-event-is-reload-navigation-manual.https.html | 8 + .../fetch-event-network-error.https.html | 44 + .../service-worker/fetch-event-redirect.https.html | 1038 ++++ .../fetch-event-referrer-policy.https.html | 274 ++ .../fetch-event-respond-with-argument.https.html | 44 + 
...nt-respond-with-body-loaded-in-chunk.https.html | 24 + ...h-event-respond-with-custom-response.https.html | 82 + ...ch-event-respond-with-partial-stream.https.html | 62 + ...t-respond-with-readable-stream-chunk.https.html | 23 + ...h-event-respond-with-readable-stream.https.html | 88 + ...ith-response-body-with-invalid-chunk.https.html | 46 + ...event-respond-with-stops-propagation.https.html | 37 + ...etch-event-throws-after-respond-with.https.html | 37 + .../fetch-event-within-sw-manual.https.html | 122 + .../fetch-event-within-sw.https.html | 53 + .../service-worker/fetch-event.https.h2.html | 112 + .../service-worker/fetch-event.https.html | 1000 ++++ .../service-worker/fetch-frame-resource.https.html | 236 + .../fetch-header-visibility.https.html | 54 + .../fetch-mixed-content-to-inscope.https.html | 21 + .../fetch-mixed-content-to-outscope.https.html | 21 + .../fetch-request-css-base-url.https.html | 87 + .../fetch-request-css-cross-origin.https.html | 81 + .../fetch-request-css-images.https.html | 214 + .../fetch-request-fallback.https.html | 282 ++ .../fetch-request-no-freshness-headers.https.html | 55 + .../fetch-request-redirect.https.html | 385 ++ .../fetch-request-resources.https.html | 302 ++ .../fetch-request-xhr-sync-error.https.window.js | 19 + .../fetch-request-xhr-sync-on-worker.https.html | 41 + .../fetch-request-xhr-sync.https.html | 53 + .../service-worker/fetch-request-xhr.https.html | 75 + .../service-worker/fetch-response-taint.https.html | 223 + .../service-worker/fetch-response-xhr.https.html | 50 + .../fetch-waits-for-activate.https.html | 128 + .../service-worker/getregistration.https.html | 108 + .../service-worker/getregistrations.https.html | 134 + .../global-serviceworker.https.any.js | 53 + .../service-worker/historical.https.any.js | 5 + .../http-to-https-redirect-and-register.https.html | 49 + .../immutable-prototype-serviceworker.https.html | 23 + .../import-scripts-cross-origin.https.html | 18 + .../import-scripts-data-url.https.html | 18 + .../import-scripts-mime-types.https.html | 30 + .../import-scripts-redirect.https.html | 55 + .../import-scripts-resource-map.https.html | 34 + .../import-scripts-updated-flag.https.html | 83 + .../service-worker/indexeddb.https.html | 78 + .../service-worker/install-event-type.https.html | 30 + .../service-worker/installing.https.html | 48 + .../interface-requirements-sw.https.html | 16 + .../service-worker/invalid-blobtype.https.html | 40 + .../service-worker/invalid-header.https.html | 39 + .../service-worker/iso-latin1-header.https.html | 40 + .../local-url-inherit-controller.https.html | 115 + .../service-worker/mime-sniffing.https.html | 24 + .../multi-globals/current/current.https.html | 2 + .../multi-globals/current/test-sw.js | 1 + .../multi-globals/incumbent/incumbent.https.html | 20 + .../multi-globals/incumbent/test-sw.js | 1 + .../multi-globals/relevant/relevant.https.html | 2 + .../multi-globals/relevant/test-sw.js | 1 + .../service-worker/multi-globals/test-sw.js | 1 + .../multi-globals/url-parsing.https.html | 73 + .../service-worker/multipart-image.https.html | 68 + .../service-worker/multiple-register.https.html | 117 + .../service-worker/multiple-update.https.html | 94 + .../service-worker/navigate-window.https.html | 151 + .../service-worker/navigation-headers.https.html | 819 ++++ .../broken-chunked-encoding.https.html | 42 + .../navigation-preload/chunked-encoding.https.html | 25 + .../empty-preload-response-body.https.html | 25 + .../navigation-preload/get-state.https.html | 217 + 
.../navigationPreload.https.html | 20 + .../navigation-preload/redirect.https.html | 93 + .../navigation-preload/request-headers.https.html | 41 + .../navigation-preload/resource-timing.https.html | 92 + .../resources/broken-chunked-encoding-scope.asis | 6 + .../resources/broken-chunked-encoding-worker.js | 11 + .../resources/chunked-encoding-scope.py | 19 + .../resources/chunked-encoding-worker.js | 8 + .../navigation-preload/resources/cookie.py | 20 + .../empty-preload-response-body-scope.html | 0 .../empty-preload-response-body-worker.js | 15 + .../resources/get-state-worker.js | 21 + .../navigation-preload/resources/helpers.js | 5 + .../resources/navigation-preload-worker.js | 3 + .../resources/redirect-redirected.html | 3 + .../navigation-preload/resources/redirect-scope.py | 38 + .../resources/redirect-worker.js | 35 + .../resources/request-headers-scope.py | 14 + .../resources/request-headers-worker.js | 10 + .../resources/resource-timing-scope.py | 19 + .../resources/resource-timing-worker.js | 37 + .../resources/samesite-iframe.html | 10 + .../resources/samesite-sw-helper.html | 34 + .../resources/samesite-worker.js | 8 + .../resources/wait-for-activate-worker.js | 40 + .../navigation-preload/samesite-cookies.https.html | 61 + .../navigation-preload/samesite-iframe.https.html | 67 + .../navigation-redirect-body.https.html | 53 + .../navigation-redirect-resolution.https.html | 58 + .../navigation-redirect-to-http.https.html | 25 + .../service-worker/navigation-redirect.https.html | 846 ++++ .../navigation-sets-cookie.https.html | 133 + .../navigation-timing-extended.https.html | 55 + .../service-worker/navigation-timing.https.html | 77 + .../nested-blob-url-workers.https.html | 42 + .../service-worker/next-hop-protocol.https.html | 49 + .../no-dynamic-import-in-module.any.js | 7 + .../service-worker/no-dynamic-import.any.js | 3 + .../onactivate-script-error.https.html | 74 + .../oninstall-script-error.https.html | 72 + .../opaque-response-preloaded.https.html | 50 + .../service-worker/opaque-script.https.html | 71 + .../partitioned-claim.tentative.https.html | 74 + .../partitioned-cookies.tentative.https.html | 119 + ...rtitioned-getRegistrations.tentative.https.html | 99 + .../partitioned-matchAll.tentative.https.html | 65 + .../partitioned.tentative.https.html | 188 + .../service-worker/performance-timeline.https.html | 49 + .../service-worker/postMessage-client-worker.js | 23 + .../service-worker/postmessage-blob-url.https.html | 33 + ...stmessage-from-waiting-serviceworker.https.html | 50 + .../postmessage-msgport-to-client.https.html | 43 + .../postmessage-to-client-message-queue.https.html | 212 + .../postmessage-to-client.https.html | 42 + .../service-worker/postmessage.https.html | 202 + .../service-worker/ready.https.window.js | 223 + .../service-worker/redirected-response.https.html | 471 ++ .../service-worker/referer.https.html | 40 + .../referrer-policy-header.https.html | 67 + .../referrer-toplevel-script-fetch.https.html | 64 + .../register-closed-window.https.html | 35 + .../register-default-scope.https.html | 69 + ...ster-same-scope-different-script-url.https.html | 233 + ...ister-wait-forever-in-install-worker.https.html | 57 + .../service-worker/registration-basic.https.html | 39 + .../registration-end-to-end.https.html | 88 + .../service-worker/registration-events.https.html | 42 + .../service-worker/registration-iframe.https.html | 116 + .../registration-mime-types.https.html | 10 + .../registration-schedule-job.https.html | 107 + 
...istration-scope-module-static-import.https.html | 41 + .../service-worker/registration-scope.https.html | 9 + .../registration-script-module.https.html | 13 + .../registration-script-url.https.html | 9 + .../service-worker/registration-script.https.html | 12 + .../registration-security-error.https.html | 9 + ...gistration-service-worker-attributes.https.html | 72 + .../registration-updateviacache.https.html | 204 + .../service-worker/rejections.https.html | 21 + .../service-worker/request-end-to-end.https.html | 40 + .../resource-timing-bodySize.https.html | 55 + .../resource-timing-cross-origin.https.html | 46 + .../resource-timing-fetch-variants.https.html | 121 + .../service-worker/resource-timing.sub.https.html | 150 + .../service-worker/resources/404.py | 5 + ...ank-replacement-blank-dynamic-nested-frame.html | 21 + ...about-blank-replacement-blank-nested-frame.html | 21 + .../resources/about-blank-replacement-frame.py | 31 + .../about-blank-replacement-ping-frame.py | 49 + .../about-blank-replacement-popup-frame.py | 32 + ...bout-blank-replacement-srcdoc-nested-frame.html | 22 + ...lank-replacement-uncontrolled-nested-frame.html | 22 + .../resources/about-blank-replacement-worker.js | 95 + .../service-worker/resources/basic-module-2.js | 1 + .../service-worker/resources/basic-module.js | 1 + .../service-worker/resources/blank.html | 2 + .../resources/bytecheck-worker-imported-script.py | 20 + .../service-worker/resources/bytecheck-worker.py | 38 + .../claim-blob-url-worker-fetch-iframe.html | 21 + .../claim-nested-worker-fetch-iframe.html | 16 + .../claim-nested-worker-fetch-parent-worker.js | 12 + .../claim-shared-worker-fetch-iframe.html | 13 + .../resources/claim-shared-worker-fetch-worker.js | 8 + .../resources/claim-with-redirect-iframe.html | 48 + .../resources/claim-worker-fetch-iframe.html | 13 + .../resources/claim-worker-fetch-worker.js | 5 + .../service-worker/resources/claim-worker.js | 19 + .../service-worker/resources/classic-worker.js | 1 + .../service-worker/resources/client-id-worker.js | 27 + .../resources/client-navigate-frame.html | 12 + .../resources/client-navigate-worker.js | 92 + .../resources/client-navigated-frame.html | 3 + .../resources/client-url-of-blob-url-worker.html | 26 + .../resources/client-url-of-blob-url-worker.js | 10 + .../resources/clients-frame-freeze.html | 15 + .../clients-get-client-types-frame-worker.js | 11 + .../resources/clients-get-client-types-frame.html | 17 + .../clients-get-client-types-shared-worker.js | 10 + .../resources/clients-get-client-types-worker.js | 11 + .../resources/clients-get-cross-origin-frame.html | 50 + .../resources/clients-get-frame.html | 12 + .../resources/clients-get-other-origin.html | 64 + .../clients-get-resultingClientId-worker.js | 60 + .../service-worker/resources/clients-get-worker.js | 41 + .../clients-matchall-blob-url-worker.html | 20 + ...ients-matchall-client-types-dedicated-worker.js | 3 + .../clients-matchall-client-types-iframe.html | 8 + .../clients-matchall-client-types-shared-worker.js | 4 + .../clients-matchall-on-evaluation-worker.js | 11 + .../resources/clients-matchall-worker.js | 40 + .../resources/controlled-frame-postMessage.html | 39 + .../controlled-worker-late-postMessage.js | 6 + .../resources/controlled-worker-postMessage.js | 4 + .../service-worker/resources/cors-approved.txt | 1 + .../resources/cors-approved.txt.headers | 3 + .../service-worker/resources/cors-denied.txt | 2 + .../resources/create-blob-url-worker.js | 22 + .../resources/create-out-of-scope-worker.html | 19 + 
.../service-worker/resources/echo-content.py | 16 + .../service-worker/resources/echo-cookie-worker.py | 24 + .../resources/echo-message-to-source-worker.js | 3 + .../embed-and-object-are-not-intercepted-worker.js | 14 + .../embed-image-is-not-intercepted-iframe.html | 21 + .../resources/embed-is-not-intercepted-iframe.html | 17 + ...embed-navigation-is-not-intercepted-iframe.html | 23 + .../resources/embedded-content-from-server.html | 6 + .../embedded-content-from-service-worker.html | 7 + .../resources/empty-but-slow-worker.js | 8 + .../service-worker/resources/empty-worker.js | 1 + .../service-worker/resources/empty.h2.js | 0 .../service-worker/resources/empty.html | 6 + .../service-worker/resources/empty.js | 0 .../resources/enable-client-message-queue.html | 39 + .../service-worker/resources/end-to-end-worker.js | 7 + .../service-worker/resources/events-worker.js | 12 + .../resources/extendable-event-async-waituntil.js | 210 + .../resources/extendable-event-waituntil.js | 87 + .../resources/fail-on-fetch-worker.js | 5 + .../resources/fetch-access-control-login.html | 16 + .../resources/fetch-access-control.py | 114 + .../fetch-canvas-tainting-double-write-worker.js | 7 + .../resources/fetch-canvas-tainting-iframe.html | 70 + .../resources/fetch-canvas-tainting-tests.js | 241 + .../fetch-cors-exposed-header-names-worker.js | 3 + .../resources/fetch-cors-xhr-iframe.html | 170 + .../service-worker/resources/fetch-csp-iframe.html | 16 + .../resources/fetch-csp-iframe.html.sub.headers | 1 + .../service-worker/resources/fetch-error-worker.js | 22 + .../resources/fetch-event-add-async-worker.js | 6 + ...-event-after-navigation-within-page-iframe.html | 22 + .../fetch-event-async-respond-with-worker.js | 66 + .../resources/fetch-event-handled-worker.js | 37 + ...etch-event-network-error-controllee-iframe.html | 60 + .../resources/fetch-event-network-error-worker.js | 49 + .../fetch-event-network-fallback-worker.js | 3 + .../fetch-event-respond-with-argument-iframe.html | 55 + .../fetch-event-respond-with-argument-worker.js | 14 + ...ent-respond-with-body-loaded-in-chunk-worker.js | 7 + ...ch-event-respond-with-custom-response-worker.js | 45 + ...tch-event-respond-with-partial-stream-worker.js | 28 + ...nt-respond-with-readable-stream-chunk-worker.js | 40 + ...ch-event-respond-with-readable-stream-worker.js | 81 + ...th-response-body-with-invalid-chunk-iframe.html | 15 + ...with-response-body-with-invalid-chunk-worker.js | 12 + ...-event-respond-with-stops-propagation-worker.js | 15 + .../resources/fetch-event-test-worker.js | 224 + .../resources/fetch-event-within-sw-worker.js | 48 + .../resources/fetch-header-visibility-iframe.html | 66 + ...ch-mixed-content-iframe-inscope-to-inscope.html | 71 + ...h-mixed-content-iframe-inscope-to-outscope.html | 80 + .../resources/fetch-mixed-content-iframe.html | 71 + .../fetch-request-css-base-url-iframe.html | 20 + .../resources/fetch-request-css-base-url-style.css | 1 + .../resources/fetch-request-css-base-url-worker.js | 45 + ...h-request-css-cross-origin-mime-check-cross.css | 1 + ...-request-css-cross-origin-mime-check-cross.html | 1 + ...request-css-cross-origin-mime-check-iframe.html | 17 + ...ch-request-css-cross-origin-mime-check-same.css | 1 + ...h-request-css-cross-origin-mime-check-same.html | 1 + ...tch-request-css-cross-origin-read-contents.html | 15 + .../fetch-request-css-cross-origin-worker.js | 65 + .../resources/fetch-request-fallback-iframe.html | 32 + .../resources/fetch-request-fallback-worker.js | 13 + 
.../fetch-request-html-imports-iframe.html | 13 + .../resources/fetch-request-html-imports-worker.js | 30 + .../fetch-request-no-freshness-headers-iframe.html | 1 + .../fetch-request-no-freshness-headers-script.py | 6 + .../fetch-request-no-freshness-headers-worker.js | 18 + .../resources/fetch-request-redirect-iframe.html | 35 + .../fetch-request-resources-iframe.https.html | 87 + .../resources/fetch-request-resources-worker.js | 26 + .../resources/fetch-request-xhr-iframe.https.html | 208 + .../fetch-request-xhr-sync-error-worker.js | 19 + .../resources/fetch-request-xhr-sync-iframe.html | 13 + .../fetch-request-xhr-sync-on-worker-worker.js | 41 + .../resources/fetch-request-xhr-sync-worker.js | 7 + .../resources/fetch-request-xhr-worker.js | 22 + .../resources/fetch-response-taint-iframe.html | 2 + .../resources/fetch-response-xhr-iframe.https.html | 53 + .../resources/fetch-response-xhr-worker.js | 12 + .../service-worker/resources/fetch-response.html | 29 + .../service-worker/resources/fetch-response.js | 35 + .../fetch-rewrite-worker-referrer-policy.js | 4 + ...fetch-rewrite-worker-referrer-policy.js.headers | 2 + .../resources/fetch-rewrite-worker.js | 166 + .../resources/fetch-rewrite-worker.js.headers | 2 + .../resources/fetch-variants-worker.js | 35 + .../resources/fetch-waits-for-activate-worker.js | 31 + .../service-worker/resources/form-poster.html | 13 + .../resources/frame-for-getregistrations.html | 19 + .../resources/get-resultingClientId-worker.js | 107 + ...http-to-https-redirect-and-register-iframe.html | 25 + .../resources/iframe-with-fetch-variants.html | 14 + .../resources/iframe-with-image.html | 2 + .../resources/immutable-prototype-serviceworker.js | 19 + .../resources/import-echo-cookie-worker-module.py | 6 + .../resources/import-echo-cookie-worker.js | 1 + .../resources/import-mime-type-worker.py | 10 + .../service-worker/resources/import-relative.xsl | 5 + ...-scripts-404-after-update-plus-update-worker.js | 8 + .../resources/import-scripts-404-after-update.js | 6 + .../service-worker/resources/import-scripts-404.js | 1 + .../import-scripts-cross-origin-worker.sub.js | 1 + .../resources/import-scripts-data-url-worker.js | 1 + .../import-scripts-diff-resource-map-worker.js | 10 + .../resources/import-scripts-echo.py | 6 + .../service-worker/resources/import-scripts-get.py | 6 + .../resources/import-scripts-mime-types-worker.js | 49 + .../resources/import-scripts-redirect-import.js | 1 + ...mport-scripts-redirect-on-second-time-worker.js | 7 + .../resources/import-scripts-redirect-worker.js | 1 + .../import-scripts-resource-map-worker.js | 15 + .../import-scripts-updated-flag-worker.js | 31 + .../resources/import-scripts-version.py | 17 + .../resources/imported-classic-script.js | 1 + .../resources/imported-module-script.js | 1 + .../service-worker/resources/indexeddb-worker.js | 57 + .../resources/install-event-type-worker.js | 9 + .../service-worker/resources/install-worker.html | 22 + .../resources/interface-requirements-worker.sub.js | 59 + .../resources/invalid-blobtype-iframe.https.html | 28 + .../resources/invalid-blobtype-worker.js | 10 + .../invalid-chunked-encoding-with-flush.py | 9 + .../resources/invalid-chunked-encoding.py | 2 + .../resources/invalid-header-iframe.https.html | 25 + .../resources/invalid-header-worker.js | 12 + .../resources/iso-latin1-header-iframe.html | 23 + .../resources/iso-latin1-header-worker.js | 12 + .../service-worker/resources/load_worker.js | 29 + .../service-worker/resources/loaded.html | 9 + 
.../local-url-inherit-controller-frame.html | 130 + .../local-url-inherit-controller-worker.js | 5 + .../service-worker/resources/location-setter.html | 10 + .../resources/malformed-http-response.asis | 1 + .../service-worker/resources/malformed-worker.py | 14 + .../resources/message-vs-microtask.html | 18 + .../resources/mime-sniffing-worker.js | 9 + .../service-worker/resources/mime-type-worker.py | 4 + .../service-worker/resources/mint-new-worker.py | 27 + .../service-worker/resources/missing.asis | 4 + .../service-worker/resources/module-worker.js | 1 + .../resources/multipart-image-iframe.html | 19 + .../resources/multipart-image-worker.js | 21 + .../service-worker/resources/multipart-image.py | 23 + .../resources/navigate-window-worker.js | 21 + .../resources/navigation-headers-server.py | 19 + .../resources/navigation-redirect-body-worker.js | 11 + .../resources/navigation-redirect-body.py | 11 + .../navigation-redirect-other-origin.html | 89 + .../resources/navigation-redirect-out-scope.py | 22 + .../resources/navigation-redirect-scope1.py | 22 + .../resources/navigation-redirect-scope2.py | 22 + .../navigation-redirect-to-http-iframe.html | 42 + .../navigation-redirect-to-http-worker.js | 22 + .../resources/navigation-timing-worker-extended.js | 22 + .../resources/navigation-timing-worker.js | 15 + ...nested-blob-url-worker-created-from-worker.html | 16 + .../resources/nested-blob-url-workers.html | 38 + .../resources/nested-iframe-parent.html | 5 + .../service-worker/resources/nested-parent.html | 18 + ...nested-worker-created-from-blob-url-worker.html | 33 + .../service-worker/resources/nested_load_worker.js | 23 + .../service-worker/resources/no-dynamic-import.js | 18 + .../service-worker/resources/notification_icon.py | 11 + .../object-image-is-not-intercepted-iframe.html | 21 + .../object-is-not-intercepted-iframe.html | 18 + ...bject-navigation-is-not-intercepted-iframe.html | 24 + ...ctivate-throw-error-from-nested-event-worker.js | 13 + .../onactivate-throw-error-then-cancel-worker.js | 3 + ...vate-throw-error-then-prevent-default-worker.js | 7 + ...tivate-throw-error-with-empty-onerror-worker.js | 2 + .../resources/onactivate-throw-error-worker.js | 7 + .../resources/onactivate-waituntil-forever.js | 8 + .../resources/onfetch-waituntil-forever.js | 10 + ...install-throw-error-from-nested-event-worker.js | 12 + .../oninstall-throw-error-then-cancel-worker.js | 3 + ...tall-throw-error-then-prevent-default-worker.js | 7 + ...nstall-throw-error-with-empty-onerror-worker.js | 2 + .../resources/oninstall-throw-error-worker.js | 7 + .../resources/oninstall-waituntil-forever.js | 8 + .../oninstall-waituntil-throw-error-worker.js | 5 + .../resources/onparse-infiniteloop-worker.js | 8 + .../opaque-response-being-preloaded-xhr.html | 33 + .../resources/opaque-response-preloaded-worker.js | 12 + .../resources/opaque-response-preloaded-xhr.html | 35 + .../resources/opaque-script-frame.html | 21 + .../resources/opaque-script-large.js | 41 + .../resources/opaque-script-small.js | 3 + .../service-worker/resources/opaque-script-sw.js | 37 + .../service-worker/resources/other.html | 3 + .../resources/override_assert_object_equals.js | 58 + ...artitioned-cookies-3p-credentialless-frame.html | 114 + .../resources/partitioned-cookies-3p-frame.html | 108 + .../resources/partitioned-cookies-3p-sw.js | 53 + .../resources/partitioned-cookies-3p-window.html | 35 + .../resources/partitioned-cookies-sw.js | 53 + .../partitioned-service-worker-iframe-claim.html | 59 + 
...itioned-service-worker-nested-iframe-child.html | 44 + ...tioned-service-worker-nested-iframe-parent.html | 30 + ...worker-third-party-iframe-getRegistrations.html | 40 + ...service-worker-third-party-iframe-matchAll.html | 27 + ...titioned-service-worker-third-party-iframe.html | 36 + ...titioned-service-worker-third-party-window.html | 41 + .../resources/partitioned-storage-sw.js | 81 + .../service-worker/resources/partitioned-utils.js | 110 + .../resources/pass-through-worker.js | 3 + .../service-worker/resources/pass.txt | 1 + .../resources/performance-timeline-worker.js | 62 + .../resources/postmessage-blob-url.js | 5 + .../postmessage-dictionary-transferables-worker.js | 24 + .../resources/postmessage-echo-worker.js | 3 + .../resources/postmessage-fetched-text.js | 5 + .../postmessage-msgport-to-client-worker.js | 19 + .../resources/postmessage-on-load-worker.js | 9 + .../resources/postmessage-to-client-worker.js | 10 + .../resources/postmessage-transferables-worker.js | 24 + .../service-worker/resources/postmessage-worker.js | 19 + .../range-request-to-different-origins-worker.js | 40 + ...nge-request-with-different-cors-modes-worker.js | 60 + .../service-worker/resources/redirect-worker.js | 145 + .../service-worker/resources/redirect.py | 27 + .../service-worker/resources/referer-iframe.html | 39 + .../resources/referrer-policy-iframe.html | 32 + .../resources/register-closed-window-iframe.html | 19 + .../service-worker/resources/register-iframe.html | 4 + .../resources/register-rewrite-worker.html | 32 + .../resources/registration-tests-mime-types.js | 96 + .../resources/registration-tests-scope.js | 120 + .../resources/registration-tests-script-url.js | 82 + .../resources/registration-tests-script.js | 121 + .../resources/registration-tests-security-error.js | 78 + .../resources/registration-worker.js | 1 + .../resources/reject-install-worker.js | 3 + .../service-worker/resources/reply-to-message.html | 7 + .../resources/request-end-to-end-worker.js | 34 + .../service-worker/resources/request-headers.py | 8 + .../resources/resource-timing-iframe.sub.html | 10 + .../resources/resource-timing-worker.js | 12 + .../resources/respond-then-throw-worker.js | 40 + ...respond-with-body-accessed-response-iframe.html | 20 + .../respond-with-body-accessed-response-worker.js | 93 + .../respond-with-body-accessed-response.jsonp | 1 + .../resources/sample-worker-interceptor.js | 62 + .../service-worker/resources/sample.html | 2 + .../service-worker/resources/sample.js | 1 + .../service-worker/resources/sample.txt | 1 + .../sandboxed-iframe-fetch-event-iframe.html | 63 + .../sandboxed-iframe-fetch-event-iframe.py | 18 + .../sandboxed-iframe-fetch-event-worker.js | 20 + ...oxed-iframe-navigator-serviceworker-iframe.html | 25 + .../module-worker-importing-redirect-to-scope2.js | 1 + .../scope1/module-worker-importing-scope2.js | 1 + .../service-worker/resources/scope1/redirect.py | 6 + .../resources/scope2/import-scripts-echo.py | 6 + .../resources/scope2/imported-module-script.js | 4 + .../service-worker/resources/scope2/simple.txt | 1 + .../worker_interception_redirect_webworker.py | 6 + .../resources/secure-context-service-worker.js | 21 + .../resources/secure-context/sender.html | 1 + .../resources/secure-context/window.html | 15 + .../resources/service-worker-csp-worker.py | 183 + .../resources/service-worker-header.py | 20 + ...ce-worker-interception-dynamic-import-worker.js | 1 + .../service-worker-interception-network-worker.js | 1 + .../service-worker-interception-service-worker.js | 
9 + ...ice-worker-interception-static-import-worker.js | 1 + .../service-worker/resources/silence.oga | Bin 0 -> 12983 bytes .../resources/simple-intercept-worker.js | 5 + .../resources/simple-intercept-worker.js.headers | 1 + .../service-worker/resources/simple.html | 3 + .../service-worker/resources/simple.txt | 1 + .../resources/skip-waiting-installed-worker.js | 33 + .../resources/skip-waiting-worker.js | 21 + .../service-worker/resources/square.png | Bin 0 -> 18299 bytes .../resources/square.png.sub.headers | 2 + .../resources/stalling-service-worker.js | 54 + .../service-worker/resources/subdir/blank.html | 2 + .../resources/subdir/import-scripts-echo.py | 6 + .../service-worker/resources/subdir/simple.txt | 1 + .../worker_interception_redirect_webworker.py | 6 + .../service-worker/resources/success.py | 8 + .../resources/svg-target-reftest-001-frame.html | 3 + .../resources/svg-target-reftest-001.html | 5 + .../resources/svg-target-reftest-frame.html | 2 + .../service-worker/resources/test-helpers.sub.js | 300 ++ .../resources/test-request-headers-worker.js | 10 + .../resources/test-request-headers-worker.py | 21 + .../resources/test-request-mode-worker.js | 10 + .../resources/test-request-mode-worker.py | 22 + .../resources/testharness-helpers.js | 136 + .../service-worker/resources/trickle.py | 14 + .../service-worker/resources/type-check-worker.js | 10 + .../resources/unregister-controller-page.html | 16 + .../resources/unregister-immediately-helpers.js | 19 + .../resources/unregister-rewrite-worker.html | 18 + .../resources/update-claim-worker.py | 24 + .../resources/update-during-installation-worker.js | 61 + .../resources/update-during-installation-worker.py | 11 + .../resources/update-fetch-worker.py | 18 + .../update-max-aged-worker-imported-script.py | 14 + .../resources/update-max-aged-worker.py | 30 + ...pdate-missing-import-scripts-imported-worker.py | 9 + .../update-missing-import-scripts-main-worker.py | 15 + .../resources/update-nocookie-worker.py | 14 + .../resources/update-recovery-worker.py | 25 + .../resources/update-registration-with-type.py | 33 + .../update-smaller-body-after-update-worker.js | 1 + .../update-smaller-body-before-update-worker.js | 2 + .../resources/update-worker-from-file.py | 33 + .../service-worker/resources/update-worker.py | 62 + .../update/update-after-oneday.https.html | 8 + .../service-worker/resources/update_shell.py | 32 + .../service-worker/resources/vtt-frame.html | 6 + .../resources/wait-forever-in-install-worker.js | 12 + .../service-worker/resources/websocket-worker.js | 35 + .../service-worker/resources/websocket.js | 7 + .../service-worker/resources/window-opener.html | 17 + .../resources/windowclient-navigate-worker.js | 75 + .../resources/worker-client-id-worker.js | 25 + .../resources/worker-fetching-cross-origin.js | 12 + .../worker-interception-redirect-serviceworker.js | 53 + .../worker-interception-redirect-webworker.js | 56 + .../resources/worker-load-interceptor.js | 16 + .../service-worker/resources/worker-testharness.js | 49 + .../worker_interception_redirect_webworker.py | 20 + .../resources/xhr-content-length-worker.js | 22 + .../service-worker/resources/xhr-iframe.html | 23 + .../resources/xhr-response-url-worker.js | 32 + .../resources/xsl-base-url-iframe.xml | 5 + .../resources/xsl-base-url-worker.js | 12 + .../service-worker/resources/xslt-pass.xsl | 11 + .../respond-with-body-accessed-response.https.html | 54 + .../service-worker/same-site-cookies.https.html | 496 ++ .../sandboxed-iframe-fetch-event.https.html 
| 536 +++ ...boxed-iframe-navigator-serviceworker.https.html | 120 + .../service-worker/secure-context.https.html | 57 + .../service-worker-csp-connect.https.html | 10 + .../service-worker-csp-default.https.html | 10 + .../service-worker-csp-script.https.html | 10 + .../service-worker-header.https.html | 23 + ...rviceworker-message-event-historical.https.html | 45 + .../serviceworkerobject-scripturl.https.html | 26 + .../skip-waiting-installed.https.html | 70 + .../skip-waiting-using-registration.https.html | 66 + .../skip-waiting-without-client.https.html | 12 + ...p-waiting-without-using-registration.https.html | 44 + .../service-worker/skip-waiting.https.html | 58 + .../service-worker/state.https.html | 74 + .../service-worker/svg-target-reftest.https.html | 28 + .../service-worker/synced-state.https.html | 93 + .../tentative/static-router/README.md | 4 + .../tentative/static-router/resources/direct.txt | 1 + .../simple-test-for-condition-main-resource.html | 3 + .../tentative/static-router/resources/simple.html | 3 + .../static-router/resources/static-router-sw.js | 35 + .../static-router/resources/test-helpers.sub.js | 303 ++ .../static-router-main-resource.https.html | 58 + .../static-router-subresource.https.html | 48 + .../service-worker/uncontrolled-page.https.html | 39 + .../unregister-controller.https.html | 108 + ...egister-immediately-before-installed.https.html | 57 + ...immediately-during-extendable-events.https.html | 50 + .../unregister-immediately.https.html | 134 + .../unregister-then-register-new-script.https.html | 136 + .../unregister-then-register.https.html | 107 + .../service-worker/unregister.https.html | 40 + .../update-after-navigation-fetch-event.https.html | 91 + .../update-after-navigation-redirect.https.html | 74 + .../service-worker/update-after-oneday.https.html | 51 + .../update-bytecheck-cors-import.https.html | 92 + .../service-worker/update-bytecheck.https.html | 92 + .../update-import-scripts.https.html | 135 + .../update-missing-import-scripts.https.html | 33 + .../update-module-request-mode.https.html | 45 + .../update-no-cache-request-headers.https.html | 48 + .../service-worker/update-not-allowed.https.html | 140 + .../service-worker/update-on-navigation.https.html | 20 + .../service-worker/update-recovery.https.html | 73 + .../update-registration-with-type.https.html | 208 + .../service-worker/update-result.https.html | 23 + .../service-worker/update.https.html | 164 + .../service-worker/waiting.https.html | 47 + .../websocket-in-service-worker.https.html | 27 + .../service-worker/websocket.https.html | 45 + .../service-worker/webvtt-cross-origin.https.html | 175 + .../windowclient-navigate.https.html | 190 + .../service-worker/worker-client-id.https.html | 58 + ...-sandboxed-iframe-by-csp-fetch-event.https.html | 132 + .../worker-interception-redirect.https.html | 212 + .../service-worker/worker-interception.https.html | 244 + .../xhr-content-length.https.window.js | 55 + .../service-worker/xhr-response-url.https.html | 103 + .../service-worker/xsl-base-url.https.html | 32 + test/wpt/tests/storage/META.yml | 4 + test/wpt/tests/storage/README.md | 7 + test/wpt/tests/storage/buckets/META.yml | 5 + .../bucket-quota-indexeddb.tentative.https.any.js | 35 + .../bucket-storage-policy.tentative.https.any.js | 21 + .../storage/buckets/resources/cached-resource.txt | 1 + test/wpt/tests/storage/buckets/resources/util.js | 57 + .../tests/storage/estimate-indexeddb.https.any.js | 61 + .../tests/storage/estimate-parallel.https.any.js | 13 + 
...ate-usage-details-caches.https.tentative.any.js | 20 + ...-usage-details-indexeddb.https.tentative.any.js | 59 + ...tails-service-workers.https.tentative.window.js | 38 + .../estimate-usage-details.https.tentative.any.js | 12 + test/wpt/tests/storage/helpers.js | 46 + test/wpt/tests/storage/idlharness.https.any.js | 18 + .../tests/storage/opaque-origin.https.window.js | 80 + ...e-usage-details-caches.tentative.https.sub.html | 74 + ...sage-details-indexeddb.tentative.https.sub.html | 84 + ...etails-service-workers.tentative.https.sub.html | 88 + .../tests/storage/permission-query.https.any.js | 10 + .../storage/persist-permission-manual.https.html | 27 + test/wpt/tests/storage/persisted.https.any.js | 14 + ...achange-in-detached-iframe.tentative.https.html | 21 + ...estimate-usage-details-caches-helper-frame.html | 30 + ...imate-usage-details-indexeddb-helper-frame.html | 28 + ...usage-details-service-workers-helper-frame.html | 30 + test/wpt/tests/storage/resources/worker.js | 3 + .../storage/storagemanager-estimate.https.any.js | 60 + ...agemanager-persist-persisted-match.https.any.js | 9 + .../storage/storagemanager-persist.https.window.js | 10 + .../storage/storagemanager-persist.https.worker.js | 8 + .../storage/storagemanager-persisted.https.any.js | 10 + test/wpt/tests/websockets/Close-1000-reason.any.js | 21 + .../tests/websockets/Close-1000-verify-code.any.js | 21 + test/wpt/tests/websockets/Close-1000.any.js | 21 + .../tests/websockets/Close-1005-verify-code.any.js | 21 + test/wpt/tests/websockets/Close-1005.any.js | 18 + test/wpt/tests/websockets/Close-2999-reason.any.js | 17 + test/wpt/tests/websockets/Close-3000-reason.any.js | 21 + .../tests/websockets/Close-3000-verify-code.any.js | 20 + test/wpt/tests/websockets/Close-4999-reason.any.js | 21 + .../tests/websockets/Close-Reason-124Bytes.any.js | 20 + test/wpt/tests/websockets/Close-delayed.any.js | 27 + test/wpt/tests/websockets/Close-onlyReason.any.js | 17 + .../websockets/Close-readyState-Closed.any.js | 21 + .../websockets/Close-readyState-Closing.any.js | 20 + .../Close-reason-unpaired-surrogates.any.js | 22 + .../websockets/Close-server-initiated-close.any.js | 21 + test/wpt/tests/websockets/Close-undefined.any.js | 19 + .../Create-asciiSep-protocol-string.any.js | 12 + .../tests/websockets/Create-blocked-port.any.js | 97 + .../websockets/Create-extensions-empty.any.js | 20 + test/wpt/tests/websockets/Create-http-urls.any.js | 19 + .../tests/websockets/Create-invalid-urls.any.js | 14 + .../websockets/Create-non-absolute-url.any.js | 14 + .../Create-nonAscii-protocol-string.any.js | 12 + .../websockets/Create-on-worker-shutdown.any.js | 26 + .../websockets/Create-protocol-with-space.any.js | 11 + ...eate-protocols-repeated-case-insensitive.any.js | 11 + .../websockets/Create-protocols-repeated.any.js | 11 + .../tests/websockets/Create-url-with-space.any.js | 12 + .../Create-url-with-windows-1252-encoding.html | 20 + .../Create-valid-url-array-protocols.any.js | 21 + .../Create-valid-url-binaryType-blob.any.js | 21 + .../Create-valid-url-protocol-empty.any.js | 10 + .../Create-valid-url-protocol-setCorrectly.any.js | 21 + .../Create-valid-url-protocol-string.any.js | 21 + .../websockets/Create-valid-url-protocol.any.js | 21 + test/wpt/tests/websockets/Create-valid-url.any.js | 21 + test/wpt/tests/websockets/META.yml | 6 + test/wpt/tests/websockets/README.md | 1 + test/wpt/tests/websockets/Send-0byte-data.any.js | 30 + test/wpt/tests/websockets/Send-65K-data.any.js | 33 + 
test/wpt/tests/websockets/Send-before-open.any.js | 11 + .../websockets/Send-binary-65K-arraybuffer.any.js | 33 + .../websockets/Send-binary-arraybuffer.any.js | 33 + .../Send-binary-arraybufferview-float32.any.js | 40 + .../Send-binary-arraybufferview-float64.any.js | 40 + ...Send-binary-arraybufferview-int16-offset.any.js | 40 + .../Send-binary-arraybufferview-int32.any.js | 40 + .../Send-binary-arraybufferview-int8.any.js | 40 + ...ary-arraybufferview-uint16-offset-length.any.js | 40 + ...end-binary-arraybufferview-uint32-offset.any.js | 40 + ...nary-arraybufferview-uint8-offset-length.any.js | 40 + ...Send-binary-arraybufferview-uint8-offset.any.js | 40 + test/wpt/tests/websockets/Send-binary-blob.any.js | 36 + test/wpt/tests/websockets/Send-data.any.js | 30 + test/wpt/tests/websockets/Send-data.worker.js | 26 + test/wpt/tests/websockets/Send-null.any.js | 32 + .../tests/websockets/Send-paired-surrogates.any.js | 30 + test/wpt/tests/websockets/Send-unicode-data.any.js | 30 + .../websockets/Send-unpaired-surrogates.any.js | 30 + ...d-websocket-connection-ccns.tentative.window.js | 31 + ...ache-with-closed-websocket-connection.window.js | 20 + ...n-websocket-connection-ccns.tentative.window.js | 32 + ...-cache-with-open-websocket-connection.window.js | 21 + test/wpt/tests/websockets/basic-auth.any.js | 17 + test/wpt/tests/websockets/binary/001.html | 27 + test/wpt/tests/websockets/binary/002.html | 28 + test/wpt/tests/websockets/binary/004.html | 27 + test/wpt/tests/websockets/binary/005.html | 26 + .../tests/websockets/binaryType-wrong-value.any.js | 23 + .../bufferedAmount-unchanged-by-sync-xhr.any.js | 25 + test/wpt/tests/websockets/close-invalid.any.js | 21 + .../tests/websockets/closing-handshake/002.html | 23 + .../tests/websockets/closing-handshake/003.html | 24 + .../tests/websockets/closing-handshake/004.html | 25 + test/wpt/tests/websockets/constants.sub.js | 94 + test/wpt/tests/websockets/constructor.any.js | 10 + test/wpt/tests/websockets/constructor/001.html | 14 + test/wpt/tests/websockets/constructor/004.html | 36 + test/wpt/tests/websockets/constructor/005.html | 14 + test/wpt/tests/websockets/constructor/006.html | 29 + test/wpt/tests/websockets/constructor/007.html | 17 + test/wpt/tests/websockets/constructor/008.html | 15 + test/wpt/tests/websockets/constructor/009.html | 24 + test/wpt/tests/websockets/constructor/010.html | 22 + test/wpt/tests/websockets/constructor/011.html | 28 + test/wpt/tests/websockets/constructor/012.html | 20 + test/wpt/tests/websockets/constructor/013.html | 42 + test/wpt/tests/websockets/constructor/014.html | 39 + test/wpt/tests/websockets/constructor/016.html | 20 + test/wpt/tests/websockets/constructor/017.html | 19 + test/wpt/tests/websockets/constructor/018.html | 20 + test/wpt/tests/websockets/constructor/019.html | 21 + test/wpt/tests/websockets/constructor/020.html | 21 + test/wpt/tests/websockets/constructor/021.html | 12 + test/wpt/tests/websockets/constructor/022.html | 23 + test/wpt/tests/websockets/cookies/001.html | 28 + test/wpt/tests/websockets/cookies/002.html | 26 + test/wpt/tests/websockets/cookies/003.html | 34 + test/wpt/tests/websockets/cookies/004.html | 31 + test/wpt/tests/websockets/cookies/005.html | 35 + test/wpt/tests/websockets/cookies/006.html | 35 + test/wpt/tests/websockets/cookies/007.html | 36 + .../tests/websockets/cookies/support/set-cookie.py | 7 + .../support/websocket-cookies-helper.sub.js | 57 + .../cookies/third-party-cookie-accepted.https.html | 25 + test/wpt/tests/websockets/eventhandlers.any.js | 15 
+ .../tests/websockets/extended-payload-length.html | 72 + .../tests/websockets/handlers/basic_auth_wsh.py | 26 + .../handlers/delayed-passive-close_wsh.py | 27 + .../tests/websockets/handlers/echo-cookie_wsh.py | 12 + .../websockets/handlers/echo-query_v13_wsh.py | 11 + .../tests/websockets/handlers/echo-query_wsh.py | 9 + .../websockets/handlers/echo_close_data_wsh.py | 20 + .../wpt/tests/websockets/handlers/echo_exit_wsh.py | 19 + test/wpt/tests/websockets/handlers/echo_raw_wsh.py | 16 + test/wpt/tests/websockets/handlers/echo_wsh.py | 36 + .../tests/websockets/handlers/empty-message_wsh.py | 13 + .../handlers/handshake_no_extensions_wsh.py | 9 + .../handlers/handshake_no_protocol_wsh.py | 8 + .../websockets/handlers/handshake_protocol_wsh.py | 7 + .../websockets/handlers/handshake_sleep_2_wsh.py | 9 + test/wpt/tests/websockets/handlers/invalid_wsh.py | 8 + .../tests/websockets/handlers/msg_channel_wsh.py | 234 + test/wpt/tests/websockets/handlers/origin_wsh.py | 11 + .../websockets/handlers/protocol_array_wsh.py | 14 + test/wpt/tests/websockets/handlers/protocol_wsh.py | 12 + .../handlers/receive-backpressure_wsh.py | 14 + .../handlers/receive-many-with-backpressure_wsh.py | 23 + test/wpt/tests/websockets/handlers/referrer_wsh.py | 12 + .../websockets/handlers/send-backpressure_wsh.py | 39 + .../websockets/handlers/set-cookie-secure_wsh.py | 11 + .../websockets/handlers/set-cookie_http_wsh.py | 11 + .../tests/websockets/handlers/set-cookie_wsh.py | 11 + .../handlers/set-cookies-samesite_wsh.py | 25 + .../websockets/handlers/simple_handshake_wsh.py | 35 + .../tests/websockets/handlers/sleep_10_v13_wsh.py | 24 + .../handlers/stash_responder_blocking_wsh.py | 45 + .../websockets/handlers/stash_responder_wsh.py | 45 + .../websockets/handlers/wrong_accept_key_wsh.py | 19 + test/wpt/tests/websockets/idlharness.any.js | 17 + .../interfaces/CloseEvent/clean-close.html | 24 + .../interfaces/CloseEvent/constructor.html | 35 + .../interfaces/CloseEvent/historical.html | 12 + .../bufferedAmount/bufferedAmount-arraybuffer.html | 27 + .../bufferedAmount/bufferedAmount-blob.html | 28 + .../bufferedAmount-defineProperty-getter.html | 18 + .../bufferedAmount-defineProperty-setter.html | 20 + .../bufferedAmount/bufferedAmount-deleting.html | 23 + .../bufferedAmount/bufferedAmount-getting.html | 54 + .../bufferedAmount/bufferedAmount-initial.html | 15 + .../bufferedAmount/bufferedAmount-large.html | 29 + .../bufferedAmount/bufferedAmount-readonly.html | 16 + .../bufferedAmount/bufferedAmount-unicode.html | 25 + .../interfaces/WebSocket/close/close-basic.html | 26 + .../WebSocket/close/close-connecting.html | 25 + .../interfaces/WebSocket/close/close-multiple.html | 29 + .../interfaces/WebSocket/close/close-nested.html | 28 + .../interfaces/WebSocket/close/close-replace.html | 15 + .../interfaces/WebSocket/close/close-return.html | 14 + .../interfaces/WebSocket/constants/001.html | 17 + .../interfaces/WebSocket/constants/002.html | 24 + .../interfaces/WebSocket/constants/003.html | 22 + .../interfaces/WebSocket/constants/004.html | 21 + .../interfaces/WebSocket/constants/005.html | 20 + .../interfaces/WebSocket/constants/006.html | 20 + .../interfaces/WebSocket/events/001.html | 18 + .../interfaces/WebSocket/events/002.html | 20 + .../interfaces/WebSocket/events/003.html | 21 + .../interfaces/WebSocket/events/004.html | 16 + .../interfaces/WebSocket/events/006.html | 17 + .../interfaces/WebSocket/events/007.html | 22 + .../interfaces/WebSocket/events/008.html | 24 + 
.../interfaces/WebSocket/events/009.html | 21 + .../interfaces/WebSocket/events/010.html | 21 + .../interfaces/WebSocket/events/011.html | 18 + .../interfaces/WebSocket/events/012.html | 18 + .../interfaces/WebSocket/events/013.html | 20 + .../interfaces/WebSocket/events/014.html | 21 + .../interfaces/WebSocket/events/015.html | 36 + .../interfaces/WebSocket/events/016.html | 39 + .../interfaces/WebSocket/events/017.html | 56 + .../interfaces/WebSocket/events/018.html | 52 + .../interfaces/WebSocket/events/019.html | 31 + .../interfaces/WebSocket/events/020.html | 17 + .../interfaces/WebSocket/extensions/001.html | 14 + .../WebSocket/protocol/protocol-initial.html | 14 + .../interfaces/WebSocket/readyState/001.html | 13 + .../interfaces/WebSocket/readyState/002.html | 15 + .../interfaces/WebSocket/readyState/003.html | 18 + .../interfaces/WebSocket/readyState/004.html | 17 + .../interfaces/WebSocket/readyState/005.html | 19 + .../interfaces/WebSocket/readyState/006.html | 19 + .../interfaces/WebSocket/readyState/007.html | 19 + .../interfaces/WebSocket/readyState/008.html | 21 + .../websockets/interfaces/WebSocket/send/001.html | 15 + .../websockets/interfaces/WebSocket/send/002.html | 15 + .../websockets/interfaces/WebSocket/send/003.html | 15 + .../websockets/interfaces/WebSocket/send/004.html | 25 + .../websockets/interfaces/WebSocket/send/005.html | 19 + .../websockets/interfaces/WebSocket/send/006.html | 28 + .../websockets/interfaces/WebSocket/send/007.html | 27 + .../websockets/interfaces/WebSocket/send/008.html | 25 + .../websockets/interfaces/WebSocket/send/009.html | 27 + .../websockets/interfaces/WebSocket/send/010.html | 42 + .../websockets/interfaces/WebSocket/send/011.html | 28 + .../websockets/interfaces/WebSocket/send/012.html | 28 + .../websockets/interfaces/WebSocket/url/001.html | 13 + .../websockets/interfaces/WebSocket/url/002.html | 15 + .../websockets/interfaces/WebSocket/url/003.html | 17 + .../websockets/interfaces/WebSocket/url/004.html | 17 + .../websockets/interfaces/WebSocket/url/005.html | 17 + .../websockets/interfaces/WebSocket/url/006.html | 19 + .../interfaces/WebSocket/url/resolve.html | 14 + .../websockets/keeping-connection-open/001.html | 29 + .../tests/websockets/mixed-content.https.any.js | 7 + .../websockets/multi-globals/message-received.html | 33 + .../multi-globals/support/incumbent.sub.html | 24 + .../websockets/multi-globals/support/relevant.html | 2 + .../multi-globals/url-parsing/current/current.html | 2 + .../url-parsing/incumbent/incumbent.html | 13 + .../multi-globals/url-parsing/url-parsing.html | 22 + .../tests/websockets/opening-handshake/001.html | 20 + .../tests/websockets/opening-handshake/002.html | 24 + .../opening-handshake/003-sets-origin.worker.js | 17 + .../tests/websockets/opening-handshake/003.html | 27 + .../tests/websockets/opening-handshake/005.html | 25 + test/wpt/tests/websockets/referrer.any.js | 13 + .../remove-own-iframe-during-onerror.window.js | 23 + .../resources/websockets-test-helpers.sub.js | 25 + test/wpt/tests/websockets/security/001.html | 16 + test/wpt/tests/websockets/security/002.html | 20 + test/wpt/tests/websockets/security/check.py | 2 + ...send-many-64K-messages-with-backpressure.any.js | 49 + .../tests/websockets/stream/tentative/README.md | 9 + .../tests/websockets/stream/tentative/abort.any.js | 50 + .../stream/tentative/backpressure-receive.any.js | 40 + .../stream/tentative/backpressure-send.any.js | 25 + .../tests/websockets/stream/tentative/close.any.js | 187 + 
.../websockets/stream/tentative/constructor.any.js | 67 + .../stream/tentative/resources/url-constants.js | 8 + .../tests/websockets/unload-a-document/001-1.html | 25 + .../tests/websockets/unload-a-document/001-2.html | 4 + .../tests/websockets/unload-a-document/001.html | 26 + .../tests/websockets/unload-a-document/002-1.html | 32 + .../tests/websockets/unload-a-document/002-2.html | 4 + .../tests/websockets/unload-a-document/002.html | 27 + .../tests/websockets/unload-a-document/003.html | 14 + .../tests/websockets/unload-a-document/004.html | 16 + .../tests/websockets/unload-a-document/005-1.html | 22 + .../tests/websockets/unload-a-document/005.html | 21 + test/wpt/tests/wpt | 10 + test/wpt/tests/wpt.py | 7 + test/wpt/tests/xhr/META.yml | 7 + test/wpt/tests/xhr/README.md | 7 + .../xhr/XMLHttpRequest-withCredentials.any.js | 40 + test/wpt/tests/xhr/abort-after-receive.any.js | 30 + test/wpt/tests/xhr/abort-after-send.any.js | 29 + test/wpt/tests/xhr/abort-after-stop.window.js | 22 + test/wpt/tests/xhr/abort-after-timeout.any.js | 43 + test/wpt/tests/xhr/abort-during-done.window.js | 78 + .../xhr/abort-during-headers-received.window.js | 41 + test/wpt/tests/xhr/abort-during-loading.window.js | 41 + test/wpt/tests/xhr/abort-during-open.any.js | 18 + .../tests/xhr/abort-during-readystatechange.any.js | 19 + test/wpt/tests/xhr/abort-during-unsent.any.js | 19 + test/wpt/tests/xhr/abort-during-upload.any.js | 17 + test/wpt/tests/xhr/abort-event-abort.any.js | 32 + test/wpt/tests/xhr/abort-event-listeners.any.js | 13 + test/wpt/tests/xhr/abort-event-loadend.any.js | 30 + test/wpt/tests/xhr/abort-event-order.htm | 52 + test/wpt/tests/xhr/abort-upload-event-abort.any.js | 31 + .../tests/xhr/abort-upload-event-loadend.any.js | 31 + ...-control-and-redirects-async-same-origin.any.js | 61 + .../xhr/access-control-and-redirects-async.any.js | 79 + .../tests/xhr/access-control-and-redirects.any.js | 50 + ...allow-access-control-origin-header-data-url.htm | 43 + ...basic-allow-access-control-origin-header.any.js | 13 + .../xhr/access-control-basic-allow-async.any.js | 19 + ...c-allow-non-cors-safelisted-method-async.any.js | 17 + ...l-basic-allow-non-cors-safelisted-method.any.js | 14 + ...w-preflight-cache-invalidation-by-header.any.js | 38 + ...w-preflight-cache-invalidation-by-method.any.js | 37 + ...trol-basic-allow-preflight-cache-timeout.any.js | 37 + ...cess-control-basic-allow-preflight-cache.any.js | 35 + .../xhr/access-control-basic-allow-star.any.js | 12 + .../tests/xhr/access-control-basic-allow.any.js | 12 + ...ntrol-basic-cors-safelisted-request-headers.htm | 31 + ...trol-basic-cors-safelisted-response-headers.htm | 32 + test/wpt/tests/xhr/access-control-basic-denied.htm | 30 + .../access-control-basic-get-fail-non-simple.htm | 26 + ...trol-basic-non-cors-safelisted-content-type.htm | 30 + ...-control-basic-post-success-no-content-type.htm | 26 + ...-post-with-non-cors-safelisted-content-type.htm | 37 + .../xhr/access-control-basic-preflight-denied.htm | 31 + .../access-control-expose-headers-on-redirect.html | 33 + ...ccess-control-preflight-async-header-denied.htm | 39 + ...ccess-control-preflight-async-method-denied.htm | 38 + ...ccess-control-preflight-async-not-supported.htm | 37 + .../access-control-preflight-credential-async.htm | 29 + .../access-control-preflight-credential-sync.htm | 24 + .../xhr/access-control-preflight-headers-async.htm | 35 + .../xhr/access-control-preflight-headers-sync.htm | 29 + ...light-request-allow-headers-returns-star.any.js | 26 + 
...-control-preflight-request-header-lowercase.htm | 29 + ...-preflight-request-header-returns-origin.any.js | 26 + ...ess-control-preflight-request-header-sorted.htm | 28 + ...ss-control-preflight-request-headers-origin.htm | 29 + ...ontrol-preflight-request-invalid-status-301.htm | 28 + ...ontrol-preflight-request-invalid-status-400.htm | 28 + ...ontrol-preflight-request-invalid-status-501.htm | 28 + ...l-preflight-request-must-not-contain-cookie.htm | 57 + ...access-control-preflight-sync-header-denied.htm | 34 + ...access-control-preflight-sync-method-denied.htm | 33 + ...access-control-preflight-sync-not-supported.htm | 33 + .../access-control-recursive-failed-request.htm | 38 + .../xhr/access-control-response-with-body-sync.htm | 25 + .../xhr/access-control-response-with-body.htm | 29 + ...ccess-control-response-with-exposed-headers.htm | 38 + ...-control-sandboxed-iframe-allow-origin-null.htm | 32 + .../xhr/access-control-sandboxed-iframe-allow.htm | 32 + ...ol-sandboxed-iframe-denied-without-wildcard.htm | 43 + .../xhr/access-control-sandboxed-iframe-denied.htm | 41 + .../tests/xhr/allow-lists-starting-with-comma.htm | 33 + test/wpt/tests/xhr/anonymous-mode-unsupported.htm | 40 + test/wpt/tests/xhr/blob-range.any.js | 246 + .../xhr/close-worker-with-xhr-in-progress.html | 26 + test/wpt/tests/xhr/content-type-unmodified.any.js | 16 + test/wpt/tests/xhr/cookies.http.html | 41 + test/wpt/tests/xhr/cors-expose-star.sub.any.js | 52 + test/wpt/tests/xhr/cors-upload.any.js | 59 + test/wpt/tests/xhr/data-uri.htm | 41 + test/wpt/tests/xhr/event-abort.any.js | 15 + test/wpt/tests/xhr/event-error-order.sub.html | 35 + test/wpt/tests/xhr/event-error.sub.any.js | 28 + test/wpt/tests/xhr/event-load.any.js | 21 + test/wpt/tests/xhr/event-loadend.any.js | 19 + test/wpt/tests/xhr/event-loadstart-upload.any.js | 19 + test/wpt/tests/xhr/event-loadstart.any.js | 17 + test/wpt/tests/xhr/event-progress.any.js | 18 + .../tests/xhr/event-readystate-sync-open.any.js | 23 + .../tests/xhr/event-readystatechange-loaded.any.js | 23 + test/wpt/tests/xhr/event-timeout-order.any.js | 21 + test/wpt/tests/xhr/event-timeout.any.js | 18 + .../xhr/event-upload-progress-crossorigin.any.js | 26 + test/wpt/tests/xhr/event-upload-progress.any.js | 30 + .../xhr/firing-events-http-content-length.html | 32 + .../xhr/firing-events-http-no-content-length.html | 35 + test/wpt/tests/xhr/folder.txt | 1 + test/wpt/tests/xhr/formdata.html | 90 + .../wpt/tests/xhr/formdata/append-formelement.html | 52 + test/wpt/tests/xhr/formdata/append.any.js | 37 + .../xhr/formdata/constructor-formelement.html | 150 + .../tests/xhr/formdata/constructor-submitter.html | 100 + test/wpt/tests/xhr/formdata/constructor.any.js | 6 + .../wpt/tests/xhr/formdata/delete-formelement.html | 41 + test/wpt/tests/xhr/formdata/delete.any.js | 26 + test/wpt/tests/xhr/formdata/foreach.any.js | 56 + test/wpt/tests/xhr/formdata/get-formelement.html | 34 + test/wpt/tests/xhr/formdata/get.any.js | 28 + test/wpt/tests/xhr/formdata/has-formelement.html | 25 + test/wpt/tests/xhr/formdata/has.any.js | 19 + test/wpt/tests/xhr/formdata/iteration.any.js | 65 + test/wpt/tests/xhr/formdata/set-blob.any.js | 61 + test/wpt/tests/xhr/formdata/set-formelement.html | 51 + test/wpt/tests/xhr/formdata/set.any.js | 36 + .../tests/xhr/getallresponseheaders-cookies.htm | 38 + .../wpt/tests/xhr/getallresponseheaders-status.htm | 33 + test/wpt/tests/xhr/getallresponseheaders.htm | 35 + .../xhr/getresponseheader-case-insensitive.htm | 34 + .../xhr/getresponseheader-chunked-trailer.htm | 32 
+ .../xhr/getresponseheader-cookies-and-more.htm | 36 + .../tests/xhr/getresponseheader-error-state.htm | 36 + .../tests/xhr/getresponseheader-server-date.htm | 29 + .../xhr/getresponseheader-special-characters.htm | 34 + .../xhr/getresponseheader-unsent-opened-state.htm | 32 + test/wpt/tests/xhr/getresponseheader.any.js | 18 + test/wpt/tests/xhr/header-user-agent-async.htm | 26 + test/wpt/tests/xhr/header-user-agent-sync.htm | 20 + test/wpt/tests/xhr/headers-normalize-response.htm | 43 + test/wpt/tests/xhr/historical.html | 15 + test/wpt/tests/xhr/idlharness.any.js | 28 + test/wpt/tests/xhr/json.any.js | 23 + test/wpt/tests/xhr/loadstart-and-state.html | 40 + test/wpt/tests/xhr/open-after-abort.htm | 77 + test/wpt/tests/xhr/open-after-setrequestheader.htm | 33 + test/wpt/tests/xhr/open-after-stop.window.js | 43 + test/wpt/tests/xhr/open-during-abort-event.htm | 56 + .../wpt/tests/xhr/open-during-abort-processing.htm | 62 + test/wpt/tests/xhr/open-during-abort.htm | 33 + test/wpt/tests/xhr/open-method-bogus.htm | 28 + .../wpt/tests/xhr/open-method-case-insensitive.htm | 29 + test/wpt/tests/xhr/open-method-case-sensitive.htm | 31 + test/wpt/tests/xhr/open-method-insecure.htm | 29 + .../xhr/open-method-responsetype-set-sync.htm | 32 + test/wpt/tests/xhr/open-open-send.htm | 33 + test/wpt/tests/xhr/open-open-sync-send.htm | 31 + test/wpt/tests/xhr/open-parameters-toString.htm | 54 + test/wpt/tests/xhr/open-referer.htm | 20 + test/wpt/tests/xhr/open-send-during-abort.htm | 27 + test/wpt/tests/xhr/open-send-open.htm | 33 + test/wpt/tests/xhr/open-sync-open-send.htm | 41 + test/wpt/tests/xhr/open-url-about-blank-window.htm | 23 + .../xhr/open-url-base-inserted-after-open.htm | 24 + test/wpt/tests/xhr/open-url-base-inserted.htm | 24 + test/wpt/tests/xhr/open-url-base.htm | 22 + test/wpt/tests/xhr/open-url-encoding.htm | 26 + test/wpt/tests/xhr/open-url-fragment.htm | 38 + .../wpt/tests/xhr/open-url-javascript-window-2.htm | 19 + test/wpt/tests/xhr/open-url-javascript-window.htm | 28 + test/wpt/tests/xhr/open-url-multi-window-2.htm | 25 + test/wpt/tests/xhr/open-url-multi-window-3.htm | 25 + test/wpt/tests/xhr/open-url-multi-window-4.htm | 50 + test/wpt/tests/xhr/open-url-multi-window-5.htm | 32 + test/wpt/tests/xhr/open-url-multi-window-6.htm | 41 + test/wpt/tests/xhr/open-url-multi-window.htm | 31 + .../open-url-redirected-sharedworker-origin.htm | 11 + .../xhr/open-url-redirected-worker-origin.htm | 11 + test/wpt/tests/xhr/open-url-worker-origin.htm | 9 + test/wpt/tests/xhr/open-url-worker-simple.htm | 25 + .../xhr/open-user-password-non-same-origin.htm | 25 + test/wpt/tests/xhr/over-1-meg.any.js | 16 + test/wpt/tests/xhr/overridemimetype-blob.html | 57 + .../tests/xhr/overridemimetype-done-state.any.js | 20 + .../xhr/overridemimetype-edge-cases.window.js | 50 + ...etype-headers-received-state-force-shiftjis.htm | 34 + .../xhr/overridemimetype-invalid-mime-type.htm | 41 + .../tests/xhr/overridemimetype-loading-state.htm | 32 + .../overridemimetype-open-state-force-utf-8.htm | 27 + .../xhr/overridemimetype-open-state-force-xml.htm | 34 + ...ridemimetype-unsent-state-force-shiftjis.any.js | 12 + .../tests/xhr/preserve-ua-header-on-redirect.htm | 43 + .../xhr/progress-events-response-data-gzip.htm | 83 + test/wpt/tests/xhr/progressevent-constructor.html | 47 + test/wpt/tests/xhr/progressevent-interface.html | 49 + test/wpt/tests/xhr/request-content-length.any.js | 31 + test/wpt/tests/xhr/resources/accept-language.py | 3 + test/wpt/tests/xhr/resources/accept.py | 2 + 
.../xhr/resources/access-control-allow-lists.py | 26 + .../resources/access-control-allow-with-body.py | 15 + .../xhr/resources/access-control-auth-basic.py | 17 + .../access-control-basic-allow-no-credentials.py | 5 + .../resources/access-control-basic-allow-star.py | 5 + .../xhr/resources/access-control-basic-allow.py | 6 + ...ontrol-basic-cors-safelisted-request-headers.py | 16 + ...ntrol-basic-cors-safelisted-response-headers.py | 19 + .../xhr/resources/access-control-basic-denied.py | 5 + .../access-control-basic-options-not-supported.py | 12 + ...s-control-basic-preflight-cache-invalidation.py | 49 + ...access-control-basic-preflight-cache-timeout.py | 50 + .../access-control-basic-preflight-cache.py | 50 + .../resources/access-control-basic-put-allow.py | 22 + .../tests/xhr/resources/access-control-cookie.py | 16 + .../xhr/resources/access-control-origin-header.py | 8 + .../resources/access-control-preflight-denied.py | 49 + ...preflight-request-allow-headers-returns-star.py | 12 + ...s-control-preflight-request-header-lowercase.py | 16 + ...trol-preflight-request-header-returns-origin.py | 12 + ...cess-control-preflight-request-header-sorted.py | 18 + ...ess-control-preflight-request-headers-origin.py | 12 + ...ess-control-preflight-request-invalid-status.py | 16 + ...ol-preflight-request-must-not-contain-cookie.py | 12 + .../resources/access-control-sandboxed-iframe.html | 24 + test/wpt/tests/xhr/resources/auth1/auth.py | 12 + test/wpt/tests/xhr/resources/auth10/auth.py | 12 + test/wpt/tests/xhr/resources/auth11/auth.py | 12 + test/wpt/tests/xhr/resources/auth2/auth.py | 12 + test/wpt/tests/xhr/resources/auth2/corsenabled.py | 18 + test/wpt/tests/xhr/resources/auth3/auth.py | 12 + test/wpt/tests/xhr/resources/auth4/auth.py | 12 + test/wpt/tests/xhr/resources/auth5/auth.py | 15 + test/wpt/tests/xhr/resources/auth6/auth.py | 15 + test/wpt/tests/xhr/resources/auth7/corsenabled.py | 20 + .../resources/auth8/corsenabled-no-authorize.py | 20 + test/wpt/tests/xhr/resources/auth9/auth.py | 12 + test/wpt/tests/xhr/resources/authentication.py | 24 + test/wpt/tests/xhr/resources/bad-chunk-encoding.py | 17 + test/wpt/tests/xhr/resources/base.xml | 1 + test/wpt/tests/xhr/resources/chunked.py | 17 + test/wpt/tests/xhr/resources/conditional.py | 29 + test/wpt/tests/xhr/resources/content.py | 20 + test/wpt/tests/xhr/resources/corsenabled.py | 25 + test/wpt/tests/xhr/resources/delay.py | 7 + test/wpt/tests/xhr/resources/echo-content-cors.py | 23 + test/wpt/tests/xhr/resources/echo-content-type.py | 6 + test/wpt/tests/xhr/resources/echo-headers.py | 7 + test/wpt/tests/xhr/resources/echo-method.py | 16 + .../wpt/tests/xhr/resources/empty-div-utf8-html.py | 5 + test/wpt/tests/xhr/resources/folder.txt | 1 + test/wpt/tests/xhr/resources/form.py | 2 + test/wpt/tests/xhr/resources/get-set-cookie.py | 18 + test/wpt/tests/xhr/resources/gzip.py | 24 + .../xhr/resources/header-content-length-twice.asis | 3 + .../tests/xhr/resources/header-content-length.asis | 2 + test/wpt/tests/xhr/resources/header-user-agent.py | 15 + test/wpt/tests/xhr/resources/headers-basic.asis | 4 + .../tests/xhr/resources/headers-double-empty.asis | 3 + .../xhr/resources/headers-some-are-empty.asis | 7 + .../xhr/resources/headers-www-authenticate.asis | 4 + test/wpt/tests/xhr/resources/headers.asis | 6 + test/wpt/tests/xhr/resources/headers.py | 12 + test/wpt/tests/xhr/resources/image.gif | Bin 0 -> 167145 bytes test/wpt/tests/xhr/resources/img-utf8-html.py | 5 + test/wpt/tests/xhr/resources/img.jpg | Bin 0 -> 108761 bytes 
test/wpt/tests/xhr/resources/infinite-redirects.py | 24 + test/wpt/tests/xhr/resources/init.htm | 20 + test/wpt/tests/xhr/resources/inspect-headers.py | 36 + test/wpt/tests/xhr/resources/invalid-utf8-html.py | 5 + test/wpt/tests/xhr/resources/last-modified.py | 9 + .../xhr/resources/no-custom-header-on-preflight.py | 27 + test/wpt/tests/xhr/resources/nocors/folder.txt | 1 + test/wpt/tests/xhr/resources/over-1-meg.txt | 1 + test/wpt/tests/xhr/resources/parse-headers.py | 6 + test/wpt/tests/xhr/resources/pass.txt | 1 + test/wpt/tests/xhr/resources/redirect-cors.py | 20 + test/wpt/tests/xhr/resources/redirect.py | 16 + test/wpt/tests/xhr/resources/requri.py | 5 + test/wpt/tests/xhr/resources/reset-token.py | 5 + .../resources/responseType-document-in-worker.js | 9 + .../resources/responseXML-unavailable-in-worker.js | 9 + ...send-after-setting-document-domain-window-1.htm | 23 + ...send-after-setting-document-domain-window-2.htm | 20 + ...-after-setting-document-domain-window-helper.js | 32 + test/wpt/tests/xhr/resources/shift-jis-html.py | 6 + test/wpt/tests/xhr/resources/status.py | 11 + test/wpt/tests/xhr/resources/top.txt | 1 + test/wpt/tests/xhr/resources/trickle.py | 15 + test/wpt/tests/xhr/resources/upload.py | 17 + test/wpt/tests/xhr/resources/utf16-bom.json | Bin 0 -> 30 bytes test/wpt/tests/xhr/resources/utf16.txt | Bin 0 -> 18 bytes test/wpt/tests/xhr/resources/well-formed.xml | 4 + test/wpt/tests/xhr/resources/win-1252-html.py | 5 + test/wpt/tests/xhr/resources/win-1252-xml.py | 5 + .../xhr/resources/workerxhr-origin-referrer.js | 63 + test/wpt/tests/xhr/resources/workerxhr-simple.js | 9 + .../xhr/resources/xmlhttprequest-event-order.js | 83 + .../resources/xmlhttprequest-timeout-aborted.js | 15 + .../xmlhttprequest-timeout-abortedonmain.js | 8 + .../resources/xmlhttprequest-timeout-overrides.js | 12 + .../xmlhttprequest-timeout-overridesexpires.js | 12 + .../xhr/resources/xmlhttprequest-timeout-runner.js | 21 + .../xhr/resources/xmlhttprequest-timeout-simple.js | 6 + .../resources/xmlhttprequest-timeout-synconmain.js | 2 + .../xmlhttprequest-timeout-synconworker.js | 11 + .../xhr/resources/xmlhttprequest-timeout-twice.js | 6 + .../tests/xhr/resources/xmlhttprequest-timeout.js | 333 ++ test/wpt/tests/xhr/resources/zlib.py | 19 + test/wpt/tests/xhr/response-body-errors.any.js | 23 + test/wpt/tests/xhr/response-data-arraybuffer.htm | 54 + test/wpt/tests/xhr/response-data-blob.htm | 55 + test/wpt/tests/xhr/response-data-deflate.htm | 42 + test/wpt/tests/xhr/response-data-gzip.htm | 42 + test/wpt/tests/xhr/response-data-progress.htm | 52 + .../tests/xhr/response-invalid-responsetype.htm | 38 + test/wpt/tests/xhr/response-json.htm | 61 + test/wpt/tests/xhr/response-method.htm | 21 + test/wpt/tests/xhr/responseText-status.html | 33 + .../tests/xhr/responseType-document-in-worker.html | 13 + .../xhr/responseXML-unavailable-in-worker.html | 13 + test/wpt/tests/xhr/responsedocument-decoding.htm | 39 + test/wpt/tests/xhr/responsetext-decoding.htm | 93 + test/wpt/tests/xhr/responsetype.any.js | 135 + test/wpt/tests/xhr/responseurl.html | 37 + test/wpt/tests/xhr/responsexml-basic.htm | 33 + .../tests/xhr/responsexml-document-properties.htm | 123 + test/wpt/tests/xhr/responsexml-get-twice.htm | 66 + test/wpt/tests/xhr/responsexml-invalid-type.html | 21 + test/wpt/tests/xhr/responsexml-media-type.htm | 41 + .../tests/xhr/responsexml-non-document-types.htm | 45 + test/wpt/tests/xhr/responsexml-non-well-formed.htm | 30 + test/wpt/tests/xhr/security-consideration.sub.html | 36 + 
test/wpt/tests/xhr/send-accept-language.htm | 27 + test/wpt/tests/xhr/send-accept.htm | 24 + .../xhr/send-after-setting-document-domain.htm | 39 + .../send-authentication-basic-cors-not-enabled.htm | 29 + .../tests/xhr/send-authentication-basic-cors.htm | 35 + .../send-authentication-basic-repeat-no-args.htm | 33 + ...cation-basic-setrequestheader-and-arguments.htm | 36 + ...ion-basic-setrequestheader-existing-session.htm | 53 + .../send-authentication-basic-setrequestheader.htm | 36 + test/wpt/tests/xhr/send-authentication-basic.htm | 27 + ...nd-authentication-competing-names-passwords.htm | 50 + ...-authentication-cors-basic-setrequestheader.htm | 31 + ...uthentication-cors-setrequestheader-no-cred.htm | 62 + ...send-authentication-existing-session-manual.htm | 33 + .../xhr/send-authentication-prompt-2-manual.htm | 25 + .../xhr/send-authentication-prompt-manual.htm | 25 + .../wpt/tests/xhr/send-blob-with-no-mime-type.html | 61 + test/wpt/tests/xhr/send-conditional-cors.htm | 42 + test/wpt/tests/xhr/send-conditional.htm | 34 + test/wpt/tests/xhr/send-content-type-charset.htm | 115 + test/wpt/tests/xhr/send-content-type-string.htm | 26 + test/wpt/tests/xhr/send-data-arraybuffer.any.js | 31 + .../wpt/tests/xhr/send-data-arraybufferview.any.js | 18 + test/wpt/tests/xhr/send-data-blob.htm | 62 + test/wpt/tests/xhr/send-data-es-object.any.js | 58 + test/wpt/tests/xhr/send-data-formdata.any.js | 21 + .../tests/xhr/send-data-sharedarraybuffer.any.js | 27 + .../xhr/send-data-string-invalid-unicode.any.js | 46 + .../tests/xhr/send-data-unexpected-tostring.htm | 56 + test/wpt/tests/xhr/send-entity-body-basic.htm | 28 + .../tests/xhr/send-entity-body-document-bogus.htm | 26 + test/wpt/tests/xhr/send-entity-body-document.htm | 92 + test/wpt/tests/xhr/send-entity-body-empty.htm | 26 + .../tests/xhr/send-entity-body-get-head-async.htm | 39 + test/wpt/tests/xhr/send-entity-body-get-head.htm | 36 + test/wpt/tests/xhr/send-entity-body-none.htm | 40 + .../xhr/send-network-error-async-events.sub.htm | 47 + .../xhr/send-network-error-sync-events.sub.htm | 45 + .../tests/xhr/send-no-response-event-loadend.htm | 48 + .../tests/xhr/send-no-response-event-loadstart.htm | 48 + .../wpt/tests/xhr/send-no-response-event-order.htm | 45 + test/wpt/tests/xhr/send-non-same-origin.htm | 33 + test/wpt/tests/xhr/send-receive-utf16.htm | 37 + test/wpt/tests/xhr/send-redirect-bogus-sync.htm | 26 + test/wpt/tests/xhr/send-redirect-bogus.htm | 36 + test/wpt/tests/xhr/send-redirect-infinite-sync.htm | 24 + test/wpt/tests/xhr/send-redirect-infinite.htm | 35 + test/wpt/tests/xhr/send-redirect-no-location.htm | 40 + test/wpt/tests/xhr/send-redirect-post-upload.htm | 140 + test/wpt/tests/xhr/send-redirect-to-cors.htm | 92 + test/wpt/tests/xhr/send-redirect-to-non-cors.htm | 37 + test/wpt/tests/xhr/send-redirect.htm | 36 + test/wpt/tests/xhr/send-response-event-order.htm | 40 + .../xhr/send-response-upload-event-loadend.htm | 40 + .../xhr/send-response-upload-event-loadstart.htm | 39 + .../xhr/send-response-upload-event-progress.htm | 39 + test/wpt/tests/xhr/send-send.any.js | 7 + test/wpt/tests/xhr/send-sync-blocks-async.htm | 53 + .../tests/xhr/send-sync-no-response-event-load.htm | 38 + .../xhr/send-sync-no-response-event-loadend.htm | 38 + .../xhr/send-sync-no-response-event-order.htm | 51 + .../tests/xhr/send-sync-response-event-order.htm | 35 + test/wpt/tests/xhr/send-sync-timeout.htm | 29 + test/wpt/tests/xhr/send-timeout-events.htm | 62 + test/wpt/tests/xhr/send-usp.any.js | 46 + 
test/wpt/tests/xhr/setrequestheader-after-send.htm | 27 + .../xhr/setrequestheader-allow-empty-value.htm | 26 + .../setrequestheader-allow-whitespace-in-value.htm | 27 + .../wpt/tests/xhr/setrequestheader-before-open.htm | 18 + test/wpt/tests/xhr/setrequestheader-bogus-name.htm | 59 + .../wpt/tests/xhr/setrequestheader-bogus-value.htm | 36 + .../xhr/setrequestheader-case-insensitive.htm | 34 + .../tests/xhr/setrequestheader-combining.window.js | 12 + .../tests/xhr/setrequestheader-content-type.htm | 220 + .../tests/xhr/setrequestheader-header-allowed.htm | 34 + .../xhr/setrequestheader-header-forbidden.htm | 95 + .../xhr/setrequestheader-open-setrequestheader.htm | 53 + test/wpt/tests/xhr/status-async.htm | 62 + test/wpt/tests/xhr/status-basic.htm | 51 + test/wpt/tests/xhr/status-error.htm | 87 + test/wpt/tests/xhr/status.h2.window.js | 21 + test/wpt/tests/xhr/sync-no-progress.any.js | 13 + test/wpt/tests/xhr/sync-no-timeout.any.js | 16 + test/wpt/tests/xhr/sync-xhr-and-window-onload.html | 25 + .../xhr/sync-xhr-supported-by-feature-policy.html | 11 + test/wpt/tests/xhr/template-element.html | 36 + test/wpt/tests/xhr/thrown-error-in-events.html | 60 + test/wpt/tests/xhr/timeout-cors-async.htm | 43 + test/wpt/tests/xhr/timeout-multiple-fetches.html | 32 + test/wpt/tests/xhr/timeout-sync.htm | 25 + .../tests/xhr/xhr-authorization-redirect.any.js | 28 + test/wpt/tests/xhr/xhr-timeout-longtask.any.js | 14 + test/wpt/tests/xhr/xmlhttprequest-basic.htm | 45 + test/wpt/tests/xhr/xmlhttprequest-eventtarget.htm | 48 + .../xhr/xmlhttprequest-network-error-sync.htm | 34 + .../wpt/tests/xhr/xmlhttprequest-network-error.htm | 39 + ...prequest-sync-block-defer-scripts-subframe.html | 17 + .../xmlhttprequest-sync-block-defer-scripts.html | 15 + .../xhr/xmlhttprequest-sync-block-scripts.html | 22 + ...ttprequest-sync-default-feature-policy.sub.html | 32 + ...equest-sync-not-hang-scriptloader-subframe.html | 17 + .../xmlhttprequest-sync-not-hang-scriptloader.html | 16 + .../tests/xhr/xmlhttprequest-timeout-aborted.html | 29 + .../xhr/xmlhttprequest-timeout-abortedonmain.html | 25 + .../xhr/xmlhttprequest-timeout-overrides.html | 26 + .../xmlhttprequest-timeout-overridesexpires.html | 26 + .../tests/xhr/xmlhttprequest-timeout-reused.html | 49 + .../tests/xhr/xmlhttprequest-timeout-simple.html | 27 + .../xhr/xmlhttprequest-timeout-synconmain.html | 23 + .../tests/xhr/xmlhttprequest-timeout-twice.html | 28 + .../xhr/xmlhttprequest-timeout-worker-aborted.html | 31 + .../xmlhttprequest-timeout-worker-overrides.html | 27 + ...ttprequest-timeout-worker-overridesexpires.html | 28 + .../xhr/xmlhttprequest-timeout-worker-simple.html | 29 + ...xmlhttprequest-timeout-worker-synconworker.html | 28 + .../xhr/xmlhttprequest-timeout-worker-twice.html | 29 + test/wpt/tests/xhr/xmlhttprequest-unsent.htm | 36 + types/README.md | 6 + types/agent.d.ts | 31 + types/api.d.ts | 43 + types/balanced-pool.d.ts | 18 + types/cache.d.ts | 36 + types/client.d.ts | 97 + types/connector.d.ts | 34 + types/content-type.d.ts | 21 + types/cookies.d.ts | 28 + types/diagnostics-channel.d.ts | 67 + types/dispatcher.d.ts | 241 + types/errors.d.ts | 128 + types/fetch.d.ts | 209 + types/file.d.ts | 39 + types/filereader.d.ts | 54 + types/formdata.d.ts | 108 + types/global-dispatcher.d.ts | 9 + types/global-origin.d.ts | 7 + types/handlers.d.ts | 9 + types/header.d.ts | 4 + types/index.d.ts | 65 + types/interceptors.d.ts | 5 + types/mock-agent.d.ts | 50 + types/mock-client.d.ts | 25 + types/mock-errors.d.ts | 12 + types/mock-interceptor.d.ts | 93 + 
types/mock-pool.d.ts | 25 + types/patch.d.ts | 71 + types/pool-stats.d.ts | 19 + types/pool.d.ts | 28 + types/proxy-agent.d.ts | 30 + types/readable.d.ts | 61 + types/retry-handler.d.ts | 116 + types/webidl.d.ts | 220 + types/websocket.d.ts | 131 + 3690 files changed, 304422 insertions(+) create mode 100644 .dockerignore create mode 100644 .editorconfig create mode 100644 .github/ISSUE_TEMPLATE/bug-report.md create mode 100644 .github/ISSUE_TEMPLATE/feature-request.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/bench.yml create mode 100644 .github/workflows/codeql.yml create mode 100644 .github/workflows/dependency-review.yml create mode 100644 .github/workflows/fuzz.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/nodejs.yml create mode 100644 .github/workflows/publish-undici-types.yml create mode 100644 .github/workflows/scorecard.yml create mode 100644 .gitignore create mode 100755 .husky/pre-commit create mode 100644 .nojekyll create mode 100644 .npmignore create mode 100644 .taprc create mode 100644 CNAME create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md create mode 100644 GOVERNANCE.md create mode 100644 LICENSE create mode 100644 MAINTAINERS.md create mode 100644 README.md create mode 100644 SECURITY.md create mode 100644 benchmarks/benchmark-http2.js create mode 100644 benchmarks/benchmark-https.js create mode 100644 benchmarks/benchmark.js create mode 100644 benchmarks/server-http2.js create mode 100644 benchmarks/server-https.js create mode 100644 benchmarks/server.js create mode 100644 benchmarks/wait.js create mode 100644 binary-search/.gitignore create mode 100644 binary-search/.travis.yml create mode 100644 binary-search/README.md create mode 100644 binary-search/binary-search.d.ts create mode 100644 binary-search/index.js create mode 100644 binary-search/package.json create mode 100644 binary-search/test.js create mode 100644 build/Dockerfile create mode 100644 build/wasm.js create mode 100644 docs/api/Agent.md create mode 100644 docs/api/BalancedPool.md create mode 100644 docs/api/CacheStorage.md create mode 100644 docs/api/Client.md create mode 100644 docs/api/Connector.md create mode 100644 docs/api/ContentType.md create mode 100644 docs/api/Cookies.md create mode 100644 docs/api/DiagnosticsChannel.md create mode 100644 docs/api/DispatchInterceptor.md create mode 100644 docs/api/Dispatcher.md create mode 100644 docs/api/Errors.md create mode 100644 docs/api/Fetch.md create mode 100644 docs/api/MockAgent.md create mode 100644 docs/api/MockClient.md create mode 100644 docs/api/MockErrors.md create mode 100644 docs/api/MockPool.md create mode 100644 docs/api/Pool.md create mode 100644 docs/api/PoolStats.md create mode 100644 docs/api/ProxyAgent.md create mode 100644 docs/api/RetryHandler.md create mode 100644 docs/api/WebSocket.md create mode 100644 docs/api/api-lifecycle.md create mode 100644 docs/assets/lifecycle-diagram.png create mode 100644 docs/best-practices/client-certificate.md create mode 100644 docs/best-practices/mocking-request.md create mode 100644 docs/best-practices/proxy.md create mode 100644 docs/best-practices/writing-tests.md create mode 100644 docsify/sidebar.md create mode 100644 examples/ca-fingerprint/index.js create mode 100644 examples/fetch.js create mode 100644 examples/proxy-agent.js create mode 100644 examples/proxy/index.js create mode 100644 examples/proxy/proxy.js create mode 100644 
examples/request.js create mode 100644 fastify-busboy/.eslintrc.js create mode 100644 fastify-busboy/.gitattributes create mode 100644 fastify-busboy/.github/dependabot.yml create mode 100644 fastify-busboy/.github/workflows/ci.yml create mode 100644 fastify-busboy/.github/workflows/coverage.yml create mode 100644 fastify-busboy/.github/workflows/linting.yml create mode 100644 fastify-busboy/.gitignore create mode 100644 fastify-busboy/.taprc create mode 100644 fastify-busboy/CHANGELOG.md create mode 100644 fastify-busboy/LICENSE create mode 100644 fastify-busboy/README.md create mode 100644 fastify-busboy/bench/busboy-form-bench-latin1.js create mode 100644 fastify-busboy/bench/busboy-form-bench-utf8.js create mode 100644 fastify-busboy/bench/createMultipartBufferForEncodingBench.js create mode 100644 fastify-busboy/bench/dicer/dicer-bench-multipart-parser.js create mode 100644 fastify-busboy/bench/dicer/formidable-bench-multipart-parser.js create mode 100644 fastify-busboy/bench/dicer/multipartser-bench-multipart-parser.js create mode 100644 fastify-busboy/bench/dicer/multiparty-bench-multipart-parser.js create mode 100644 fastify-busboy/bench/dicer/parted-bench-multipart-parser.js create mode 100644 fastify-busboy/bench/dicer/parted-multipart.js create mode 100644 fastify-busboy/bench/fastify-busboy-form-bench-latin1.js create mode 100644 fastify-busboy/bench/fastify-busboy-form-bench-utf8.js create mode 100644 fastify-busboy/bench/parse-params.js create mode 100644 fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_12.json create mode 100644 fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_16.json create mode 100644 fastify-busboy/benchmarks/_results/Busboy_comparison-fastify-busboy-Node_16.json create mode 100644 fastify-busboy/benchmarks/busboy/contestants/busboy.js create mode 100644 fastify-busboy/benchmarks/busboy/contestants/fastify-busboy.js create mode 100644 fastify-busboy/benchmarks/busboy/data.js create mode 100644 fastify-busboy/benchmarks/busboy/executioner.js create mode 100644 fastify-busboy/benchmarks/busboy/regenerate.cmd create mode 100644 fastify-busboy/benchmarks/busboy/validator.js create mode 100644 fastify-busboy/benchmarks/common/commonBuilder.js create mode 100644 fastify-busboy/benchmarks/common/contestantResolver.js create mode 100644 fastify-busboy/benchmarks/common/executionUtils.js create mode 100644 fastify-busboy/benchmarks/common/resultUtils.js create mode 100644 fastify-busboy/benchmarks/common/resultsCombinator.js create mode 100644 fastify-busboy/benchmarks/package.json create mode 100644 fastify-busboy/deps/dicer/LICENSE create mode 100644 fastify-busboy/deps/dicer/lib/Dicer.js create mode 100644 fastify-busboy/deps/dicer/lib/HeaderParser.js create mode 100644 fastify-busboy/deps/dicer/lib/PartStream.js create mode 100644 fastify-busboy/deps/dicer/lib/dicer.d.ts create mode 100644 fastify-busboy/deps/streamsearch/sbmh.js create mode 100644 fastify-busboy/lib/main.d.ts create mode 100644 fastify-busboy/lib/main.js create mode 100644 fastify-busboy/lib/types/multipart.js create mode 100644 fastify-busboy/lib/types/urlencoded.js create mode 100644 fastify-busboy/lib/utils/Decoder.js create mode 100644 fastify-busboy/lib/utils/basename.js create mode 100644 fastify-busboy/lib/utils/decodeText.js create mode 100644 fastify-busboy/lib/utils/getLimit.js create mode 100644 fastify-busboy/lib/utils/parseParams.js create mode 100644 fastify-busboy/package.json create mode 100644 fastify-busboy/test/busboy-constructor.test.js 
create mode 100644 fastify-busboy/test/decoder.test.js create mode 100644 fastify-busboy/test/dicer-constructor.test.js create mode 100644 fastify-busboy/test/dicer-endfinish.test.js create mode 100644 fastify-busboy/test/dicer-export.test.js create mode 100644 fastify-busboy/test/dicer-headerparser.test.js create mode 100644 fastify-busboy/test/dicer-malformed-header.test.js create mode 100644 fastify-busboy/test/dicer-multipart-extra-trailer.test.js create mode 100644 fastify-busboy/test/dicer-multipart-nolisteners.test.js create mode 100644 fastify-busboy/test/dicer-multipart.test.js create mode 100644 fastify-busboy/test/fixtures/many-noend/original create mode 100644 fastify-busboy/test/fixtures/many-noend/part1 create mode 100644 fastify-busboy/test/fixtures/many-noend/part1.header create mode 100644 fastify-busboy/test/fixtures/many-noend/part2 create mode 100644 fastify-busboy/test/fixtures/many-noend/part2.header create mode 100644 fastify-busboy/test/fixtures/many-noend/part3 create mode 100644 fastify-busboy/test/fixtures/many-noend/part3.header create mode 100644 fastify-busboy/test/fixtures/many-noend/part4 create mode 100644 fastify-busboy/test/fixtures/many-noend/part4.header create mode 100644 fastify-busboy/test/fixtures/many-noend/part5 create mode 100644 fastify-busboy/test/fixtures/many-noend/part5.header create mode 100644 fastify-busboy/test/fixtures/many-noend/part6 create mode 100644 fastify-busboy/test/fixtures/many-noend/part6.header create mode 100644 fastify-busboy/test/fixtures/many-noend/part7.header create mode 100644 fastify-busboy/test/fixtures/many-wrongboundary/original create mode 100644 fastify-busboy/test/fixtures/many-wrongboundary/preamble create mode 100644 fastify-busboy/test/fixtures/many-wrongboundary/preamble.error create mode 100644 fastify-busboy/test/fixtures/many/original create mode 100644 fastify-busboy/test/fixtures/many/part1 create mode 100644 fastify-busboy/test/fixtures/many/part1.header create mode 100644 fastify-busboy/test/fixtures/many/part2 create mode 100644 fastify-busboy/test/fixtures/many/part2.header create mode 100644 fastify-busboy/test/fixtures/many/part3 create mode 100644 fastify-busboy/test/fixtures/many/part3.header create mode 100644 fastify-busboy/test/fixtures/many/part4 create mode 100644 fastify-busboy/test/fixtures/many/part4.header create mode 100644 fastify-busboy/test/fixtures/many/part5 create mode 100644 fastify-busboy/test/fixtures/many/part5.header create mode 100644 fastify-busboy/test/fixtures/many/part6 create mode 100644 fastify-busboy/test/fixtures/many/part6.header create mode 100644 fastify-busboy/test/fixtures/many/part7 create mode 100644 fastify-busboy/test/fixtures/many/part7.header create mode 100644 fastify-busboy/test/fixtures/nested-full/original create mode 100644 fastify-busboy/test/fixtures/nested-full/part1 create mode 100644 fastify-busboy/test/fixtures/nested-full/part1.header create mode 100644 fastify-busboy/test/fixtures/nested-full/part2 create mode 100644 fastify-busboy/test/fixtures/nested-full/part2.header create mode 100644 fastify-busboy/test/fixtures/nested-full/preamble.header create mode 100644 fastify-busboy/test/fixtures/nested/original create mode 100644 fastify-busboy/test/fixtures/nested/part1 create mode 100644 fastify-busboy/test/fixtures/nested/part1.header create mode 100644 fastify-busboy/test/fixtures/nested/part2 create mode 100644 fastify-busboy/test/fixtures/nested/part2.header create mode 100644 fastify-busboy/test/get-limit.test.js create mode 100644 
fastify-busboy/test/multipart-stream-pause.test.js create mode 100644 fastify-busboy/test/parse-params.test.js create mode 100644 fastify-busboy/test/streamsearch.test.js create mode 100644 fastify-busboy/test/types-multipart.test.js create mode 100644 fastify-busboy/test/types-urlencoded.test.js create mode 100644 fastify-busboy/test/types/dicer.test-d.ts create mode 100644 fastify-busboy/test/types/main.test-d.ts create mode 100644 fastify-busboy/tsconfig.json create mode 100644 index-fetch.js create mode 100644 index.d.ts create mode 100644 index.html create mode 100644 index.js create mode 100644 lib/agent.js create mode 100644 lib/api/abort-signal.js create mode 100644 lib/api/api-connect.js create mode 100644 lib/api/api-pipeline.js create mode 100644 lib/api/api-request.js create mode 100644 lib/api/api-stream.js create mode 100644 lib/api/api-upgrade.js create mode 100644 lib/api/index.js create mode 100644 lib/api/readable.js create mode 100644 lib/api/util.js create mode 100644 lib/balanced-pool.js create mode 100644 lib/cache/cache.js create mode 100644 lib/cache/cachestorage.js create mode 100644 lib/cache/symbols.js create mode 100644 lib/cache/util.js create mode 100644 lib/client.js create mode 100644 lib/compat/dispatcher-weakref.js create mode 100644 lib/cookies/constants.js create mode 100644 lib/cookies/index.js create mode 100644 lib/cookies/parse.js create mode 100644 lib/cookies/util.js create mode 100644 lib/core/connect.js create mode 100644 lib/core/errors.js create mode 100644 lib/core/request.js create mode 100644 lib/core/symbols.js create mode 100644 lib/core/util.js create mode 100644 lib/dispatcher-base.js create mode 100644 lib/dispatcher.js create mode 100644 lib/fetch/LICENSE create mode 100644 lib/fetch/body.js create mode 100644 lib/fetch/constants.js create mode 100644 lib/fetch/dataURL.js create mode 100644 lib/fetch/file.js create mode 100644 lib/fetch/formdata.js create mode 100644 lib/fetch/global.js create mode 100644 lib/fetch/headers.js create mode 100644 lib/fetch/index.js create mode 100644 lib/fetch/request.js create mode 100644 lib/fetch/response.js create mode 100644 lib/fetch/symbols.js create mode 100644 lib/fetch/util.js create mode 100644 lib/fetch/webidl.js create mode 100644 lib/fileapi/encoding.js create mode 100644 lib/fileapi/filereader.js create mode 100644 lib/fileapi/progressevent.js create mode 100644 lib/fileapi/symbols.js create mode 100644 lib/fileapi/util.js create mode 100644 lib/global.js create mode 100644 lib/handler/DecoratorHandler.js create mode 100644 lib/handler/RedirectHandler.js create mode 100644 lib/handler/RetryHandler.js create mode 100644 lib/interceptor/redirectInterceptor.js create mode 100644 lib/llhttp/constants.d.ts create mode 100644 lib/llhttp/constants.js create mode 100644 lib/llhttp/utils.d.ts create mode 100644 lib/llhttp/utils.js create mode 100644 lib/llhttp/wasm_build_env.txt create mode 100644 lib/mock/mock-agent.js create mode 100644 lib/mock/mock-client.js create mode 100644 lib/mock/mock-errors.js create mode 100644 lib/mock/mock-interceptor.js create mode 100644 lib/mock/mock-pool.js create mode 100644 lib/mock/mock-symbols.js create mode 100644 lib/mock/mock-utils.js create mode 100644 lib/mock/pending-interceptors-formatter.js create mode 100644 lib/mock/pluralizer.js create mode 100644 lib/node/fixed-queue.js create mode 100644 lib/pool-base.js create mode 100644 lib/pool-stats.js create mode 100644 lib/pool.js create mode 100644 lib/proxy-agent.js create mode 100644 lib/timers.js create 
mode 100644 lib/websocket/connection.js create mode 100644 lib/websocket/constants.js create mode 100644 lib/websocket/events.js create mode 100644 lib/websocket/frame.js create mode 100644 lib/websocket/receiver.js create mode 100644 lib/websocket/symbols.js create mode 100644 lib/websocket/util.js create mode 100644 lib/websocket/websocket.js create mode 100644 llhttp/.dockerignore create mode 100644 llhttp/.eslintrc.js create mode 100644 llhttp/.github/workflows/aiohttp.yml create mode 100644 llhttp/.github/workflows/ci.yaml create mode 100644 llhttp/.gitignore create mode 100644 llhttp/.npmrc create mode 100644 llhttp/CMakeLists.txt create mode 100644 llhttp/CNAME create mode 100644 llhttp/CODE_OF_CONDUCT.md create mode 100644 llhttp/Dockerfile create mode 100644 llhttp/LICENSE-MIT create mode 100644 llhttp/Makefile create mode 100644 llhttp/README.md create mode 100644 llhttp/_config.yml create mode 100644 llhttp/bench/index.ts create mode 100644 llhttp/bin/build_wasm.ts create mode 100755 llhttp/bin/generate.ts create mode 100644 llhttp/docs/releasing.md create mode 100644 llhttp/examples/wasm.ts create mode 100644 llhttp/images/http-loose-none.png create mode 100644 llhttp/images/http-strict-none.png create mode 100644 llhttp/libllhttp.pc.in create mode 100644 llhttp/package-lock.json create mode 100644 llhttp/package.json create mode 100644 llhttp/src/common.gypi create mode 100644 llhttp/src/llhttp.gyp create mode 100644 llhttp/src/llhttp.ts create mode 100644 llhttp/src/llhttp/c-headers.ts create mode 100644 llhttp/src/llhttp/constants.ts create mode 100644 llhttp/src/llhttp/http.ts create mode 100644 llhttp/src/llhttp/url.ts create mode 100644 llhttp/src/llhttp/utils.ts create mode 100644 llhttp/src/native/api.c create mode 100644 llhttp/src/native/api.h create mode 100644 llhttp/src/native/http.c create mode 100644 llhttp/test/fixtures/extra.c create mode 100644 llhttp/test/fixtures/index.ts create mode 100644 llhttp/test/fuzzers/fuzz_parser.c create mode 100644 llhttp/test/md-test.ts create mode 100644 llhttp/test/request/connection.md create mode 100644 llhttp/test/request/content-length.md create mode 100644 llhttp/test/request/finish.md create mode 100644 llhttp/test/request/invalid.md create mode 100644 llhttp/test/request/lenient-headers.md create mode 100644 llhttp/test/request/lenient-version.md create mode 100644 llhttp/test/request/method.md create mode 100644 llhttp/test/request/pausing.md create mode 100644 llhttp/test/request/pipelining.md create mode 100644 llhttp/test/request/sample.md create mode 100644 llhttp/test/request/transfer-encoding.md create mode 100644 llhttp/test/request/uri.md create mode 100644 llhttp/test/response/connection.md create mode 100644 llhttp/test/response/content-length.md create mode 100644 llhttp/test/response/finish.md create mode 100644 llhttp/test/response/invalid.md create mode 100644 llhttp/test/response/lenient-version.md create mode 100644 llhttp/test/response/pausing.md create mode 100644 llhttp/test/response/pipelining.md create mode 100644 llhttp/test/response/sample.md create mode 100644 llhttp/test/response/transfer-encoding.md create mode 100644 llhttp/test/url.md create mode 100644 llhttp/tsconfig.json create mode 100644 llhttp/tslint.json create mode 100644 llparse-builder/.gitignore create mode 100644 llparse-builder/.travis.yml create mode 100644 llparse-builder/README.md create mode 100644 llparse-builder/package-lock.json create mode 100644 llparse-builder/package.json create mode 100644 
llparse-builder/src/builder.ts create mode 100644 llparse-builder/src/code/and.ts create mode 100644 llparse-builder/src/code/base.ts create mode 100644 llparse-builder/src/code/creator.ts create mode 100644 llparse-builder/src/code/field-value.ts create mode 100644 llparse-builder/src/code/field.ts create mode 100644 llparse-builder/src/code/index.ts create mode 100644 llparse-builder/src/code/is-equal.ts create mode 100644 llparse-builder/src/code/load.ts create mode 100644 llparse-builder/src/code/match.ts create mode 100644 llparse-builder/src/code/mul-add.ts create mode 100644 llparse-builder/src/code/or.ts create mode 100644 llparse-builder/src/code/span.ts create mode 100644 llparse-builder/src/code/store.ts create mode 100644 llparse-builder/src/code/test.ts create mode 100644 llparse-builder/src/code/update.ts create mode 100644 llparse-builder/src/code/value.ts create mode 100644 llparse-builder/src/edge.ts create mode 100644 llparse-builder/src/loop-checker/index.ts create mode 100644 llparse-builder/src/loop-checker/lattice.ts create mode 100644 llparse-builder/src/node/base.ts create mode 100644 llparse-builder/src/node/consume.ts create mode 100644 llparse-builder/src/node/error.ts create mode 100644 llparse-builder/src/node/index.ts create mode 100644 llparse-builder/src/node/invoke.ts create mode 100644 llparse-builder/src/node/match.ts create mode 100644 llparse-builder/src/node/pause.ts create mode 100644 llparse-builder/src/node/span-end.ts create mode 100644 llparse-builder/src/node/span-start.ts create mode 100644 llparse-builder/src/property.ts create mode 100644 llparse-builder/src/reachability.ts create mode 100644 llparse-builder/src/span-allocator.ts create mode 100644 llparse-builder/src/span.ts create mode 100644 llparse-builder/src/transform/base.ts create mode 100644 llparse-builder/src/transform/creator.ts create mode 100644 llparse-builder/src/transform/index.ts create mode 100644 llparse-builder/src/transform/to-lower-unsafe.ts create mode 100644 llparse-builder/src/transform/to-lower.ts create mode 100644 llparse-builder/src/utils.ts create mode 100644 llparse-builder/test/builder-test.ts create mode 100644 llparse-builder/test/loop-checker-test.ts create mode 100644 llparse-builder/test/span-allocator-test.ts create mode 100644 llparse-builder/tsconfig.json create mode 100644 llparse-builder/tslint.json create mode 100644 llparse-frontend/.gitignore create mode 100644 llparse-frontend/.travis.yml create mode 100644 llparse-frontend/README.md create mode 100644 llparse-frontend/package-lock.json create mode 100644 llparse-frontend/package.json create mode 100644 llparse-frontend/src/code/and.ts create mode 100644 llparse-frontend/src/code/base.ts create mode 100644 llparse-frontend/src/code/external.ts create mode 100644 llparse-frontend/src/code/field-value.ts create mode 100644 llparse-frontend/src/code/field.ts create mode 100644 llparse-frontend/src/code/index.ts create mode 100644 llparse-frontend/src/code/is-equal.ts create mode 100644 llparse-frontend/src/code/load.ts create mode 100644 llparse-frontend/src/code/match.ts create mode 100644 llparse-frontend/src/code/mul-add.ts create mode 100644 llparse-frontend/src/code/or.ts create mode 100644 llparse-frontend/src/code/span.ts create mode 100644 llparse-frontend/src/code/store.ts create mode 100644 llparse-frontend/src/code/test.ts create mode 100644 llparse-frontend/src/code/update.ts create mode 100644 llparse-frontend/src/code/value.ts create mode 100644 llparse-frontend/src/container/index.ts 
create mode 100644 llparse-frontend/src/container/wrap.ts create mode 100644 llparse-frontend/src/enumerator.ts create mode 100644 llparse-frontend/src/frontend.ts create mode 100644 llparse-frontend/src/implementation/code.ts create mode 100644 llparse-frontend/src/implementation/full.ts create mode 100644 llparse-frontend/src/implementation/index.ts create mode 100644 llparse-frontend/src/implementation/node.ts create mode 100644 llparse-frontend/src/implementation/transform.ts create mode 100644 llparse-frontend/src/namespace/frontend.ts create mode 100644 llparse-frontend/src/node/base.ts create mode 100644 llparse-frontend/src/node/consume.ts create mode 100644 llparse-frontend/src/node/empty.ts create mode 100644 llparse-frontend/src/node/error.ts create mode 100644 llparse-frontend/src/node/index.ts create mode 100644 llparse-frontend/src/node/invoke.ts create mode 100644 llparse-frontend/src/node/match.ts create mode 100644 llparse-frontend/src/node/pause.ts create mode 100644 llparse-frontend/src/node/sequence.ts create mode 100644 llparse-frontend/src/node/single.ts create mode 100644 llparse-frontend/src/node/slot.ts create mode 100644 llparse-frontend/src/node/span-end.ts create mode 100644 llparse-frontend/src/node/span-start.ts create mode 100644 llparse-frontend/src/node/table-lookup.ts create mode 100644 llparse-frontend/src/peephole.ts create mode 100644 llparse-frontend/src/span-field.ts create mode 100644 llparse-frontend/src/transform/base.ts create mode 100644 llparse-frontend/src/transform/id.ts create mode 100644 llparse-frontend/src/transform/index.ts create mode 100644 llparse-frontend/src/transform/to-lower-unsafe.ts create mode 100644 llparse-frontend/src/transform/to-lower.ts create mode 100644 llparse-frontend/src/trie/empty.ts create mode 100644 llparse-frontend/src/trie/index.ts create mode 100644 llparse-frontend/src/trie/node.ts create mode 100644 llparse-frontend/src/trie/sequence.ts create mode 100644 llparse-frontend/src/trie/single.ts create mode 100644 llparse-frontend/src/utils/identifier.ts create mode 100644 llparse-frontend/src/utils/index.ts create mode 100644 llparse-frontend/src/wrap.ts create mode 100644 llparse-frontend/test/container-test.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/and.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/base.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/index.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/is-equal.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/load.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/match.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/mul-add.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/or.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/span.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/store.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/test.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/update.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/code/value.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/index.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/base.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/consume.ts create mode 100644 
llparse-frontend/test/fixtures/a-implementation/node/empty.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/error.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/index.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/invoke.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/pause.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/sequence.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/single.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/span-end.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/span-start.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/node/table-lookup.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/transform/base.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/transform/id.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/transform/index.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/transform/to-lower-unsafe.ts create mode 100644 llparse-frontend/test/fixtures/a-implementation/transform/to-lower.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/and.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/base.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/index.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/is-equal.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/load.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/match.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/mul-add.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/or.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/span.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/store.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/test.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/update.ts create mode 100644 llparse-frontend/test/fixtures/implementation/code/value.ts create mode 100644 llparse-frontend/test/fixtures/implementation/index.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/base.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/consume.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/empty.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/error.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/index.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/invoke.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/pause.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/sequence.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/single.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/span-end.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/span-start.ts create mode 100644 llparse-frontend/test/fixtures/implementation/node/table-lookup.ts create mode 100644 llparse-frontend/test/fixtures/implementation/transform/base.ts create mode 100644 llparse-frontend/test/fixtures/implementation/transform/id.ts create mode 100644 
llparse-frontend/test/fixtures/implementation/transform/index.ts create mode 100644 llparse-frontend/test/fixtures/implementation/transform/to-lower-unsafe.ts create mode 100644 llparse-frontend/test/fixtures/implementation/transform/to-lower.ts create mode 100644 llparse-frontend/test/frontend-test.ts create mode 100644 llparse-frontend/tsconfig.json create mode 100644 llparse-frontend/tslint.json create mode 100644 llparse/.gitignore create mode 100644 llparse/.travis.yml create mode 100644 llparse/CNAME create mode 100644 llparse/CODE_OF_CONDUCT.md create mode 100644 llparse/LICENSE-MIT create mode 100644 llparse/README.md create mode 100644 llparse/_config.yml create mode 100644 llparse/examples/http/.gitignore create mode 100644 llparse/examples/http/Makefile create mode 100644 llparse/examples/http/index.ts create mode 100644 llparse/examples/http/main.c create mode 100644 llparse/package-lock.json create mode 100644 llparse/package.json create mode 100644 llparse/src/api.ts create mode 100644 llparse/src/compiler/header-builder.ts create mode 100644 llparse/src/compiler/index.ts create mode 100644 llparse/src/implementation/c/code/and.ts create mode 100644 llparse/src/implementation/c/code/base.ts create mode 100644 llparse/src/implementation/c/code/external.ts create mode 100644 llparse/src/implementation/c/code/field.ts create mode 100644 llparse/src/implementation/c/code/index.ts create mode 100644 llparse/src/implementation/c/code/is-equal.ts create mode 100644 llparse/src/implementation/c/code/load.ts create mode 100644 llparse/src/implementation/c/code/mul-add.ts create mode 100644 llparse/src/implementation/c/code/or.ts create mode 100644 llparse/src/implementation/c/code/store.ts create mode 100644 llparse/src/implementation/c/code/test.ts create mode 100644 llparse/src/implementation/c/code/update.ts create mode 100644 llparse/src/implementation/c/compilation.ts create mode 100644 llparse/src/implementation/c/constants.ts create mode 100644 llparse/src/implementation/c/helpers/match-sequence.ts create mode 100644 llparse/src/implementation/c/index.ts create mode 100644 llparse/src/implementation/c/node/base.ts create mode 100644 llparse/src/implementation/c/node/consume.ts create mode 100644 llparse/src/implementation/c/node/empty.ts create mode 100644 llparse/src/implementation/c/node/error.ts create mode 100644 llparse/src/implementation/c/node/index.ts create mode 100644 llparse/src/implementation/c/node/invoke.ts create mode 100644 llparse/src/implementation/c/node/pause.ts create mode 100644 llparse/src/implementation/c/node/sequence.ts create mode 100644 llparse/src/implementation/c/node/single.ts create mode 100644 llparse/src/implementation/c/node/span-end.ts create mode 100644 llparse/src/implementation/c/node/span-start.ts create mode 100644 llparse/src/implementation/c/node/table-lookup.ts create mode 100644 llparse/src/implementation/c/transform/base.ts create mode 100644 llparse/src/implementation/c/transform/id.ts create mode 100644 llparse/src/implementation/c/transform/index.ts create mode 100644 llparse/src/implementation/c/transform/to-lower-unsafe.ts create mode 100644 llparse/src/implementation/c/transform/to-lower.ts create mode 100644 llparse/test/code-test.ts create mode 100644 llparse/test/compiler-test.ts create mode 100644 llparse/test/consume-test.ts create mode 100644 llparse/test/fixtures/extra.c create mode 100644 llparse/test/fixtures/index.ts create mode 100644 llparse/test/resumption-test.ts create mode 100644 llparse/test/span-test.ts 
create mode 100644 llparse/test/transform-test.ts create mode 100644 llparse/tsconfig.json create mode 100644 llparse/tslint.json create mode 100644 package.json create mode 100644 scripts/generate-pem.js create mode 100644 scripts/generate-undici-types-package-json.js create mode 100644 scripts/verifyVersion.js create mode 100644 test/abort-controller.js create mode 100644 test/abort-event-emitter.js create mode 100644 test/agent.js create mode 100644 test/async_hooks.js create mode 100644 test/autoselectfamily.js create mode 100644 test/balanced-pool.js create mode 100644 test/ca-fingerprint.js create mode 100644 test/client-abort.js create mode 100644 test/client-connect.js create mode 100644 test/client-dispatch.js create mode 100644 test/client-errors.js create mode 100644 test/client-head-reset-override.js create mode 100644 test/client-idempotent-body.js create mode 100644 test/client-keep-alive.js create mode 100644 test/client-node-max-header-size.js create mode 100644 test/client-pipeline.js create mode 100644 test/client-pipelining.js create mode 100644 test/client-post.js create mode 100644 test/client-reconnect.js create mode 100644 test/client-request.js create mode 100644 test/client-stream.js create mode 100644 test/client-timeout.js create mode 100644 test/client-unref.js create mode 100644 test/client-upgrade.js create mode 100644 test/client-write-max-listeners.js create mode 100644 test/client.js create mode 100644 test/close-and-destroy.js create mode 100644 test/connect-abort.js create mode 100644 test/connect-errconnect.js create mode 100644 test/connect-timeout.js create mode 100644 test/content-length.js create mode 100644 test/cookie/cookies.js create mode 100644 test/cookie/global-headers.js create mode 100644 test/diagnostics-channel/connect-error.js create mode 100644 test/diagnostics-channel/error.js create mode 100644 test/diagnostics-channel/get.js create mode 100644 test/diagnostics-channel/post-stream.js create mode 100644 test/diagnostics-channel/post.js create mode 100644 test/dispatcher.js create mode 100644 test/errors.js create mode 100644 test/esm-wrapper.js create mode 100644 test/fetch/407-statuscode-window-null.js create mode 100644 test/fetch/abort.js create mode 100644 test/fetch/abort2.js create mode 100644 test/fetch/about-uri.js create mode 100644 test/fetch/blob-uri.js create mode 100644 test/fetch/bundle.js create mode 100644 test/fetch/client-error-stack-trace.js create mode 100644 test/fetch/client-fetch.js create mode 100644 test/fetch/client-node-max-header-size.js create mode 100644 test/fetch/content-length.js create mode 100644 test/fetch/cookies.js create mode 100644 test/fetch/data-uri.js create mode 100644 test/fetch/encoding.js create mode 100644 test/fetch/fetch-leak.js create mode 100644 test/fetch/fetch-timeouts.js create mode 100644 test/fetch/file.js create mode 100644 test/fetch/formdata.js create mode 100644 test/fetch/general.js create mode 100644 test/fetch/headers.js create mode 100644 test/fetch/http2.js create mode 100644 test/fetch/integrity.js create mode 100644 test/fetch/issue-1447.js create mode 100644 test/fetch/issue-2009.js create mode 100644 test/fetch/issue-2021.js create mode 100644 test/fetch/issue-2171.js create mode 100644 test/fetch/issue-2242.js create mode 100644 test/fetch/issue-2318.js create mode 100644 test/fetch/issue-node-46525.js create mode 100644 test/fetch/iterators.js create mode 100644 test/fetch/jsdom-abortcontroller-1910-1464495619.js create mode 100644 
test/fetch/redirect-cross-origin-header.js create mode 100644 test/fetch/redirect.js create mode 100644 test/fetch/relative-url.js create mode 100644 test/fetch/request.js create mode 100644 test/fetch/resource-timing.js create mode 100644 test/fetch/response-json.js create mode 100644 test/fetch/response.js create mode 100644 test/fetch/user-agent.js create mode 100644 test/fetch/util.js create mode 100644 test/fixed-queue.js create mode 100644 test/fixtures/ca.pem create mode 100644 test/fixtures/cert.pem create mode 100644 test/fixtures/client-ca-crt.pem create mode 100644 test/fixtures/client-crt-2048.pem create mode 100644 test/fixtures/client-crt.pem create mode 100644 test/fixtures/client-key-2048.pem create mode 100644 test/fixtures/client-key.pem create mode 100644 test/fixtures/key.pem create mode 100644 test/fuzzing/client/client-fuzz-body.js create mode 100644 test/fuzzing/client/client-fuzz-headers.js create mode 100644 test/fuzzing/client/client-fuzz-options.js create mode 100644 test/fuzzing/client/index.js create mode 100644 test/fuzzing/fuzz.js create mode 100644 test/fuzzing/server/index.js create mode 100644 test/fuzzing/server/server-fuzz-append-data.js create mode 100644 test/fuzzing/server/server-fuzz-split-data.js create mode 100644 test/gc.js create mode 100644 test/get-head-body.js create mode 100644 test/headers-as-array.js create mode 100644 test/headers-crlf.js create mode 100644 test/http-100.js create mode 100644 test/http-req-destroy.js create mode 100644 test/http2-alpn.js create mode 100644 test/http2.js create mode 100644 test/https.js create mode 100644 test/imports/undici-import.ts create mode 100644 test/inflight-and-close.js create mode 100644 test/invalid-headers.js create mode 100644 test/issue-1670.js create mode 100644 test/issue-1903.js create mode 100644 test/issue-2065.js create mode 100644 test/issue-2078.js create mode 100644 test/issue-2349.js create mode 100644 test/issue-803.js create mode 100644 test/issue-810.js create mode 100644 test/jest/instanceof-error.test.js create mode 100644 test/jest/interceptor.test.js create mode 100644 test/jest/issue-1757.test.js create mode 100644 test/jest/mock-agent.test.js create mode 100644 test/jest/mock-scope.test.js create mode 100644 test/jest/test.js create mode 100644 test/max-headers.js create mode 100644 test/max-response-size.js create mode 100644 test/mock-agent.js create mode 100644 test/mock-client.js create mode 100644 test/mock-errors.js create mode 100644 test/mock-interceptor-unused-assertions.js create mode 100644 test/mock-interceptor.js create mode 100644 test/mock-pool.js create mode 100644 test/mock-scope.js create mode 100644 test/mock-utils.js create mode 100644 test/no-strict-content-length.js create mode 100644 test/node-fetch/LICENSE create mode 100644 test/node-fetch/headers.js create mode 100644 test/node-fetch/main.js create mode 100644 test/node-fetch/mock.js create mode 100644 test/node-fetch/request.js create mode 100644 test/node-fetch/response.js create mode 100644 test/node-fetch/utils/chai-timeout.js create mode 100644 test/node-fetch/utils/dummy.txt create mode 100644 test/node-fetch/utils/read-stream.js create mode 100644 test/node-fetch/utils/server.js create mode 100644 test/parser-issues.js create mode 100644 test/pipeline-pipelining.js create mode 100644 test/pool.js create mode 100644 test/promises.js create mode 100644 test/proxy-agent.js create mode 100644 test/proxy.js create mode 100644 test/readable.test.js create mode 100644 test/redirect-pipeline.js 
create mode 100644 test/redirect-relative.js create mode 100644 test/redirect-request.js create mode 100644 test/redirect-stream.js create mode 100644 test/redirect-upgrade.js create mode 100644 test/request-crlf.js create mode 100644 test/request-timeout.js create mode 100644 test/request-timeout2.js create mode 100644 test/request.js create mode 100644 test/retry-handler.js create mode 100644 test/socket-back-pressure.js create mode 100644 test/socket-timeout.js create mode 100644 test/stream-compat.js create mode 100644 test/tls-client-cert.js create mode 100644 test/tls-session-reuse.js create mode 100644 test/tls.js create mode 100644 test/trailers.js create mode 100644 test/types/agent.test-d.ts create mode 100644 test/types/api.test-d.ts create mode 100644 test/types/balanced-pool.test-d.ts create mode 100644 test/types/cache-storage.test-d.ts create mode 100644 test/types/client.test-d.ts create mode 100644 test/types/connector.test-d.ts create mode 100644 test/types/diagnostics-channel.test-d.ts create mode 100644 test/types/dispatcher.events.test-d.ts create mode 100644 test/types/dispatcher.test-d.ts create mode 100644 test/types/errors.test-d.ts create mode 100644 test/types/fetch.test-d.ts create mode 100644 test/types/formdata.test-d.ts create mode 100644 test/types/global-dispatcher.test-d.ts create mode 100644 test/types/header.test-d.ts create mode 100644 test/types/index.test-d.ts create mode 100644 test/types/interceptor.test-d.ts create mode 100644 test/types/mock-agent.test-d.ts create mode 100644 test/types/mock-client.test-d.ts create mode 100644 test/types/mock-errors.test-d.ts create mode 100644 test/types/mock-interceptor.test-d.ts create mode 100644 test/types/mock-pool.test-d.ts create mode 100644 test/types/pool.test-d.ts create mode 100644 test/types/proxy-agent.test-d.ts create mode 100644 test/types/readable.test-d.ts create mode 100644 test/unix.js create mode 100644 test/util.js create mode 100644 test/utils/async-iterators.js create mode 100644 test/utils/esm-wrapper.mjs create mode 100644 test/utils/formdata.js create mode 100644 test/utils/redirecting-servers.js create mode 100644 test/utils/stream.js create mode 100644 test/validations.js create mode 100644 test/webidl/converters.js create mode 100644 test/webidl/helpers.js create mode 100644 test/webidl/util.js create mode 100644 test/websocket/close.js create mode 100644 test/websocket/constructor.js create mode 100644 test/websocket/custom-headers.js create mode 100644 test/websocket/diagnostics-channel.js create mode 100644 test/websocket/events.js create mode 100644 test/websocket/fragments.js create mode 100644 test/websocket/frame.js create mode 100644 test/websocket/opening-handshake.js create mode 100644 test/websocket/ping-pong.js create mode 100644 test/websocket/receive.js create mode 100644 test/websocket/send.js create mode 100644 test/websocket/websocketinit.js create mode 100644 test/wpt/runner/runner.mjs create mode 100644 test/wpt/runner/util.mjs create mode 100644 test/wpt/runner/worker.mjs create mode 100644 test/wpt/server/routes/network-partition-key.mjs create mode 100644 test/wpt/server/routes/redirect.mjs create mode 100644 test/wpt/server/server.mjs create mode 100644 test/wpt/server/websocket.mjs create mode 100644 test/wpt/start-FileAPI.mjs create mode 100644 test/wpt/start-cacheStorage.mjs create mode 100644 test/wpt/start-fetch.mjs create mode 100644 test/wpt/start-mimesniff.mjs create mode 100644 test/wpt/start-websockets.mjs create mode 100644 test/wpt/start-xhr.mjs 
create mode 100644 test/wpt/status/FileAPI.status.json create mode 100644 test/wpt/status/fetch.status.json create mode 100644 test/wpt/status/mimesniff.status.json create mode 100644 test/wpt/status/service-workers/cache-storage.status.json create mode 100644 test/wpt/status/websockets.status.json create mode 100644 test/wpt/status/xhr/formdata.status.json create mode 100644 test/wpt/tests/.azure-pipelines.yml create mode 100644 test/wpt/tests/.gitattributes create mode 100644 test/wpt/tests/.gitignore create mode 100644 test/wpt/tests/.mailmap create mode 100644 test/wpt/tests/.taskcluster.yml create mode 100644 test/wpt/tests/CODEOWNERS create mode 100644 test/wpt/tests/CODE_OF_CONDUCT.md create mode 100644 test/wpt/tests/CONTRIBUTING.md create mode 100644 test/wpt/tests/FileAPI/Blob-methods-from-detached-frame.html create mode 100644 test/wpt/tests/FileAPI/BlobURL/cross-partition.tentative.https.html create mode 100644 test/wpt/tests/FileAPI/BlobURL/support/file_test2.txt create mode 100644 test/wpt/tests/FileAPI/BlobURL/test2-manual.html create mode 100644 test/wpt/tests/FileAPI/FileReader/progress_event_bubbles_cancelable.html create mode 100644 test/wpt/tests/FileAPI/FileReader/support/file_test1.txt create mode 100644 test/wpt/tests/FileAPI/FileReader/test_errors-manual.html create mode 100644 test/wpt/tests/FileAPI/FileReader/test_notreadableerrors-manual.html create mode 100644 test/wpt/tests/FileAPI/FileReader/test_securityerrors-manual.html create mode 100644 test/wpt/tests/FileAPI/FileReader/workers.html create mode 100644 test/wpt/tests/FileAPI/FileReaderSync.worker.js create mode 100644 test/wpt/tests/FileAPI/META.yml create mode 100644 test/wpt/tests/FileAPI/blob/Blob-array-buffer.any.js create mode 100644 test/wpt/tests/FileAPI/blob/Blob-constructor-dom.window.js create mode 100644 test/wpt/tests/FileAPI/blob/Blob-constructor-endings.html create mode 100644 test/wpt/tests/FileAPI/blob/Blob-constructor.any.js create mode 100644 test/wpt/tests/FileAPI/blob/Blob-in-worker.worker.js create mode 100644 test/wpt/tests/FileAPI/blob/Blob-slice-overflow.any.js create mode 100644 test/wpt/tests/FileAPI/blob/Blob-slice.any.js create mode 100644 test/wpt/tests/FileAPI/blob/Blob-stream-byob-crash.html create mode 100644 test/wpt/tests/FileAPI/blob/Blob-stream-sync-xhr-crash.html create mode 100644 test/wpt/tests/FileAPI/blob/Blob-stream.any.js create mode 100644 test/wpt/tests/FileAPI/blob/Blob-text.any.js create mode 100644 test/wpt/tests/FileAPI/file/File-constructor-endings.html create mode 100644 test/wpt/tests/FileAPI/file/File-constructor.any.js create mode 100644 test/wpt/tests/FileAPI/file/Worker-read-file-constructor.worker.js create mode 100644 test/wpt/tests/FileAPI/file/resources/echo-content-escaped.py create mode 100644 test/wpt/tests/FileAPI/file/send-file-form-controls.html create mode 100644 test/wpt/tests/FileAPI/file/send-file-form-iso-2022-jp.html create mode 100644 test/wpt/tests/FileAPI/file/send-file-form-punctuation.html create mode 100644 test/wpt/tests/FileAPI/file/send-file-form-utf-8.html create mode 100644 test/wpt/tests/FileAPI/file/send-file-form-windows-1252.html create mode 100644 test/wpt/tests/FileAPI/file/send-file-form-x-user-defined.html create mode 100644 test/wpt/tests/FileAPI/file/send-file-form.html create mode 100644 test/wpt/tests/FileAPI/file/send-file-formdata-controls.any.js create mode 100644 test/wpt/tests/FileAPI/file/send-file-formdata-punctuation.any.js create mode 100644 test/wpt/tests/FileAPI/file/send-file-formdata-utf-8.any.js 
create mode 100644 test/wpt/tests/FileAPI/file/send-file-formdata.any.js create mode 100644 test/wpt/tests/FileAPI/fileReader.any.js create mode 100644 test/wpt/tests/FileAPI/filelist-section/filelist.html create mode 100644 test/wpt/tests/FileAPI/filelist-section/filelist_multiple_selected_files-manual.html create mode 100644 test/wpt/tests/FileAPI/filelist-section/filelist_selected_file-manual.html create mode 100644 test/wpt/tests/FileAPI/filelist-section/support/upload.txt create mode 100644 test/wpt/tests/FileAPI/filelist-section/support/upload.zip create mode 100644 test/wpt/tests/FileAPI/historical.https.html create mode 100644 test/wpt/tests/FileAPI/idlharness-manual.html create mode 100644 test/wpt/tests/FileAPI/idlharness.any.js create mode 100644 test/wpt/tests/FileAPI/idlharness.html create mode 100644 test/wpt/tests/FileAPI/idlharness.worker.js create mode 100644 test/wpt/tests/FileAPI/progress-manual.html create mode 100644 test/wpt/tests/FileAPI/reading-data-section/Determining-Encoding.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/FileReader-event-handler-attributes.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/FileReader-multiple-reads.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_abort.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_error.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_events.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_file-manual.html create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_file_img-manual.html create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_readAsArrayBuffer.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_readAsBinaryString.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_readAsDataURL.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_readAsText.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_readystate.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/filereader_result.any.js create mode 100644 test/wpt/tests/FileAPI/reading-data-section/support/blue-100x100.png create mode 100644 test/wpt/tests/FileAPI/support/Blob.js create mode 100644 test/wpt/tests/FileAPI/support/document-domain-setter.sub.html create mode 100644 test/wpt/tests/FileAPI/support/empty-document.html create mode 100644 test/wpt/tests/FileAPI/support/historical-serviceworker.js create mode 100644 test/wpt/tests/FileAPI/support/incumbent.sub.html create mode 100644 test/wpt/tests/FileAPI/support/send-file-form-helper.js create mode 100644 test/wpt/tests/FileAPI/support/send-file-formdata-helper.js create mode 100644 test/wpt/tests/FileAPI/support/upload.txt create mode 100644 test/wpt/tests/FileAPI/support/url-origin.html create mode 100644 test/wpt/tests/FileAPI/unicode.html create mode 100644 test/wpt/tests/FileAPI/url/cross-global-revoke.sub.html create mode 100644 test/wpt/tests/FileAPI/url/multi-global-origin-serialization.sub.html create mode 100644 test/wpt/tests/FileAPI/url/resources/create-helper.html create mode 100644 test/wpt/tests/FileAPI/url/resources/create-helper.js create mode 100644 test/wpt/tests/FileAPI/url/resources/fetch-tests.js create mode 100644 test/wpt/tests/FileAPI/url/resources/revoke-helper.html create mode 100644 test/wpt/tests/FileAPI/url/resources/revoke-helper.js 
create mode 100644 test/wpt/tests/FileAPI/url/sandboxed-iframe.html create mode 100644 test/wpt/tests/FileAPI/url/unicode-origin.sub.html create mode 100644 test/wpt/tests/FileAPI/url/url-charset.window.js create mode 100644 test/wpt/tests/FileAPI/url/url-format.any.js create mode 100644 test/wpt/tests/FileAPI/url/url-in-tags-revoke.window.js create mode 100644 test/wpt/tests/FileAPI/url/url-in-tags.window.js create mode 100644 test/wpt/tests/FileAPI/url/url-lifetime.html create mode 100644 test/wpt/tests/FileAPI/url/url-reload.window.js create mode 100644 test/wpt/tests/FileAPI/url/url-with-fetch.any.js create mode 100644 test/wpt/tests/FileAPI/url/url-with-xhr.any.js create mode 100644 test/wpt/tests/FileAPI/url/url_createobjecturl_file-manual.html create mode 100644 test/wpt/tests/FileAPI/url/url_createobjecturl_file_img-manual.html create mode 100644 test/wpt/tests/FileAPI/url/url_xmlhttprequest_img-ref.html create mode 100644 test/wpt/tests/FileAPI/url/url_xmlhttprequest_img.html create mode 100644 test/wpt/tests/LICENSE.md create mode 100644 test/wpt/tests/README.md create mode 100644 test/wpt/tests/common/CustomCorsResponse.py create mode 100644 test/wpt/tests/common/META.yml create mode 100644 test/wpt/tests/common/PrefixedLocalStorage.js create mode 100644 test/wpt/tests/common/PrefixedLocalStorage.js.headers create mode 100644 test/wpt/tests/common/PrefixedPostMessage.js create mode 100644 test/wpt/tests/common/PrefixedPostMessage.js.headers create mode 100644 test/wpt/tests/common/README.md create mode 100644 test/wpt/tests/common/__init__.py create mode 100644 test/wpt/tests/common/arrays.js create mode 100644 test/wpt/tests/common/blank-with-cors.html create mode 100644 test/wpt/tests/common/blank-with-cors.html.headers create mode 100644 test/wpt/tests/common/blank.html create mode 100644 test/wpt/tests/common/custom-cors-response.js create mode 100644 test/wpt/tests/common/dispatcher/README.md create mode 100644 test/wpt/tests/common/dispatcher/dispatcher.js create mode 100644 test/wpt/tests/common/dispatcher/dispatcher.py create mode 100644 test/wpt/tests/common/dispatcher/executor-service-worker.js create mode 100644 test/wpt/tests/common/dispatcher/executor-worker.js create mode 100644 test/wpt/tests/common/dispatcher/executor.html create mode 100644 test/wpt/tests/common/dispatcher/remote-executor.html create mode 100644 test/wpt/tests/common/domain-setter.sub.html create mode 100644 test/wpt/tests/common/dummy.xhtml create mode 100644 test/wpt/tests/common/dummy.xml create mode 100644 test/wpt/tests/common/echo.py create mode 100644 test/wpt/tests/common/gc.js create mode 100644 test/wpt/tests/common/get-host-info.sub.js create mode 100644 test/wpt/tests/common/get-host-info.sub.js.headers create mode 100644 test/wpt/tests/common/media.js create mode 100644 test/wpt/tests/common/media.js.headers create mode 100644 test/wpt/tests/common/object-association.js create mode 100644 test/wpt/tests/common/object-association.js.headers create mode 100644 test/wpt/tests/common/performance-timeline-utils.js create mode 100644 test/wpt/tests/common/performance-timeline-utils.js.headers create mode 100644 test/wpt/tests/common/proxy-all.sub.pac create mode 100644 test/wpt/tests/common/redirect-opt-in.py create mode 100644 test/wpt/tests/common/redirect.py create mode 100644 test/wpt/tests/common/refresh.py create mode 100644 test/wpt/tests/common/reftest-wait.js create mode 100644 test/wpt/tests/common/reftest-wait.js.headers create mode 100644 
test/wpt/tests/common/rendering-utils.js create mode 100644 test/wpt/tests/common/sab.js create mode 100644 test/wpt/tests/common/security-features/README.md create mode 100644 test/wpt/tests/common/security-features/__init__.py create mode 100644 test/wpt/tests/common/security-features/resources/common.sub.js create mode 100644 test/wpt/tests/common/security-features/resources/common.sub.js.headers create mode 100644 test/wpt/tests/common/security-features/scope/__init__.py create mode 100644 test/wpt/tests/common/security-features/scope/document.py create mode 100644 test/wpt/tests/common/security-features/scope/template/document.html.template create mode 100644 test/wpt/tests/common/security-features/scope/template/worker.js.template create mode 100644 test/wpt/tests/common/security-features/scope/util.py create mode 100644 test/wpt/tests/common/security-features/scope/worker.py create mode 100644 test/wpt/tests/common/security-features/subresource/__init__.py create mode 100644 test/wpt/tests/common/security-features/subresource/audio.py create mode 100644 test/wpt/tests/common/security-features/subresource/document.py create mode 100644 test/wpt/tests/common/security-features/subresource/empty.py create mode 100644 test/wpt/tests/common/security-features/subresource/font.py create mode 100644 test/wpt/tests/common/security-features/subresource/image.py create mode 100644 test/wpt/tests/common/security-features/subresource/referrer.py create mode 100644 test/wpt/tests/common/security-features/subresource/script.py create mode 100644 test/wpt/tests/common/security-features/subresource/shared-worker.py create mode 100644 test/wpt/tests/common/security-features/subresource/static-import.py create mode 100644 test/wpt/tests/common/security-features/subresource/stylesheet.py create mode 100644 test/wpt/tests/common/security-features/subresource/subresource.py create mode 100644 test/wpt/tests/common/security-features/subresource/svg.py create mode 100644 test/wpt/tests/common/security-features/subresource/template/document.html.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/font.css.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/image.css.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/script.js.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/shared-worker.js.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/static-import.js.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/svg.css.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/svg.embedded.template create mode 100644 test/wpt/tests/common/security-features/subresource/template/worker.js.template create mode 100644 test/wpt/tests/common/security-features/subresource/video.py create mode 100644 test/wpt/tests/common/security-features/subresource/worker.py create mode 100644 test/wpt/tests/common/security-features/subresource/xhr.py create mode 100644 test/wpt/tests/common/security-features/tools/format_spec_src_json.py create mode 100644 test/wpt/tests/common/security-features/tools/generate.py create mode 100644 test/wpt/tests/common/security-features/tools/spec.src.json create mode 100644 test/wpt/tests/common/security-features/tools/spec_validator.py create mode 100644 test/wpt/tests/common/security-features/tools/template/disclaimer.template create mode 
100644 test/wpt/tests/common/security-features/tools/template/spec_json.js.template create mode 100644 test/wpt/tests/common/security-features/tools/template/test.debug.html.template create mode 100644 test/wpt/tests/common/security-features/tools/template/test.release.html.template create mode 100644 test/wpt/tests/common/security-features/tools/util.py create mode 100644 test/wpt/tests/common/security-features/types.md create mode 100644 test/wpt/tests/common/slow-redirect.py create mode 100644 test/wpt/tests/common/slow.py create mode 100644 test/wpt/tests/common/square.png create mode 100644 test/wpt/tests/common/stringifiers.js create mode 100644 test/wpt/tests/common/stringifiers.js.headers create mode 100644 test/wpt/tests/common/subset-tests-by-key.js create mode 100644 test/wpt/tests/common/subset-tests.js create mode 100644 test/wpt/tests/common/test-setting-immutable-prototype.js create mode 100644 test/wpt/tests/common/test-setting-immutable-prototype.js.headers create mode 100644 test/wpt/tests/common/text-plain.txt create mode 100644 test/wpt/tests/common/third_party/reftest-analyzer.xhtml create mode 100644 test/wpt/tests/common/utils.js create mode 100644 test/wpt/tests/common/utils.js.headers create mode 100644 test/wpt/tests/common/window-name-setter.html create mode 100644 test/wpt/tests/common/worklet-reftest.js create mode 100644 test/wpt/tests/common/worklet-reftest.js.headers create mode 100644 test/wpt/tests/fetch/META.yml create mode 100644 test/wpt/tests/fetch/README.md create mode 100644 test/wpt/tests/fetch/api/abort/cache.https.any.js create mode 100644 test/wpt/tests/fetch/api/abort/destroyed-context.html create mode 100644 test/wpt/tests/fetch/api/abort/general.any.js create mode 100644 test/wpt/tests/fetch/api/abort/keepalive.html create mode 100644 test/wpt/tests/fetch/api/abort/request.any.js create mode 100644 test/wpt/tests/fetch/api/abort/serviceworker-intercepted.https.html create mode 100644 test/wpt/tests/fetch/api/basic/accept-header.any.js create mode 100644 test/wpt/tests/fetch/api/basic/block-mime-as-script.html create mode 100644 test/wpt/tests/fetch/api/basic/conditional-get.any.js create mode 100644 test/wpt/tests/fetch/api/basic/error-after-response.any.js create mode 100644 test/wpt/tests/fetch/api/basic/header-value-combining.any.js create mode 100644 test/wpt/tests/fetch/api/basic/header-value-null-byte.any.js create mode 100644 test/wpt/tests/fetch/api/basic/historical.any.js create mode 100644 test/wpt/tests/fetch/api/basic/http-response-code.any.js create mode 100644 test/wpt/tests/fetch/api/basic/integrity.sub.any.js create mode 100644 test/wpt/tests/fetch/api/basic/keepalive.any.js create mode 100644 test/wpt/tests/fetch/api/basic/mediasource.window.js create mode 100644 test/wpt/tests/fetch/api/basic/mode-no-cors.sub.any.js create mode 100644 test/wpt/tests/fetch/api/basic/mode-same-origin.any.js create mode 100644 test/wpt/tests/fetch/api/basic/referrer.any.js create mode 100644 test/wpt/tests/fetch/api/basic/request-forbidden-headers.any.js create mode 100644 test/wpt/tests/fetch/api/basic/request-head.any.js create mode 100644 test/wpt/tests/fetch/api/basic/request-headers-case.any.js create mode 100644 test/wpt/tests/fetch/api/basic/request-headers-nonascii.any.js create mode 100644 test/wpt/tests/fetch/api/basic/request-headers.any.js create mode 100644 test/wpt/tests/fetch/api/basic/request-referrer-redirected-worker.html create mode 100644 test/wpt/tests/fetch/api/basic/request-referrer.any.js create mode 100644 
test/wpt/tests/fetch/api/basic/request-upload.any.js create mode 100644 test/wpt/tests/fetch/api/basic/request-upload.h2.any.js create mode 100644 test/wpt/tests/fetch/api/basic/response-null-body.any.js create mode 100644 test/wpt/tests/fetch/api/basic/response-url.sub.any.js create mode 100644 test/wpt/tests/fetch/api/basic/scheme-about.any.js create mode 100644 test/wpt/tests/fetch/api/basic/scheme-blob.sub.any.js create mode 100644 test/wpt/tests/fetch/api/basic/scheme-data.any.js create mode 100644 test/wpt/tests/fetch/api/basic/scheme-others.sub.any.js create mode 100644 test/wpt/tests/fetch/api/basic/status.h2.any.js create mode 100644 test/wpt/tests/fetch/api/basic/stream-response.any.js create mode 100644 test/wpt/tests/fetch/api/basic/stream-safe-creation.any.js create mode 100644 test/wpt/tests/fetch/api/basic/text-utf8.any.js create mode 100644 test/wpt/tests/fetch/api/body/cloned-any.js create mode 100644 test/wpt/tests/fetch/api/body/formdata.any.js create mode 100644 test/wpt/tests/fetch/api/body/mime-type.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-basic.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-cookies-redirect.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-cookies.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-expose-star.sub.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-filtering.sub.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-keepalive.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-multiple-origins.sub.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-no-preflight.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-origin.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight-cache.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight-not-cors-safelisted.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight-redirect.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight-referrer.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight-response-validation.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight-star.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight-status.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-preflight.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-redirect-credentials.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-redirect-preflight.any.js create mode 100644 test/wpt/tests/fetch/api/cors/cors-redirect.any.js create mode 100644 test/wpt/tests/fetch/api/cors/data-url-iframe.html create mode 100644 test/wpt/tests/fetch/api/cors/data-url-shared-worker.html create mode 100644 test/wpt/tests/fetch/api/cors/data-url-worker.html create mode 100644 test/wpt/tests/fetch/api/cors/resources/corspreflight.js create mode 100644 test/wpt/tests/fetch/api/cors/resources/not-cors-safelisted.json create mode 100644 test/wpt/tests/fetch/api/cors/sandboxed-iframe.html create mode 100644 test/wpt/tests/fetch/api/crashtests/body-window-destroy.html create mode 100644 test/wpt/tests/fetch/api/crashtests/request.html create mode 100644 test/wpt/tests/fetch/api/credentials/authentication-basic.any.js create mode 100644 test/wpt/tests/fetch/api/credentials/authentication-redirection.any.js create mode 100644 test/wpt/tests/fetch/api/credentials/cookies.any.js create mode 100644 test/wpt/tests/fetch/api/headers/header-setcookie.any.js create mode 100644 
test/wpt/tests/fetch/api/headers/header-values-normalize.any.js create mode 100644 test/wpt/tests/fetch/api/headers/header-values.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-basic.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-casing.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-combine.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-errors.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-no-cors.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-normalize.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-record.any.js create mode 100644 test/wpt/tests/fetch/api/headers/headers-structure.any.js create mode 100644 test/wpt/tests/fetch/api/idlharness.any.js create mode 100644 test/wpt/tests/fetch/api/policies/csp-blocked-worker.html create mode 100644 test/wpt/tests/fetch/api/policies/csp-blocked.html create mode 100644 test/wpt/tests/fetch/api/policies/csp-blocked.html.headers create mode 100644 test/wpt/tests/fetch/api/policies/csp-blocked.js create mode 100644 test/wpt/tests/fetch/api/policies/csp-blocked.js.headers create mode 100644 test/wpt/tests/fetch/api/policies/nested-policy.js create mode 100644 test/wpt/tests/fetch/api/policies/nested-policy.js.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-no-referrer-service-worker.https.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-no-referrer-worker.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-no-referrer.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-no-referrer.html.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-no-referrer.js create mode 100644 test/wpt/tests/fetch/api/policies/referrer-no-referrer.js.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-service-worker.https.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-service-worker.https.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-worker.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin-worker.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin.html.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin.js create mode 100644 test/wpt/tests/fetch/api/policies/referrer-origin.js.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-unsafe-url-service-worker.https.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-unsafe-url-worker.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html create mode 100644 test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html.headers create mode 100644 test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js create mode 100644 test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js.headers create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-back-to-original-origin.any.js create mode 100644 
test/wpt/tests/fetch/api/redirect/redirect-count.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-empty-location.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-keepalive.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-location-escape.tentative.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-location.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-method.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-mode.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-origin.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-referrer-override.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-referrer.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-schemes.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-to-dataurl.any.js create mode 100644 test/wpt/tests/fetch/api/redirect/redirect-upload.h2.any.js create mode 100644 test/wpt/tests/fetch/api/request/destination/fetch-destination-frame.https.html create mode 100644 test/wpt/tests/fetch/api/request/destination/fetch-destination-iframe.https.html create mode 100644 test/wpt/tests/fetch/api/request/destination/fetch-destination-no-load-event.https.html create mode 100644 test/wpt/tests/fetch/api/request/destination/fetch-destination-prefetch.https.html create mode 100644 test/wpt/tests/fetch/api/request/destination/fetch-destination-worker.https.html create mode 100644 test/wpt/tests/fetch/api/request/destination/fetch-destination.https.html create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy.es create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy.es.headers create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy.html create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy.png create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy.ttf create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.mp3 create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.oga create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy_video.mp4 create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy_video.ogv create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/dummy_video.webm create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/empty.https.html create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-frame.js create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-iframe.js create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-no-load-event.js create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker.js create mode 100644 test/wpt/tests/fetch/api/request/destination/resources/importer.js create mode 100644 test/wpt/tests/fetch/api/request/forbidden-method.any.js create mode 100644 test/wpt/tests/fetch/api/request/multi-globals/construct-in-detached-frame.window.js create mode 100644 test/wpt/tests/fetch/api/request/multi-globals/current/current.html create mode 100644 test/wpt/tests/fetch/api/request/multi-globals/incumbent/incumbent.html create 
mode 100644 test/wpt/tests/fetch/api/request/multi-globals/url-parsing.html create mode 100644 test/wpt/tests/fetch/api/request/request-bad-port.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache-default-conditional.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache-default.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache-force-cache.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache-no-cache.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache-no-store.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache-only-if-cached.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache-reload.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-cache.js create mode 100644 test/wpt/tests/fetch/api/request/request-clone.sub.html create mode 100644 test/wpt/tests/fetch/api/request/request-consume-empty.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-consume.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-disturbed.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-error.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-error.js create mode 100644 test/wpt/tests/fetch/api/request/request-headers.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-init-001.sub.html create mode 100644 test/wpt/tests/fetch/api/request/request-init-002.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-init-003.sub.html create mode 100644 test/wpt/tests/fetch/api/request/request-init-contenttype.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-init-priority.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-init-stream.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-keepalive-quota.html create mode 100644 test/wpt/tests/fetch/api/request/request-keepalive.any.js create mode 100644 test/wpt/tests/fetch/api/request/request-reset-attributes.https.html create mode 100644 test/wpt/tests/fetch/api/request/request-structure.any.js create mode 100644 test/wpt/tests/fetch/api/request/resources/cache.py create mode 100644 test/wpt/tests/fetch/api/request/resources/hello.txt create mode 100644 test/wpt/tests/fetch/api/request/resources/request-reset-attributes-worker.js create mode 100644 test/wpt/tests/fetch/api/request/url-encoding.html create mode 100644 test/wpt/tests/fetch/api/resources/authentication.py create mode 100644 test/wpt/tests/fetch/api/resources/bad-chunk-encoding.py create mode 100644 test/wpt/tests/fetch/api/resources/basic.html create mode 100644 test/wpt/tests/fetch/api/resources/cache.py create mode 100644 test/wpt/tests/fetch/api/resources/clean-stash.py create mode 100644 test/wpt/tests/fetch/api/resources/cors-top.txt create mode 100644 test/wpt/tests/fetch/api/resources/cors-top.txt.headers create mode 100644 test/wpt/tests/fetch/api/resources/data.json create mode 100644 test/wpt/tests/fetch/api/resources/dump-authorization-header.py create mode 100644 test/wpt/tests/fetch/api/resources/echo-content.h2.py create mode 100644 test/wpt/tests/fetch/api/resources/echo-content.py create mode 100644 test/wpt/tests/fetch/api/resources/empty.txt create mode 100644 test/wpt/tests/fetch/api/resources/infinite-slow-response.py create mode 100644 test/wpt/tests/fetch/api/resources/inspect-headers.py create mode 100644 test/wpt/tests/fetch/api/resources/keepalive-helper.js create mode 100644 
test/wpt/tests/fetch/api/resources/keepalive-iframe.html create mode 100644 test/wpt/tests/fetch/api/resources/keepalive-redirect-iframe.html create mode 100644 test/wpt/tests/fetch/api/resources/keepalive-redirect-window.html create mode 100644 test/wpt/tests/fetch/api/resources/method.py create mode 100644 test/wpt/tests/fetch/api/resources/preflight.py create mode 100644 test/wpt/tests/fetch/api/resources/redirect-empty-location.py create mode 100644 test/wpt/tests/fetch/api/resources/redirect.h2.py create mode 100644 test/wpt/tests/fetch/api/resources/redirect.py create mode 100644 test/wpt/tests/fetch/api/resources/sandboxed-iframe.html create mode 100644 test/wpt/tests/fetch/api/resources/script-with-header.py create mode 100644 test/wpt/tests/fetch/api/resources/stash-put.py create mode 100644 test/wpt/tests/fetch/api/resources/stash-take.py create mode 100644 test/wpt/tests/fetch/api/resources/status.py create mode 100644 test/wpt/tests/fetch/api/resources/sw-intercept-abort.js create mode 100644 test/wpt/tests/fetch/api/resources/sw-intercept.js create mode 100644 test/wpt/tests/fetch/api/resources/top.txt create mode 100644 test/wpt/tests/fetch/api/resources/trickle.py create mode 100644 test/wpt/tests/fetch/api/resources/utils.js create mode 100644 test/wpt/tests/fetch/api/response/json.any.js create mode 100644 test/wpt/tests/fetch/api/response/many-empty-chunks-crash.html create mode 100644 test/wpt/tests/fetch/api/response/multi-globals/current/current.html create mode 100644 test/wpt/tests/fetch/api/response/multi-globals/incumbent/incumbent.html create mode 100644 test/wpt/tests/fetch/api/response/multi-globals/relevant/relevant.html create mode 100644 test/wpt/tests/fetch/api/response/multi-globals/url-parsing.html create mode 100644 test/wpt/tests/fetch/api/response/response-body-read-task-handling.html create mode 100644 test/wpt/tests/fetch/api/response/response-cancel-stream.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-clone-iframe.window.js create mode 100644 test/wpt/tests/fetch/api/response/response-clone.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-consume-empty.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-consume-stream.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-consume.html create mode 100644 test/wpt/tests/fetch/api/response/response-error-from-stream.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-error.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-from-stream.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-init-001.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-init-002.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-init-contenttype.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-static-error.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-static-json.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-static-redirect.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-bad-chunk.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-disturbed-1.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-disturbed-2.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-disturbed-3.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-disturbed-4.any.js create mode 100644 
test/wpt/tests/fetch/api/response/response-stream-disturbed-5.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-disturbed-6.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-disturbed-by-pipe.any.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-disturbed-util.js create mode 100644 test/wpt/tests/fetch/api/response/response-stream-with-broken-then.any.js create mode 100644 test/wpt/tests/fetch/connection-pool/network-partition-key.html create mode 100644 test/wpt/tests/fetch/connection-pool/resources/network-partition-about-blank-checker.html create mode 100644 test/wpt/tests/fetch/connection-pool/resources/network-partition-checker.html create mode 100644 test/wpt/tests/fetch/connection-pool/resources/network-partition-iframe-checker.html create mode 100644 test/wpt/tests/fetch/connection-pool/resources/network-partition-key.js create mode 100644 test/wpt/tests/fetch/connection-pool/resources/network-partition-key.py create mode 100644 test/wpt/tests/fetch/connection-pool/resources/network-partition-worker-checker.html create mode 100644 test/wpt/tests/fetch/connection-pool/resources/network-partition-worker.js create mode 100644 test/wpt/tests/fetch/content-encoding/bad-gzip-body.any.js create mode 100644 test/wpt/tests/fetch/content-encoding/gzip-body.any.js create mode 100644 test/wpt/tests/fetch/content-encoding/resources/bad-gzip-body.py create mode 100644 test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz create mode 100644 test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz.headers create mode 100644 test/wpt/tests/fetch/content-encoding/resources/foo.text.gz create mode 100644 test/wpt/tests/fetch/content-encoding/resources/foo.text.gz.headers create mode 100644 test/wpt/tests/fetch/content-length/api-and-duplicate-headers.any.js create mode 100644 test/wpt/tests/fetch/content-length/content-length.html create mode 100644 test/wpt/tests/fetch/content-length/content-length.html.headers create mode 100644 test/wpt/tests/fetch/content-length/parsing.window.js create mode 100644 test/wpt/tests/fetch/content-length/resources/content-length.py create mode 100644 test/wpt/tests/fetch/content-length/resources/content-lengths.json create mode 100644 test/wpt/tests/fetch/content-length/resources/identical-duplicates.asis create mode 100644 test/wpt/tests/fetch/content-length/too-long.window.js create mode 100644 test/wpt/tests/fetch/content-type/README.md create mode 100644 test/wpt/tests/fetch/content-type/multipart-malformed.any.js create mode 100644 test/wpt/tests/fetch/content-type/multipart.window.js create mode 100644 test/wpt/tests/fetch/content-type/resources/content-type.py create mode 100644 test/wpt/tests/fetch/content-type/resources/content-types.json create mode 100644 test/wpt/tests/fetch/content-type/resources/script-content-types.json create mode 100644 test/wpt/tests/fetch/content-type/response.window.js create mode 100644 test/wpt/tests/fetch/content-type/script.window.js create mode 100644 test/wpt/tests/fetch/corb/README.md create mode 100644 test/wpt/tests/fetch/corb/img-html-correctly-labeled.sub-ref.html create mode 100644 test/wpt/tests/fetch/corb/img-html-correctly-labeled.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-mime-types-coverage.tentative.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub-ref.html create mode 100644 
test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub-ref.html create mode 100644 test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-empty.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-svg.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-svg-invalid.sub-ref.html create mode 100644 test/wpt/tests/fetch/corb/img-svg-labeled-as-dash.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-svg-labeled-as-svg-xml.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-svg-xml-decl.sub.html create mode 100644 test/wpt/tests/fetch/corb/img-svg.sub-ref.html create mode 100644 test/wpt/tests/fetch/corb/preload-image-png-mislabeled-as-html-nosniff.tentative.sub.html create mode 100644 test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css create mode 100644 test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css.headers create mode 100644 test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css create mode 100644 test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css.headers create mode 100644 test/wpt/tests/fetch/corb/resources/css-with-json-parser-breaker.css create mode 100644 test/wpt/tests/fetch/corb/resources/empty-labeled-as-png.png create mode 100644 test/wpt/tests/fetch/corb/resources/empty-labeled-as-png.png.headers create mode 100644 test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html create mode 100644 test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html.headers create mode 100644 test/wpt/tests/fetch/corb/resources/html-js-polyglot.js create mode 100644 test/wpt/tests/fetch/corb/resources/html-js-polyglot.js.headers create mode 100644 test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js create mode 100644 test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js.headers create mode 100644 test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js create mode 100644 test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js.headers create mode 100644 test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js create mode 100644 test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js.headers create mode 100644 test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png create mode 100644 test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png.headers create mode 100644 test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png create mode 100644 test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png.headers create mode 100644 test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png create mode 100644 test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png.headers create mode 100644 test/wpt/tests/fetch/corb/resources/response_block_probe.js create mode 100644 test/wpt/tests/fetch/corb/resources/response_block_probe.js.headers create mode 100644 test/wpt/tests/fetch/corb/resources/sniffable-resource.py create mode 100644 test/wpt/tests/fetch/corb/resources/subframe-that-posts-html-containing-blob-url-to-parent.html create mode 100644 test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg create mode 100644 test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg.headers create mode 100644 test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg create mode 100644 
test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg.headers create mode 100644 test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg create mode 100644 test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg.headers create mode 100644 test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg create mode 100644 test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg.headers create mode 100644 test/wpt/tests/fetch/corb/resources/svg-xml-decl.svg create mode 100644 test/wpt/tests/fetch/corb/resources/svg.svg create mode 100644 test/wpt/tests/fetch/corb/resources/svg.svg.headers create mode 100644 test/wpt/tests/fetch/corb/response_block.tentative.https.html create mode 100644 test/wpt/tests/fetch/corb/script-html-correctly-labeled.tentative.sub.html create mode 100644 test/wpt/tests/fetch/corb/script-html-js-polyglot.sub.html create mode 100644 test/wpt/tests/fetch/corb/script-html-via-cross-origin-blob-url.sub.html create mode 100644 test/wpt/tests/fetch/corb/script-js-mislabeled-as-html-nosniff.sub.html create mode 100644 test/wpt/tests/fetch/corb/script-js-mislabeled-as-html.sub.html create mode 100644 test/wpt/tests/fetch/corb/script-resource-with-json-parser-breaker.tentative.sub.html create mode 100644 test/wpt/tests/fetch/corb/script-resource-with-nonsniffable-types.tentative.sub.html create mode 100644 test/wpt/tests/fetch/corb/style-css-mislabeled-as-html-nosniff.sub.html create mode 100644 test/wpt/tests/fetch/corb/style-css-mislabeled-as-html.sub.html create mode 100644 test/wpt/tests/fetch/corb/style-css-with-json-parser-breaker.sub.html create mode 100644 test/wpt/tests/fetch/corb/style-html-correctly-labeled.sub.html create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/fetch-in-iframe.html create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/fetch.any.js create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/fetch.https.any.js create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/iframe-loads.html create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/image-loads.html create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/resources/green.png create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/resources/hello.py create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframe.py create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframeFetch.html create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/resources/image.py create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/resources/redirect.py create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/resources/script.py create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.any.js create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.https.window.js create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/script-loads.html create mode 100644 test/wpt/tests/fetch/cross-origin-resource-policy/syntax.any.js create mode 100644 test/wpt/tests/fetch/data-urls/README.md create mode 100644 test/wpt/tests/fetch/data-urls/base64.any.js create mode 100644 test/wpt/tests/fetch/data-urls/navigate.window.js create mode 100644 test/wpt/tests/fetch/data-urls/processing.any.js create mode 100644 test/wpt/tests/fetch/data-urls/resources/base64.json create mode 100644 test/wpt/tests/fetch/data-urls/resources/data-urls.json create mode 100644 
test/wpt/tests/fetch/fetch-later/META.yml create mode 100644 test/wpt/tests/fetch/fetch-later/README.md create mode 100644 test/wpt/tests/fetch/fetch-later/basic.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/fetch-later/non-secure.window.js create mode 100644 test/wpt/tests/fetch/fetch-later/sendondiscard.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/h1-parsing/README.md create mode 100644 test/wpt/tests/fetch/h1-parsing/lone-cr.window.js create mode 100644 test/wpt/tests/fetch/h1-parsing/resources-with-0x00-in-header.window.js create mode 100644 test/wpt/tests/fetch/h1-parsing/resources/README.md create mode 100644 test/wpt/tests/fetch/h1-parsing/resources/blue-with-0x00-in-a-header.asis create mode 100644 test/wpt/tests/fetch/h1-parsing/resources/document-with-0x00-in-header.py create mode 100644 test/wpt/tests/fetch/h1-parsing/resources/message.py create mode 100644 test/wpt/tests/fetch/h1-parsing/resources/script-with-0x00-in-header.py create mode 100644 test/wpt/tests/fetch/h1-parsing/resources/status-code.py create mode 100644 test/wpt/tests/fetch/h1-parsing/status-code.window.js create mode 100644 test/wpt/tests/fetch/http-cache/304-update.any.js create mode 100644 test/wpt/tests/fetch/http-cache/README.md create mode 100644 test/wpt/tests/fetch/http-cache/basic-auth-cache-test-ref.html create mode 100644 test/wpt/tests/fetch/http-cache/basic-auth-cache-test.html create mode 100644 test/wpt/tests/fetch/http-cache/cache-mode.any.js create mode 100644 test/wpt/tests/fetch/http-cache/cc-request.any.js create mode 100644 test/wpt/tests/fetch/http-cache/credentials.tentative.any.js create mode 100644 test/wpt/tests/fetch/http-cache/freshness.any.js create mode 100644 test/wpt/tests/fetch/http-cache/heuristic.any.js create mode 100644 test/wpt/tests/fetch/http-cache/http-cache.js create mode 100644 test/wpt/tests/fetch/http-cache/invalidate.any.js create mode 100644 test/wpt/tests/fetch/http-cache/partial.any.js create mode 100644 test/wpt/tests/fetch/http-cache/post-patch.any.js create mode 100644 test/wpt/tests/fetch/http-cache/resources/http-cache.py create mode 100644 test/wpt/tests/fetch/http-cache/resources/securedimage.py create mode 100644 test/wpt/tests/fetch/http-cache/resources/split-cache-popup-with-iframe.html create mode 100644 test/wpt/tests/fetch/http-cache/resources/split-cache-popup.html create mode 100644 test/wpt/tests/fetch/http-cache/split-cache.html create mode 100644 test/wpt/tests/fetch/http-cache/status.any.js create mode 100644 test/wpt/tests/fetch/http-cache/vary.any.js create mode 100644 test/wpt/tests/fetch/images/canvas-remote-read-remote-image-redirect.html create mode 100644 test/wpt/tests/fetch/metadata/META.yml create mode 100644 test/wpt/tests/fetch/metadata/README.md create mode 100644 test/wpt/tests/fetch/metadata/audio-worklet.https.html create mode 100644 test/wpt/tests/fetch/metadata/embed.https.sub.tentative.html create mode 100644 test/wpt/tests/fetch/metadata/fetch-preflight.https.sub.any.js create mode 100644 test/wpt/tests/fetch/metadata/fetch.https.sub.any.js create mode 100644 test/wpt/tests/fetch/metadata/generated/appcache-manifest.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/audioworklet.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/css-font-face.https.sub.tentative.html create mode 100644 test/wpt/tests/fetch/metadata/generated/css-font-face.sub.tentative.html create mode 100644 
test/wpt/tests/fetch/metadata/generated/css-images.https.sub.tentative.html create mode 100644 test/wpt/tests/fetch/metadata/generated/css-images.sub.tentative.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-a.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-a.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-area.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-area.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-audio.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-audio.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-embed.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-embed.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-frame.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-frame.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-iframe.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-iframe.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-img-environment-change.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-img-environment-change.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-img.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-img.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-input-image.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-input-image.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-link-icon.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-link-icon.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-link-prefetch.https.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-link-prefetch.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-meta-refresh.https.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-meta-refresh.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-picture.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-picture.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-script.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-script.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-video-poster.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-video-poster.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-video.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/element-video.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/fetch-via-serviceworker.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/fetch.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/fetch.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/form-submission.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/form-submission.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/header-link.https.sub.html create mode 100644 
test/wpt/tests/fetch/metadata/generated/header-link.https.sub.tentative.html create mode 100644 test/wpt/tests/fetch/metadata/generated/header-link.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/header-refresh.https.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/header-refresh.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/script-module-import-static.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/script-module-import-static.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/serviceworker.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/svg-image.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/svg-image.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/window-history.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/window-history.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/window-location.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/window-location.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.sub.html create mode 100644 test/wpt/tests/fetch/metadata/navigation.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/object.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/paint-worklet.https.html create mode 100644 test/wpt/tests/fetch/metadata/portal.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/preload.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/redirect/multiple-redirect-https-downgrade-upgrade.sub.html create mode 100644 test/wpt/tests/fetch/metadata/redirect/redirect-http-upgrade.sub.html create mode 100644 test/wpt/tests/fetch/metadata/redirect/redirect-https-downgrade.sub.html create mode 100644 test/wpt/tests/fetch/metadata/report.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/report.https.sub.html.sub.headers create mode 100644 test/wpt/tests/fetch/metadata/resources/appcache-iframe.sub.html create mode 100644 test/wpt/tests/fetch/metadata/resources/dedicatedWorker.js create mode 100644 test/wpt/tests/fetch/metadata/resources/echo-as-json.py create mode 100644 test/wpt/tests/fetch/metadata/resources/echo-as-script.py create mode 100644 test/wpt/tests/fetch/metadata/resources/es-module.sub.js create mode 100644 test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--fallback--sw.js create mode 100644 test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--respondWith--sw.js create mode 100644 test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker-frame.html create mode 100644 test/wpt/tests/fetch/metadata/resources/header-link.py create mode 100644 test/wpt/tests/fetch/metadata/resources/helper.js create mode 100644 test/wpt/tests/fetch/metadata/resources/helper.sub.js create mode 100644 test/wpt/tests/fetch/metadata/resources/message-opener.html create mode 100644 
test/wpt/tests/fetch/metadata/resources/post-to-owner.py create mode 100644 test/wpt/tests/fetch/metadata/resources/record-header.py create mode 100644 test/wpt/tests/fetch/metadata/resources/record-headers.py create mode 100644 test/wpt/tests/fetch/metadata/resources/redirectTestHelper.sub.js create mode 100644 test/wpt/tests/fetch/metadata/resources/serviceworker-accessors-frame.html create mode 100644 test/wpt/tests/fetch/metadata/resources/serviceworker-accessors.sw.js create mode 100644 test/wpt/tests/fetch/metadata/resources/sharedWorker.js create mode 100644 test/wpt/tests/fetch/metadata/resources/unload-with-beacon.html create mode 100644 test/wpt/tests/fetch/metadata/resources/xslt-test.sub.xml create mode 100644 test/wpt/tests/fetch/metadata/serviceworker-accessors.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/sharedworker.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/style.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/README.md create mode 100644 test/wpt/tests/fetch/metadata/tools/fetch-metadata.conf.yml create mode 100644 test/wpt/tests/fetch/metadata/tools/generate.py create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/appcache-manifest.sub.https.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/audioworklet.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/css-font-face.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/css-images.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-a.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-area.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-audio.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-embed.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-frame.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-iframe.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-img-environment-change.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-img.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-input-image.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-link-icon.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-link-prefetch.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-meta-refresh.optional.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-picture.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-script.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-video-poster.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/element-video.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/fetch-via-serviceworker.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/fetch.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/form-submission.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/header-link.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/header-refresh.optional.sub.html create mode 100644 
test/wpt/tests/fetch/metadata/tools/templates/script-module-import-dynamic.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/script-module-import-static.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/serviceworker.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/svg-image.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/window-history.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/window-location.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-constructor.sub.html create mode 100644 test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-importscripts.sub.html create mode 100644 test/wpt/tests/fetch/metadata/track.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/trailing-dot.https.sub.any.js create mode 100644 test/wpt/tests/fetch/metadata/unload.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/window-open.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/worker.https.sub.html create mode 100644 test/wpt/tests/fetch/metadata/xslt.https.sub.html create mode 100644 test/wpt/tests/fetch/nosniff/image.html create mode 100644 test/wpt/tests/fetch/nosniff/importscripts.html create mode 100644 test/wpt/tests/fetch/nosniff/importscripts.js create mode 100644 test/wpt/tests/fetch/nosniff/parsing-nosniff.window.js create mode 100644 test/wpt/tests/fetch/nosniff/resources/css.py create mode 100644 test/wpt/tests/fetch/nosniff/resources/image.py create mode 100644 test/wpt/tests/fetch/nosniff/resources/js.py create mode 100644 test/wpt/tests/fetch/nosniff/resources/nosniff.py create mode 100644 test/wpt/tests/fetch/nosniff/resources/worker.py create mode 100644 test/wpt/tests/fetch/nosniff/resources/x-content-type-options.json create mode 100644 test/wpt/tests/fetch/nosniff/script.html create mode 100644 test/wpt/tests/fetch/nosniff/stylesheet.html create mode 100644 test/wpt/tests/fetch/nosniff/worker.html create mode 100644 test/wpt/tests/fetch/orb/resources/data.json create mode 100644 test/wpt/tests/fetch/orb/resources/data_non_ascii.json create mode 100644 test/wpt/tests/fetch/orb/resources/empty.json create mode 100644 test/wpt/tests/fetch/orb/resources/font.ttf create mode 100644 test/wpt/tests/fetch/orb/resources/image.png create mode 100644 test/wpt/tests/fetch/orb/resources/js-unlabeled-utf16-without-bom.json create mode 100644 test/wpt/tests/fetch/orb/resources/js-unlabeled.js create mode 100644 test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png create mode 100644 test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png.headers create mode 100644 test/wpt/tests/fetch/orb/resources/png-unlabeled.png create mode 100644 test/wpt/tests/fetch/orb/resources/script-asm-js-invalid.js create mode 100644 test/wpt/tests/fetch/orb/resources/script-asm-js-valid.js create mode 100644 test/wpt/tests/fetch/orb/resources/script-iso-8559-1.js create mode 100644 test/wpt/tests/fetch/orb/resources/script-utf16-bom.js create mode 100644 test/wpt/tests/fetch/orb/resources/script-utf16-without-bom.js create mode 100644 test/wpt/tests/fetch/orb/resources/script.js create mode 100644 test/wpt/tests/fetch/orb/resources/sound.mp3 create mode 100644 test/wpt/tests/fetch/orb/resources/text.txt create mode 100644 test/wpt/tests/fetch/orb/resources/utils.js create mode 100644 test/wpt/tests/fetch/orb/tentative/compressed-image-sniffing.sub.html create mode 100644 
test/wpt/tests/fetch/orb/tentative/content-range.sub.any.js create mode 100644 test/wpt/tests/fetch/orb/tentative/img-mime-types-coverage.tentative.sub.html create mode 100644 test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub-ref.html create mode 100644 test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub.html create mode 100644 test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub-ref.html create mode 100644 test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub.html create mode 100644 test/wpt/tests/fetch/orb/tentative/known-mime-type.sub.any.js create mode 100644 test/wpt/tests/fetch/orb/tentative/nosniff.sub.any.js create mode 100644 test/wpt/tests/fetch/orb/tentative/script-js-unlabeled-gziped.sub.html create mode 100644 test/wpt/tests/fetch/orb/tentative/script-unlabeled.sub.html create mode 100644 test/wpt/tests/fetch/orb/tentative/script-utf16-without-bom-hint-charset.sub.html create mode 100644 test/wpt/tests/fetch/orb/tentative/status.sub.any.js create mode 100644 test/wpt/tests/fetch/orb/tentative/status.sub.html create mode 100644 test/wpt/tests/fetch/orb/tentative/unknown-mime-type.sub.any.js create mode 100644 test/wpt/tests/fetch/origin/assorted.window.js create mode 100644 test/wpt/tests/fetch/origin/resources/redirect-and-stash.py create mode 100644 test/wpt/tests/fetch/origin/resources/referrer-policy.py create mode 100644 test/wpt/tests/fetch/private-network-access/META.yml create mode 100644 test/wpt/tests/fetch/private-network-access/README.md create mode 100644 test/wpt/tests/fetch/private-network-access/fenced-frame-no-preflight-required.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/fenced-frame-subresource-fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/fenced-frame.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/fetch-from-treat-as-public.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/fetch.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/iframe.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/iframe.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/mixed-content-fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/nested-worker.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/nested-worker.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/preflight-cache.https.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/redirect.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/resources/executor.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html.headers create mode 100644 test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access-target.https.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html.headers create mode 
100644 test/wpt/tests/fetch/private-network-access/resources/fetcher.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/fetcher.js create mode 100644 test/wpt/tests/fetch/private-network-access/resources/iframed.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/iframer.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/preflight.py create mode 100644 test/wpt/tests/fetch/private-network-access/resources/service-worker-bridge.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/service-worker.js create mode 100644 test/wpt/tests/fetch/private-network-access/resources/shared-fetcher.js create mode 100644 test/wpt/tests/fetch/private-network-access/resources/shared-worker-blob-fetcher.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/shared-worker-fetcher.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/socket-opener.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/support.sub.js create mode 100644 test/wpt/tests/fetch/private-network-access/resources/worker-blob-fetcher.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.html create mode 100644 test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.js create mode 100644 test/wpt/tests/fetch/private-network-access/resources/xhr-sender.html create mode 100644 test/wpt/tests/fetch/private-network-access/service-worker-background-fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/service-worker-fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/service-worker-update.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/service-worker.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/shared-worker.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/shared-worker.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/websocket.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/websocket.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/worker-blob-fetch.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/worker.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/worker.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/xhr-from-treat-as-public.tentative.https.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/xhr.https.tentative.window.js create mode 100644 test/wpt/tests/fetch/private-network-access/xhr.tentative.window.js create mode 100644 
test/wpt/tests/fetch/range/blob.any.js create mode 100644 test/wpt/tests/fetch/range/data.any.js create mode 100644 test/wpt/tests/fetch/range/general.any.js create mode 100644 test/wpt/tests/fetch/range/general.window.js create mode 100644 test/wpt/tests/fetch/range/non-matching-range-response.html create mode 100644 test/wpt/tests/fetch/range/resources/basic.html create mode 100644 test/wpt/tests/fetch/range/resources/long-wav.py create mode 100644 test/wpt/tests/fetch/range/resources/partial-script.py create mode 100644 test/wpt/tests/fetch/range/resources/partial-text.py create mode 100644 test/wpt/tests/fetch/range/resources/range-sw.js create mode 100644 test/wpt/tests/fetch/range/resources/stash-take.py create mode 100644 test/wpt/tests/fetch/range/resources/utils.js create mode 100644 test/wpt/tests/fetch/range/resources/video-with-range.py create mode 100644 test/wpt/tests/fetch/range/sw.https.window.js create mode 100644 test/wpt/tests/fetch/redirect-navigate/302-found-post-handler.py create mode 100644 test/wpt/tests/fetch/redirect-navigate/302-found-post.html create mode 100644 test/wpt/tests/fetch/redirect-navigate/preserve-fragment.html create mode 100644 test/wpt/tests/fetch/redirect-navigate/resources/destination.html create mode 100644 test/wpt/tests/fetch/redirects/data.window.js create mode 100644 test/wpt/tests/fetch/redirects/subresource-fragments.html create mode 100644 test/wpt/tests/fetch/security/1xx-response.any.js create mode 100644 test/wpt/tests/fetch/security/dangling-markup-mitigation-data-url.tentative.sub.html create mode 100644 test/wpt/tests/fetch/security/dangling-markup-mitigation.tentative.html create mode 100644 test/wpt/tests/fetch/security/embedded-credentials.tentative.sub.html create mode 100644 test/wpt/tests/fetch/security/redirect-to-url-with-credentials.https.html create mode 100644 test/wpt/tests/fetch/security/support/embedded-credential-window.sub.html create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/fetch-sw.https.html create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/fetch.any.js create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/resources/stale-css.py create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/resources/stale-image.py create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/resources/stale-script.py create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/revalidate-not-blocked-by-csp.html create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/stale-css.html create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/stale-image.html create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/stale-script.html create mode 100644 test/wpt/tests/fetch/stale-while-revalidate/sw-intercept.js create mode 100644 test/wpt/tests/interfaces/ANGLE_instanced_arrays.idl create mode 100644 test/wpt/tests/interfaces/CSP.idl create mode 100644 test/wpt/tests/interfaces/DOM-Parsing.idl create mode 100644 test/wpt/tests/interfaces/EXT_blend_minmax.idl create mode 100644 test/wpt/tests/interfaces/EXT_color_buffer_float.idl create mode 100644 test/wpt/tests/interfaces/EXT_color_buffer_half_float.idl create mode 100644 test/wpt/tests/interfaces/EXT_disjoint_timer_query.idl create mode 100644 test/wpt/tests/interfaces/EXT_disjoint_timer_query_webgl2.idl create mode 100644 test/wpt/tests/interfaces/EXT_float_blend.idl create mode 100644 test/wpt/tests/interfaces/EXT_frag_depth.idl create mode 100644 test/wpt/tests/interfaces/EXT_sRGB.idl create mode 100644 
test/wpt/tests/interfaces/EXT_shader_texture_lod.idl create mode 100644 test/wpt/tests/interfaces/EXT_texture_compression_bptc.idl create mode 100644 test/wpt/tests/interfaces/EXT_texture_compression_rgtc.idl create mode 100644 test/wpt/tests/interfaces/EXT_texture_filter_anisotropic.idl create mode 100644 test/wpt/tests/interfaces/EXT_texture_norm16.idl create mode 100644 test/wpt/tests/interfaces/FedCM.idl create mode 100644 test/wpt/tests/interfaces/FileAPI.idl create mode 100644 test/wpt/tests/interfaces/IndexedDB.idl create mode 100644 test/wpt/tests/interfaces/KHR_parallel_shader_compile.idl create mode 100644 test/wpt/tests/interfaces/META.yml create mode 100644 test/wpt/tests/interfaces/OES_draw_buffers_indexed.idl create mode 100644 test/wpt/tests/interfaces/OES_element_index_uint.idl create mode 100644 test/wpt/tests/interfaces/OES_fbo_render_mipmap.idl create mode 100644 test/wpt/tests/interfaces/OES_standard_derivatives.idl create mode 100644 test/wpt/tests/interfaces/OES_texture_float.idl create mode 100644 test/wpt/tests/interfaces/OES_texture_float_linear.idl create mode 100644 test/wpt/tests/interfaces/OES_texture_half_float.idl create mode 100644 test/wpt/tests/interfaces/OES_texture_half_float_linear.idl create mode 100644 test/wpt/tests/interfaces/OES_vertex_array_object.idl create mode 100644 test/wpt/tests/interfaces/OVR_multiview2.idl create mode 100644 test/wpt/tests/interfaces/README.md create mode 100644 test/wpt/tests/interfaces/SVG.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_blend_equation_advanced_coherent.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_clip_cull_distance.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_color_buffer_float.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_compressed_texture_astc.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_compressed_texture_etc.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_compressed_texture_etc1.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_compressed_texture_pvrtc.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc_srgb.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_debug_renderer_info.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_debug_shaders.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_depth_texture.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_draw_buffers.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_draw_instanced_base_vertex_base_instance.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_lose_context.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_multi_draw.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_multi_draw_instanced_base_vertex_base_instance.idl create mode 100644 test/wpt/tests/interfaces/WEBGL_provoking_vertex.idl create mode 100644 test/wpt/tests/interfaces/WebCryptoAPI.idl create mode 100644 test/wpt/tests/interfaces/accelerometer.idl create mode 100644 test/wpt/tests/interfaces/ambient-light.idl create mode 100644 test/wpt/tests/interfaces/anchors.idl create mode 100644 test/wpt/tests/interfaces/attribution-reporting-api.idl create mode 100644 test/wpt/tests/interfaces/audio-output.idl create mode 100644 test/wpt/tests/interfaces/autoplay-detection.idl create mode 100644 test/wpt/tests/interfaces/background-fetch.idl create mode 100644 test/wpt/tests/interfaces/background-sync.idl create mode 100644 test/wpt/tests/interfaces/badging.idl create mode 100644 
test/wpt/tests/interfaces/battery-status.idl create mode 100644 test/wpt/tests/interfaces/beacon.idl create mode 100644 test/wpt/tests/interfaces/capture-handle-identity.idl create mode 100644 test/wpt/tests/interfaces/captured-mouse-events.tentative.idl create mode 100644 test/wpt/tests/interfaces/clipboard-apis.idl create mode 100644 test/wpt/tests/interfaces/close-watcher.idl create mode 100644 test/wpt/tests/interfaces/compat.idl create mode 100644 test/wpt/tests/interfaces/compression.idl create mode 100644 test/wpt/tests/interfaces/compute-pressure.idl create mode 100644 test/wpt/tests/interfaces/console.idl create mode 100644 test/wpt/tests/interfaces/contact-picker.idl create mode 100644 test/wpt/tests/interfaces/content-index.idl create mode 100644 test/wpt/tests/interfaces/cookie-store.idl create mode 100644 test/wpt/tests/interfaces/credential-management.idl create mode 100644 test/wpt/tests/interfaces/csp-embedded-enforcement.idl create mode 100644 test/wpt/tests/interfaces/csp-next.idl create mode 100644 test/wpt/tests/interfaces/css-anchor-position.idl create mode 100644 test/wpt/tests/interfaces/css-animation-worklet.idl create mode 100644 test/wpt/tests/interfaces/css-animations-2.idl create mode 100644 test/wpt/tests/interfaces/css-animations.idl create mode 100644 test/wpt/tests/interfaces/css-cascade-6.idl create mode 100644 test/wpt/tests/interfaces/css-cascade.idl create mode 100644 test/wpt/tests/interfaces/css-color-5.idl create mode 100644 test/wpt/tests/interfaces/css-conditional.idl create mode 100644 test/wpt/tests/interfaces/css-contain-3.idl create mode 100644 test/wpt/tests/interfaces/css-contain.idl create mode 100644 test/wpt/tests/interfaces/css-counter-styles.idl create mode 100644 test/wpt/tests/interfaces/css-font-loading.idl create mode 100644 test/wpt/tests/interfaces/css-fonts.idl create mode 100644 test/wpt/tests/interfaces/css-highlight-api.idl create mode 100644 test/wpt/tests/interfaces/css-images-4.idl create mode 100644 test/wpt/tests/interfaces/css-layout-api.idl create mode 100644 test/wpt/tests/interfaces/css-masking.idl create mode 100644 test/wpt/tests/interfaces/css-nav.idl create mode 100644 test/wpt/tests/interfaces/css-nesting.idl create mode 100644 test/wpt/tests/interfaces/css-paint-api.idl create mode 100644 test/wpt/tests/interfaces/css-parser-api.idl create mode 100644 test/wpt/tests/interfaces/css-properties-values-api.idl create mode 100644 test/wpt/tests/interfaces/css-pseudo.idl create mode 100644 test/wpt/tests/interfaces/css-regions.idl create mode 100644 test/wpt/tests/interfaces/css-shadow-parts.idl create mode 100644 test/wpt/tests/interfaces/css-toggle.tentative.idl create mode 100644 test/wpt/tests/interfaces/css-transitions-2.idl create mode 100644 test/wpt/tests/interfaces/css-transitions.idl create mode 100644 test/wpt/tests/interfaces/css-typed-om.idl create mode 100644 test/wpt/tests/interfaces/css-view-transitions.idl create mode 100644 test/wpt/tests/interfaces/cssom-view.idl create mode 100644 test/wpt/tests/interfaces/cssom.idl create mode 100644 test/wpt/tests/interfaces/custom-state-pseudo-class.idl create mode 100644 test/wpt/tests/interfaces/datacue.idl create mode 100644 test/wpt/tests/interfaces/deprecation-reporting.idl create mode 100644 test/wpt/tests/interfaces/device-memory.idl create mode 100644 test/wpt/tests/interfaces/device-posture.idl create mode 100644 test/wpt/tests/interfaces/digital-goods.idl create mode 100644 test/wpt/tests/interfaces/document-picture-in-picture.idl create mode 100644 
test/wpt/tests/interfaces/dom.idl create mode 100644 test/wpt/tests/interfaces/edit-context.idl create mode 100644 test/wpt/tests/interfaces/element-timing.idl create mode 100644 test/wpt/tests/interfaces/encoding.idl create mode 100644 test/wpt/tests/interfaces/encrypted-media.idl create mode 100644 test/wpt/tests/interfaces/entries-api.idl create mode 100644 test/wpt/tests/interfaces/event-timing.idl create mode 100644 test/wpt/tests/interfaces/eyedropper-api.idl create mode 100644 test/wpt/tests/interfaces/fenced-frame.idl create mode 100644 test/wpt/tests/interfaces/fetch.idl create mode 100644 test/wpt/tests/interfaces/fido.idl create mode 100644 test/wpt/tests/interfaces/file-system-access.idl create mode 100644 test/wpt/tests/interfaces/filter-effects.idl create mode 100644 test/wpt/tests/interfaces/font-metrics-api.idl create mode 100644 test/wpt/tests/interfaces/fs.idl create mode 100644 test/wpt/tests/interfaces/fullscreen.idl create mode 100644 test/wpt/tests/interfaces/gamepad-extensions.idl create mode 100644 test/wpt/tests/interfaces/gamepad.idl create mode 100644 test/wpt/tests/interfaces/generic-sensor.idl create mode 100644 test/wpt/tests/interfaces/geolocation-sensor.idl create mode 100644 test/wpt/tests/interfaces/geolocation.idl create mode 100644 test/wpt/tests/interfaces/geometry.idl create mode 100644 test/wpt/tests/interfaces/get-installed-related-apps.idl create mode 100644 test/wpt/tests/interfaces/gpc-spec.idl create mode 100644 test/wpt/tests/interfaces/gyroscope.idl create mode 100644 test/wpt/tests/interfaces/hr-time.idl create mode 100644 test/wpt/tests/interfaces/html-media-capture.idl create mode 100644 test/wpt/tests/interfaces/html.idl create mode 100644 test/wpt/tests/interfaces/idle-detection.idl create mode 100644 test/wpt/tests/interfaces/image-capture.idl create mode 100644 test/wpt/tests/interfaces/image-resource.idl create mode 100644 test/wpt/tests/interfaces/ink-enhancement.idl create mode 100644 test/wpt/tests/interfaces/input-device-capabilities.idl create mode 100644 test/wpt/tests/interfaces/input-events.idl create mode 100644 test/wpt/tests/interfaces/intersection-observer.idl create mode 100644 test/wpt/tests/interfaces/intervention-reporting.idl create mode 100644 test/wpt/tests/interfaces/is-input-pending.idl create mode 100644 test/wpt/tests/interfaces/js-self-profiling.idl create mode 100644 test/wpt/tests/interfaces/keyboard-lock.idl create mode 100644 test/wpt/tests/interfaces/keyboard-map.idl create mode 100644 test/wpt/tests/interfaces/largest-contentful-paint.idl create mode 100644 test/wpt/tests/interfaces/layout-instability.idl create mode 100644 test/wpt/tests/interfaces/local-font-access.idl create mode 100644 test/wpt/tests/interfaces/longtasks.idl create mode 100644 test/wpt/tests/interfaces/magnetometer.idl create mode 100644 test/wpt/tests/interfaces/manifest-incubations.idl create mode 100644 test/wpt/tests/interfaces/mathml-core.idl create mode 100644 test/wpt/tests/interfaces/media-capabilities.idl create mode 100644 test/wpt/tests/interfaces/media-playback-quality.idl create mode 100644 test/wpt/tests/interfaces/media-source.idl create mode 100644 test/wpt/tests/interfaces/mediacapture-automation.idl create mode 100644 test/wpt/tests/interfaces/mediacapture-fromelement.idl create mode 100644 test/wpt/tests/interfaces/mediacapture-handle-actions.idl create mode 100644 test/wpt/tests/interfaces/mediacapture-region.idl create mode 100644 test/wpt/tests/interfaces/mediacapture-streams.idl create mode 100644 
test/wpt/tests/interfaces/mediacapture-transform.idl create mode 100644 test/wpt/tests/interfaces/mediacapture-viewport.idl create mode 100644 test/wpt/tests/interfaces/mediasession.idl create mode 100644 test/wpt/tests/interfaces/mediastream-recording.idl create mode 100644 test/wpt/tests/interfaces/model-element.idl create mode 100644 test/wpt/tests/interfaces/mst-content-hint.idl create mode 100644 test/wpt/tests/interfaces/navigation-timing.idl create mode 100644 test/wpt/tests/interfaces/netinfo.idl create mode 100644 test/wpt/tests/interfaces/notifications.idl create mode 100644 test/wpt/tests/interfaces/orientation-event.idl create mode 100644 test/wpt/tests/interfaces/orientation-sensor.idl create mode 100644 test/wpt/tests/interfaces/page-lifecycle.idl create mode 100644 test/wpt/tests/interfaces/paint-timing.idl create mode 100644 test/wpt/tests/interfaces/parakeet.tentative.idl create mode 100644 test/wpt/tests/interfaces/payment-handler.idl create mode 100644 test/wpt/tests/interfaces/payment-request.idl create mode 100644 test/wpt/tests/interfaces/performance-measure-memory.idl create mode 100644 test/wpt/tests/interfaces/performance-timeline.idl create mode 100644 test/wpt/tests/interfaces/periodic-background-sync.idl create mode 100644 test/wpt/tests/interfaces/permissions-policy.idl create mode 100644 test/wpt/tests/interfaces/permissions-request.idl create mode 100644 test/wpt/tests/interfaces/permissions-revoke.idl create mode 100644 test/wpt/tests/interfaces/permissions.idl create mode 100644 test/wpt/tests/interfaces/picture-in-picture.idl create mode 100644 test/wpt/tests/interfaces/pointerevents.idl create mode 100644 test/wpt/tests/interfaces/pointerlock.idl create mode 100644 test/wpt/tests/interfaces/portals.idl create mode 100644 test/wpt/tests/interfaces/prefer-current-tab.idl create mode 100644 test/wpt/tests/interfaces/prerendering-revamped.idl create mode 100644 test/wpt/tests/interfaces/presentation-api.idl create mode 100644 test/wpt/tests/interfaces/private-click-measurement.idl create mode 100644 test/wpt/tests/interfaces/proximity.idl create mode 100644 test/wpt/tests/interfaces/push-api.idl create mode 100644 test/wpt/tests/interfaces/raw-camera-access.idl create mode 100644 test/wpt/tests/interfaces/real-world-meshing.idl create mode 100644 test/wpt/tests/interfaces/referrer-policy.idl create mode 100644 test/wpt/tests/interfaces/remote-playback.idl create mode 100644 test/wpt/tests/interfaces/reporting.idl create mode 100644 test/wpt/tests/interfaces/requestStorageAccessFor.idl create mode 100644 test/wpt/tests/interfaces/requestidlecallback.idl create mode 100644 test/wpt/tests/interfaces/resize-observer.idl create mode 100644 test/wpt/tests/interfaces/resource-timing.idl create mode 100644 test/wpt/tests/interfaces/sanitizer-api.idl create mode 100644 test/wpt/tests/interfaces/sanitizer-api.tentative.idl create mode 100644 test/wpt/tests/interfaces/savedata.idl create mode 100644 test/wpt/tests/interfaces/scheduling-apis.idl create mode 100644 test/wpt/tests/interfaces/screen-capture.idl create mode 100644 test/wpt/tests/interfaces/screen-orientation.idl create mode 100644 test/wpt/tests/interfaces/screen-wake-lock.idl create mode 100644 test/wpt/tests/interfaces/scroll-animations.idl create mode 100644 test/wpt/tests/interfaces/scroll-to-text-fragment.idl create mode 100644 test/wpt/tests/interfaces/secure-payment-confirmation.idl create mode 100644 test/wpt/tests/interfaces/selection-api.idl create mode 100644 test/wpt/tests/interfaces/serial.idl 
create mode 100644 test/wpt/tests/interfaces/server-timing.idl create mode 100644 test/wpt/tests/interfaces/service-workers.idl create mode 100644 test/wpt/tests/interfaces/shape-detection-api.idl create mode 100644 test/wpt/tests/interfaces/shared-storage.idl create mode 100644 test/wpt/tests/interfaces/speech-api.idl create mode 100644 test/wpt/tests/interfaces/storage-access.idl create mode 100644 test/wpt/tests/interfaces/storage-buckets.idl create mode 100644 test/wpt/tests/interfaces/storage-buckets.tentative.idl create mode 100644 test/wpt/tests/interfaces/storage.idl create mode 100644 test/wpt/tests/interfaces/streams.idl create mode 100644 test/wpt/tests/interfaces/sub-apps.tentative.idl create mode 100644 test/wpt/tests/interfaces/svg-animations.idl create mode 100644 test/wpt/tests/interfaces/testutils.idl create mode 100644 test/wpt/tests/interfaces/text-detection-api.idl create mode 100644 test/wpt/tests/interfaces/touch-events.idl create mode 100644 test/wpt/tests/interfaces/trust-token-api.idl create mode 100644 test/wpt/tests/interfaces/trusted-types.idl create mode 100644 test/wpt/tests/interfaces/turtledove.idl create mode 100644 test/wpt/tests/interfaces/ua-client-hints.idl create mode 100644 test/wpt/tests/interfaces/uievents.idl create mode 100644 test/wpt/tests/interfaces/url.idl create mode 100644 test/wpt/tests/interfaces/urlpattern.idl create mode 100644 test/wpt/tests/interfaces/user-timing.idl create mode 100644 test/wpt/tests/interfaces/vibration.idl create mode 100644 test/wpt/tests/interfaces/video-rvfc.idl create mode 100644 test/wpt/tests/interfaces/virtual-keyboard.idl create mode 100644 test/wpt/tests/interfaces/virtual-keyboard.tentative.idl create mode 100644 test/wpt/tests/interfaces/wai-aria.idl create mode 100644 test/wpt/tests/interfaces/wasm-js-api.idl create mode 100644 test/wpt/tests/interfaces/wasm-web-api.idl create mode 100644 test/wpt/tests/interfaces/web-animations-2.idl create mode 100644 test/wpt/tests/interfaces/web-animations.idl create mode 100644 test/wpt/tests/interfaces/web-app-launch.idl create mode 100644 test/wpt/tests/interfaces/web-bluetooth.idl create mode 100644 test/wpt/tests/interfaces/web-locks.idl create mode 100644 test/wpt/tests/interfaces/web-nfc.idl create mode 100644 test/wpt/tests/interfaces/web-otp.idl create mode 100644 test/wpt/tests/interfaces/web-share.idl create mode 100644 test/wpt/tests/interfaces/webaudio.idl create mode 100644 test/wpt/tests/interfaces/webauthn.idl create mode 100644 test/wpt/tests/interfaces/webcodecs-aac-codec-registration.idl create mode 100644 test/wpt/tests/interfaces/webcodecs-av1-codec-registration.idl create mode 100644 test/wpt/tests/interfaces/webcodecs-avc-codec-registration.idl create mode 100644 test/wpt/tests/interfaces/webcodecs-flac-codec-registration.idl create mode 100644 test/wpt/tests/interfaces/webcodecs-hevc-codec-registration.idl create mode 100644 test/wpt/tests/interfaces/webcodecs-opus-codec-registration.idl create mode 100644 test/wpt/tests/interfaces/webcodecs-vp9-codec-registration.idl create mode 100644 test/wpt/tests/interfaces/webcodecs.idl create mode 100644 test/wpt/tests/interfaces/webcrypto-secure-curves.idl create mode 100644 test/wpt/tests/interfaces/webdriver.idl create mode 100644 test/wpt/tests/interfaces/webgl1.idl create mode 100644 test/wpt/tests/interfaces/webgl2.idl create mode 100644 test/wpt/tests/interfaces/webgpu.idl create mode 100644 test/wpt/tests/interfaces/webhid.idl create mode 100644 test/wpt/tests/interfaces/webidl.idl create mode 
100644 test/wpt/tests/interfaces/webmidi.idl create mode 100644 test/wpt/tests/interfaces/webnn.idl create mode 100644 test/wpt/tests/interfaces/webrtc-encoded-transform.idl create mode 100644 test/wpt/tests/interfaces/webrtc-ice.idl create mode 100644 test/wpt/tests/interfaces/webrtc-identity.idl create mode 100644 test/wpt/tests/interfaces/webrtc-priority.idl create mode 100644 test/wpt/tests/interfaces/webrtc-stats.idl create mode 100644 test/wpt/tests/interfaces/webrtc-svc.idl create mode 100644 test/wpt/tests/interfaces/webrtc.idl create mode 100644 test/wpt/tests/interfaces/websockets.idl create mode 100644 test/wpt/tests/interfaces/webtransport.idl create mode 100644 test/wpt/tests/interfaces/webusb.idl create mode 100644 test/wpt/tests/interfaces/webvr.tentative.idl create mode 100644 test/wpt/tests/interfaces/webvtt.idl create mode 100644 test/wpt/tests/interfaces/webxr-ar-module.idl create mode 100644 test/wpt/tests/interfaces/webxr-depth-sensing.idl create mode 100644 test/wpt/tests/interfaces/webxr-dom-overlays.idl create mode 100644 test/wpt/tests/interfaces/webxr-gamepads-module.idl create mode 100644 test/wpt/tests/interfaces/webxr-hand-input.idl create mode 100644 test/wpt/tests/interfaces/webxr-hit-test.idl create mode 100644 test/wpt/tests/interfaces/webxr-lighting-estimation.idl create mode 100644 test/wpt/tests/interfaces/webxr.idl create mode 100644 test/wpt/tests/interfaces/webxrlayers.idl create mode 100644 test/wpt/tests/interfaces/window-controls-overlay.idl create mode 100644 test/wpt/tests/interfaces/window-management.idl create mode 100644 test/wpt/tests/interfaces/xhr.idl create mode 100644 test/wpt/tests/lint.ignore create mode 100644 test/wpt/tests/mimesniff/META.yml create mode 100644 test/wpt/tests/mimesniff/README.md create mode 100644 test/wpt/tests/mimesniff/media/media-sniff.window.js create mode 100644 test/wpt/tests/mimesniff/media/resources/flac.flac create mode 100644 test/wpt/tests/mimesniff/media/resources/make-vectors.sh create mode 100644 test/wpt/tests/mimesniff/media/resources/mp3-raw.mp3 create mode 100644 test/wpt/tests/mimesniff/media/resources/mp3-with-id3.mp3 create mode 100644 test/wpt/tests/mimesniff/media/resources/mp4.mp4 create mode 100644 test/wpt/tests/mimesniff/media/resources/ogg.ogg create mode 100644 test/wpt/tests/mimesniff/media/resources/wav.wav create mode 100644 test/wpt/tests/mimesniff/media/resources/webm.webm create mode 100644 test/wpt/tests/mimesniff/mime-types/README.md create mode 100644 test/wpt/tests/mimesniff/mime-types/charset-parameter.window.js create mode 100644 test/wpt/tests/mimesniff/mime-types/parsing.any.js create mode 100644 test/wpt/tests/mimesniff/mime-types/resources/generated-mime-types.json create mode 100644 test/wpt/tests/mimesniff/mime-types/resources/generated-mime-types.py create mode 100644 test/wpt/tests/mimesniff/mime-types/resources/mime-charset.py create mode 100644 test/wpt/tests/mimesniff/mime-types/resources/mime-groups.json create mode 100644 test/wpt/tests/mimesniff/mime-types/resources/mime-types.json create mode 100644 test/wpt/tests/resources/.htaccess create mode 100644 test/wpt/tests/resources/META.yml create mode 100644 test/wpt/tests/resources/SVGAnimationTestCase-testharness.js create mode 100644 test/wpt/tests/resources/accesskey.js create mode 100644 test/wpt/tests/resources/blank.html create mode 100644 test/wpt/tests/resources/channel.sub.js create mode 100644 test/wpt/tests/resources/check-layout-th.js create mode 100644 test/wpt/tests/resources/check-layout.js create 
mode 100644 test/wpt/tests/resources/chromium/README.md create mode 100644 test/wpt/tests/resources/chromium/contacts_manager_mock.js create mode 100644 test/wpt/tests/resources/chromium/content-index-helpers.js create mode 100644 test/wpt/tests/resources/chromium/enable-hyperlink-auditing.js create mode 100644 test/wpt/tests/resources/chromium/fake-hid.js create mode 100644 test/wpt/tests/resources/chromium/fake-serial.js create mode 100644 test/wpt/tests/resources/chromium/generic_sensor_mocks.js create mode 100644 test/wpt/tests/resources/chromium/generic_sensor_mocks.js.headers create mode 100644 test/wpt/tests/resources/chromium/mock-barcodedetection.js create mode 100644 test/wpt/tests/resources/chromium/mock-barcodedetection.js.headers create mode 100644 test/wpt/tests/resources/chromium/mock-battery-monitor.headers create mode 100644 test/wpt/tests/resources/chromium/mock-battery-monitor.js create mode 100644 test/wpt/tests/resources/chromium/mock-direct-sockets.js create mode 100644 test/wpt/tests/resources/chromium/mock-facedetection.js create mode 100644 test/wpt/tests/resources/chromium/mock-facedetection.js.headers create mode 100644 test/wpt/tests/resources/chromium/mock-idle-detection.js create mode 100644 test/wpt/tests/resources/chromium/mock-imagecapture.js create mode 100644 test/wpt/tests/resources/chromium/mock-managed-config.js create mode 100644 test/wpt/tests/resources/chromium/mock-pressure-service.js create mode 100644 test/wpt/tests/resources/chromium/mock-pressure-service.js.headers create mode 100644 test/wpt/tests/resources/chromium/mock-subapps.js create mode 100644 test/wpt/tests/resources/chromium/mock-textdetection.js create mode 100644 test/wpt/tests/resources/chromium/mock-textdetection.js.headers create mode 100644 test/wpt/tests/resources/chromium/nfc-mock.js create mode 100644 test/wpt/tests/resources/chromium/web-bluetooth-test.js create mode 100644 test/wpt/tests/resources/chromium/web-bluetooth-test.js.headers create mode 100644 test/wpt/tests/resources/chromium/webusb-child-test.js create mode 100644 test/wpt/tests/resources/chromium/webusb-child-test.js.headers create mode 100644 test/wpt/tests/resources/chromium/webusb-test.js create mode 100644 test/wpt/tests/resources/chromium/webusb-test.js.headers create mode 100644 test/wpt/tests/resources/chromium/webxr-test-math-helper.js create mode 100644 test/wpt/tests/resources/chromium/webxr-test-math-helper.js.headers create mode 100644 test/wpt/tests/resources/chromium/webxr-test.js create mode 100644 test/wpt/tests/resources/chromium/webxr-test.js.headers create mode 100644 test/wpt/tests/resources/declarative-shadow-dom-polyfill.js create mode 100644 test/wpt/tests/resources/idlharness-shadowrealm.js create mode 100644 test/wpt/tests/resources/idlharness.js create mode 100644 test/wpt/tests/resources/idlharness.js.headers create mode 100644 test/wpt/tests/resources/readme.md create mode 100644 test/wpt/tests/resources/sriharness.js create mode 100644 test/wpt/tests/resources/test-only-api.js create mode 100644 test/wpt/tests/resources/test-only-api.js.headers create mode 100644 test/wpt/tests/resources/test-only-api.m.js create mode 100644 test/wpt/tests/resources/test-only-api.m.js.headers create mode 100644 test/wpt/tests/resources/test/README.md create mode 100644 test/wpt/tests/resources/test/conftest.py create mode 100644 test/wpt/tests/resources/test/harness.html create mode 100644 test/wpt/tests/resources/test/idl-helper.js create mode 100644 
test/wpt/tests/resources/test/nested-testharness.js create mode 100644 test/wpt/tests/resources/test/requirements.txt create mode 100644 test/wpt/tests/resources/test/tests/functional/abortsignal.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_async.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_async_bad_return.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_async_rejection.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_async_rejection_after_load.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_async_timeout.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_bad_return.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_count.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_err.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_err_multi.html create mode 100644 test/wpt/tests/resources/test/tests/functional/add_cleanup_sync_queue.html create mode 100644 test/wpt/tests/resources/test/tests/functional/api-tests-1.html create mode 100644 test/wpt/tests/resources/test/tests/functional/api-tests-2.html create mode 100644 test/wpt/tests/resources/test/tests/functional/api-tests-3.html create mode 100644 test/wpt/tests/resources/test/tests/functional/assert-array-equals.html create mode 100644 test/wpt/tests/resources/test/tests/functional/assert-throws-dom.html create mode 100644 test/wpt/tests/resources/test/tests/functional/force_timeout.html create mode 100644 test/wpt/tests/resources/test/tests/functional/generate-callback.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlDictionary/test_partial_interface_of.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlInterface/test_exposed_wildcard.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlInterface/test_immutable_prototype.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlInterface/test_interface_mixin.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlInterface/test_partial_interface_of.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlInterface/test_primary_interface_of.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlInterface/test_to_json_operation.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlNamespace/test_attribute.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlNamespace/test_operation.html create mode 100644 test/wpt/tests/resources/test/tests/functional/idlharness/IdlNamespace/test_partial_namespace.html create mode 100644 test/wpt/tests/resources/test/tests/functional/iframe-callback.html create mode 100644 test/wpt/tests/resources/test/tests/functional/iframe-consolidate-errors.html create mode 100644 test/wpt/tests/resources/test/tests/functional/iframe-consolidate-tests.html create mode 100644 test/wpt/tests/resources/test/tests/functional/iframe-msg.html create mode 100644 test/wpt/tests/resources/test/tests/functional/log-insertion.html create mode 100644 test/wpt/tests/resources/test/tests/functional/no-title.html create mode 
100644 test/wpt/tests/resources/test/tests/functional/order.html create mode 100644 test/wpt/tests/resources/test/tests/functional/promise-async.html create mode 100644 test/wpt/tests/resources/test/tests/functional/promise-with-sync.html create mode 100644 test/wpt/tests/resources/test/tests/functional/promise.html create mode 100644 test/wpt/tests/resources/test/tests/functional/queue.html create mode 100644 test/wpt/tests/resources/test/tests/functional/setup-function-worker.js create mode 100644 test/wpt/tests/resources/test/tests/functional/setup-worker-service.html create mode 100644 test/wpt/tests/resources/test/tests/functional/single-page-test-fail.html create mode 100644 test/wpt/tests/resources/test/tests/functional/single-page-test-no-assertions.html create mode 100644 test/wpt/tests/resources/test/tests/functional/single-page-test-no-body.html create mode 100644 test/wpt/tests/resources/test/tests/functional/single-page-test-pass.html create mode 100644 test/wpt/tests/resources/test/tests/functional/step_wait.html create mode 100644 test/wpt/tests/resources/test/tests/functional/step_wait_func.html create mode 100644 test/wpt/tests/resources/test/tests/functional/task-scheduling-promise-test.html create mode 100644 test/wpt/tests/resources/test/tests/functional/task-scheduling-test.html create mode 100644 test/wpt/tests/resources/test/tests/functional/uncaught-exception-handle.html create mode 100644 test/wpt/tests/resources/test/tests/functional/uncaught-exception-ignore.html create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-dedicated-uncaught-allow.html create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-dedicated-uncaught-single.html create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-dedicated.sub.html create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-error.js create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-service.html create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-shared.html create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-uncaught-allow.js create mode 100644 test/wpt/tests/resources/test/tests/functional/worker-uncaught-single.js create mode 100644 test/wpt/tests/resources/test/tests/functional/worker.js create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlArray/is_json_type.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlDictionary/get_reverse_inheritance_stack.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlDictionary/test_partial_dictionary.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/constructors.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/default_to_json_operation.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/do_member_unscopable_asserts.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/get_interface_object.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/get_interface_object_owner.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/get_legacy_namespace.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/get_qualified_name.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/get_reverse_inheritance_stack.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/has_default_to_json_regular_operation.html 
create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/has_to_json_regular_operation.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/should_have_interface_object.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterface/test_primary_interface_of_undefined.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterfaceMember/is_to_json_regular_operation.html create mode 100644 test/wpt/tests/resources/test/tests/unit/IdlInterfaceMember/toString.html create mode 100644 test/wpt/tests/resources/test/tests/unit/assert_implements.html create mode 100644 test/wpt/tests/resources/test/tests/unit/assert_implements_optional.html create mode 100644 test/wpt/tests/resources/test/tests/unit/assert_object_equals.html create mode 100644 test/wpt/tests/resources/test/tests/unit/async-test-return-restrictions.html create mode 100644 test/wpt/tests/resources/test/tests/unit/basic.html create mode 100644 test/wpt/tests/resources/test/tests/unit/exceptional-cases-timeouts.html create mode 100644 test/wpt/tests/resources/test/tests/unit/exceptional-cases.html create mode 100644 test/wpt/tests/resources/test/tests/unit/format-value.html create mode 100644 test/wpt/tests/resources/test/tests/unit/helpers.js create mode 100644 test/wpt/tests/resources/test/tests/unit/late-test.html create mode 100644 test/wpt/tests/resources/test/tests/unit/promise_setup-timeout.html create mode 100644 test/wpt/tests/resources/test/tests/unit/promise_setup.html create mode 100644 test/wpt/tests/resources/test/tests/unit/single_test.html create mode 100644 test/wpt/tests/resources/test/tests/unit/test-return-restrictions.html create mode 100644 test/wpt/tests/resources/test/tests/unit/throwing-assertions.html create mode 100644 test/wpt/tests/resources/test/tests/unit/unpaired-surrogates.html create mode 100644 test/wpt/tests/resources/test/tox.ini create mode 100644 test/wpt/tests/resources/test/wptserver.py create mode 100644 test/wpt/tests/resources/testdriver-actions.js create mode 100644 test/wpt/tests/resources/testdriver-vendor.js create mode 100644 test/wpt/tests/resources/testdriver-vendor.js.headers create mode 100644 test/wpt/tests/resources/testdriver.js create mode 100644 test/wpt/tests/resources/testdriver.js.headers create mode 100644 test/wpt/tests/resources/testharness.js create mode 100644 test/wpt/tests/resources/testharness.js.headers create mode 100644 test/wpt/tests/resources/testharnessreport.js create mode 100644 test/wpt/tests/resources/testharnessreport.js.headers create mode 100644 test/wpt/tests/resources/webidl2/build.sh create mode 100644 test/wpt/tests/resources/webidl2/lib/README.md create mode 100644 test/wpt/tests/resources/webidl2/lib/VERSION.md create mode 100644 test/wpt/tests/resources/webidl2/lib/webidl2.js create mode 100644 test/wpt/tests/resources/webidl2/lib/webidl2.js.headers create mode 100644 test/wpt/tests/service-workers/META.yml create mode 100644 test/wpt/tests/service-workers/cache-storage/META.yml create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-abort.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-add.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-delete.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-keys-attributes-for-service-worker.https.html create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-keys.https.any.js create mode 100644 
test/wpt/tests/service-workers/cache-storage/cache-match.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-matchAll.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-put.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-storage-buckets.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-storage-keys.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-storage-match.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/cache-storage.https.any.js create mode 100644 test/wpt/tests/service-workers/cache-storage/common.https.window.js create mode 100644 test/wpt/tests/service-workers/cache-storage/crashtests/cache-response-clone.https.html create mode 100644 test/wpt/tests/service-workers/cache-storage/credentials.https.html create mode 100644 test/wpt/tests/service-workers/cache-storage/cross-partition.https.tentative.html create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/blank.html create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/cache-keys-attributes-for-service-worker.js create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/common-worker.js create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/credentials-iframe.html create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/credentials-worker.js create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/fetch-status.py create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/iframe.html create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/simple.txt create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/test-helpers.js create mode 100644 test/wpt/tests/service-workers/cache-storage/resources/vary.py create mode 100644 test/wpt/tests/service-workers/cache-storage/sandboxed-iframes.https.html create mode 100644 test/wpt/tests/service-workers/idlharness.https.any.js create mode 100644 test/wpt/tests/service-workers/service-worker/Service-Worker-Allowed-header.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/close.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/extendable-message-event-constructor.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/extendable-message-event.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/fetch-on-the-right-interface.https.any.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/isSecureContext.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/isSecureContext.serviceworker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/postmessage.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/registration-attribute.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/close-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/error-worker.js create mode 100644 
test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/extendable-message-event-constructor-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/extendable-message-event-loopback-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/extendable-message-event-ping-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/extendable-message-event-pong-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/extendable-message-event-utils.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/extendable-message-event-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/postmessage-loopback-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/postmessage-ping-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/postmessage-pong-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/registration-attribute-newer-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/registration-attribute-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/unregister-controlling-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/unregister-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/update-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/resources/update-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/service-worker-error-event.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/unregister.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ServiceWorkerGlobalScope/update.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/about-blank-replacement.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/activate-event-after-install-state-change.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/activation-after-registration.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/activation.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/active.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/claim-affect-other-registration.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/claim-fetch.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/claim-not-using-registration.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/claim-shared-worker-fetch.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/claim-using-registration.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/claim-with-redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/claim-worker-fetch.https.html create mode 100644 
test/wpt/tests/service-workers/service-worker/client-id.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/client-navigate.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/client-url-of-blob-url-worker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-get-client-types.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-get-cross-origin.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-get-resultingClientId.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-get.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall-blob-url-worker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall-client-types.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall-exact-controller.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall-frozen.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall-include-uncontrolled.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall-on-evaluation.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall-order.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/clients-matchall.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/controlled-dedicatedworker-postMessage.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/controlled-iframe-postMessage.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/controller-on-disconnect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/controller-on-load.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/controller-on-reload.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/controller-with-no-fetch-event-handler.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/credentials.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/data-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/data-transfer-files.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/dedicated-worker-service-worker-interception.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/detached-context.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/embed-and-object-are-not-intercepted.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/extendable-event-async-waituntil.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/extendable-event-waituntil.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-audio-tainting.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-canvas-tainting-double-write.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-canvas-tainting-image-cache.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-canvas-tainting-image.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-canvas-tainting-video-cache.https.html create mode 100644 
test/wpt/tests/service-workers/service-worker/fetch-canvas-tainting-video-with-range-request.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-canvas-tainting-video.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-cors-exposed-header-names.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-cors-xhr.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-csp.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-error.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-add-async.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-after-navigation-within-page.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-async-respond-with.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-handled.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-is-history-backward-navigation-manual.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-is-history-forward-navigation-manual.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-is-reload-iframe-navigation-manual.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-is-reload-navigation-manual.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-network-error.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-referrer-policy.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-argument.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-body-loaded-in-chunk.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-custom-response.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-partial-stream.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-readable-stream-chunk.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-readable-stream.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-response-body-with-invalid-chunk.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-respond-with-stops-propagation.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-throws-after-respond-with.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-within-sw-manual.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event-within-sw.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event.https.h2.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-event.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-frame-resource.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-header-visibility.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-mixed-content-to-inscope.https.html 
create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-mixed-content-to-outscope.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-css-base-url.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-css-cross-origin.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-css-images.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-fallback.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-no-freshness-headers.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-resources.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-xhr-sync-error.https.window.js create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-xhr-sync-on-worker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-xhr-sync.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-request-xhr.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-response-taint.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-response-xhr.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/fetch-waits-for-activate.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/getregistration.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/getregistrations.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/global-serviceworker.https.any.js create mode 100644 test/wpt/tests/service-workers/service-worker/historical.https.any.js create mode 100644 test/wpt/tests/service-workers/service-worker/http-to-https-redirect-and-register.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/immutable-prototype-serviceworker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/import-scripts-cross-origin.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/import-scripts-data-url.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/import-scripts-mime-types.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/import-scripts-redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/import-scripts-resource-map.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/import-scripts-updated-flag.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/indexeddb.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/install-event-type.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/installing.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/interface-requirements-sw.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/invalid-blobtype.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/invalid-header.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/iso-latin1-header.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/local-url-inherit-controller.https.html create mode 100644 
test/wpt/tests/service-workers/service-worker/mime-sniffing.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/current/current.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/current/test-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/incumbent/incumbent.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/incumbent/test-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/relevant/relevant.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/relevant/test-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/test-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/multi-globals/url-parsing.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/multipart-image.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/multiple-register.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/multiple-update.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigate-window.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-headers.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/broken-chunked-encoding.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/chunked-encoding.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/empty-preload-response-body.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/get-state.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/navigationPreload.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/request-headers.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resource-timing.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/broken-chunked-encoding-scope.asis create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/broken-chunked-encoding-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/chunked-encoding-scope.py create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/chunked-encoding-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/cookie.py create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/empty-preload-response-body-scope.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/empty-preload-response-body-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/get-state-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/helpers.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/navigation-preload-worker.js create mode 100644 
test/wpt/tests/service-workers/service-worker/navigation-preload/resources/redirect-redirected.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/redirect-scope.py create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/redirect-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/request-headers-scope.py create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/request-headers-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/resource-timing-scope.py create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/resource-timing-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/samesite-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/samesite-sw-helper.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/samesite-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/resources/wait-for-activate-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/samesite-cookies.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-preload/samesite-iframe.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-redirect-body.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-redirect-resolution.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-redirect-to-http.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-sets-cookie.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-timing-extended.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/navigation-timing.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/nested-blob-url-workers.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/next-hop-protocol.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/no-dynamic-import-in-module.any.js create mode 100644 test/wpt/tests/service-workers/service-worker/no-dynamic-import.any.js create mode 100644 test/wpt/tests/service-workers/service-worker/onactivate-script-error.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/oninstall-script-error.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/opaque-response-preloaded.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/opaque-script.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/partitioned-claim.tentative.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/partitioned-cookies.tentative.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/partitioned-getRegistrations.tentative.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/partitioned-matchAll.tentative.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/partitioned.tentative.https.html create mode 100644 
test/wpt/tests/service-workers/service-worker/performance-timeline.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/postMessage-client-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/postmessage-blob-url.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/postmessage-from-waiting-serviceworker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/postmessage-msgport-to-client.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/postmessage-to-client-message-queue.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/postmessage-to-client.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/postmessage.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/ready.https.window.js create mode 100644 test/wpt/tests/service-workers/service-worker/redirected-response.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/referer.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/referrer-policy-header.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/referrer-toplevel-script-fetch.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/register-closed-window.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/register-default-scope.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/register-same-scope-different-script-url.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/register-wait-forever-in-install-worker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-basic.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-end-to-end.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-events.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-iframe.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-mime-types.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-schedule-job.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-scope-module-static-import.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-scope.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-script-module.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-script-url.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-script.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-security-error.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-service-worker-attributes.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/registration-updateviacache.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/rejections.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/request-end-to-end.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resource-timing-bodySize.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resource-timing-cross-origin.https.html create mode 100644 
test/wpt/tests/service-workers/service-worker/resource-timing-fetch-variants.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resource-timing.sub.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/404.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-blank-dynamic-nested-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-blank-nested-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-frame.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-ping-frame.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-popup-frame.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-srcdoc-nested-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-uncontrolled-nested-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/about-blank-replacement-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/basic-module-2.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/basic-module.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/blank.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/bytecheck-worker-imported-script.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/bytecheck-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-blob-url-worker-fetch-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-nested-worker-fetch-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-nested-worker-fetch-parent-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-shared-worker-fetch-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-shared-worker-fetch-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-with-redirect-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-worker-fetch-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-worker-fetch-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/claim-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/classic-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/client-id-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/client-navigate-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/client-navigate-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/client-navigated-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/client-url-of-blob-url-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/client-url-of-blob-url-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-frame-freeze.html create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/clients-get-client-types-frame-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-client-types-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-client-types-shared-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-client-types-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-cross-origin-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-other-origin.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-resultingClientId-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-get-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-matchall-blob-url-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-matchall-client-types-dedicated-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-matchall-client-types-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-matchall-client-types-shared-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-matchall-on-evaluation-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/clients-matchall-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/controlled-frame-postMessage.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/controlled-worker-late-postMessage.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/controlled-worker-postMessage.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/cors-approved.txt create mode 100644 test/wpt/tests/service-workers/service-worker/resources/cors-approved.txt.headers create mode 100644 test/wpt/tests/service-workers/service-worker/resources/cors-denied.txt create mode 100644 test/wpt/tests/service-workers/service-worker/resources/create-blob-url-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/create-out-of-scope-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/echo-content.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/echo-cookie-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/echo-message-to-source-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/embed-and-object-are-not-intercepted-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/embed-image-is-not-intercepted-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/embed-is-not-intercepted-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/embed-navigation-is-not-intercepted-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/embedded-content-from-server.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/embedded-content-from-service-worker.html create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/empty-but-slow-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/empty-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/empty.h2.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/empty.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/empty.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/enable-client-message-queue.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/end-to-end-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/events-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/extendable-event-async-waituntil.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/extendable-event-waituntil.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fail-on-fetch-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-access-control-login.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-access-control.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-canvas-tainting-double-write-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-canvas-tainting-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-canvas-tainting-tests.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-cors-exposed-header-names-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-cors-xhr-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-csp-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-csp-iframe.html.sub.headers create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-error-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-add-async-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-after-navigation-within-page-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-async-respond-with-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-handled-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-network-error-controllee-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-network-error-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-network-fallback-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-argument-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-argument-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-body-loaded-in-chunk-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-custom-response-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-partial-stream-worker.js create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-readable-stream-chunk-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-readable-stream-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-response-body-with-invalid-chunk-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-response-body-with-invalid-chunk-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-respond-with-stops-propagation-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-test-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-event-within-sw-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-header-visibility-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-mixed-content-iframe-inscope-to-inscope.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-mixed-content-iframe-inscope-to-outscope.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-mixed-content-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-base-url-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-base-url-style.css create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-base-url-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-cross-origin-mime-check-cross.css create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-cross-origin-mime-check-cross.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-cross-origin-mime-check-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-cross-origin-mime-check-same.css create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-cross-origin-mime-check-same.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-cross-origin-read-contents.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-css-cross-origin-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-fallback-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-fallback-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-html-imports-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-html-imports-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-no-freshness-headers-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-no-freshness-headers-script.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-no-freshness-headers-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-redirect-iframe.html create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/fetch-request-resources-iframe.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-resources-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-xhr-iframe.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-xhr-sync-error-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-xhr-sync-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-xhr-sync-on-worker-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-xhr-sync-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-request-xhr-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-response-taint-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-response-xhr-iframe.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-response-xhr-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-response.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-response.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-rewrite-worker-referrer-policy.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-rewrite-worker-referrer-policy.js.headers create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-rewrite-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-rewrite-worker.js.headers create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-variants-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/fetch-waits-for-activate-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/form-poster.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/frame-for-getregistrations.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/get-resultingClientId-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/http-to-https-redirect-and-register-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/iframe-with-fetch-variants.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/iframe-with-image.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/immutable-prototype-serviceworker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-echo-cookie-worker-module.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-echo-cookie-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-mime-type-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-relative.xsl create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-404-after-update-plus-update-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-404-after-update.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-404.js create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/import-scripts-cross-origin-worker.sub.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-data-url-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-diff-resource-map-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-echo.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-get.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-mime-types-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-redirect-import.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-redirect-on-second-time-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-redirect-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-resource-map-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-updated-flag-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/import-scripts-version.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/imported-classic-script.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/imported-module-script.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/indexeddb-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/install-event-type-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/install-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/interface-requirements-worker.sub.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/invalid-blobtype-iframe.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/invalid-blobtype-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/invalid-chunked-encoding-with-flush.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/invalid-chunked-encoding.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/invalid-header-iframe.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/invalid-header-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/iso-latin1-header-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/iso-latin1-header-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/load_worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/loaded.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/local-url-inherit-controller-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/local-url-inherit-controller-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/location-setter.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/malformed-http-response.asis create mode 100644 test/wpt/tests/service-workers/service-worker/resources/malformed-worker.py create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/message-vs-microtask.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/mime-sniffing-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/mime-type-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/mint-new-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/missing.asis create mode 100644 test/wpt/tests/service-workers/service-worker/resources/module-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/multipart-image-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/multipart-image-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/multipart-image.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigate-window-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-headers-server.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-body-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-body.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-other-origin.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-out-scope.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-scope1.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-scope2.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-to-http-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-redirect-to-http-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-timing-worker-extended.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/navigation-timing-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/nested-blob-url-worker-created-from-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/nested-blob-url-workers.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/nested-iframe-parent.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/nested-parent.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/nested-worker-created-from-blob-url-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/nested_load_worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/no-dynamic-import.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/notification_icon.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/object-image-is-not-intercepted-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/object-is-not-intercepted-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/object-navigation-is-not-intercepted-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/onactivate-throw-error-from-nested-event-worker.js create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/onactivate-throw-error-then-cancel-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/onactivate-throw-error-then-prevent-default-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/onactivate-throw-error-with-empty-onerror-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/onactivate-throw-error-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/onactivate-waituntil-forever.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/onfetch-waituntil-forever.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/oninstall-throw-error-from-nested-event-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/oninstall-throw-error-then-cancel-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/oninstall-throw-error-then-prevent-default-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/oninstall-throw-error-with-empty-onerror-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/oninstall-throw-error-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/oninstall-waituntil-forever.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/oninstall-waituntil-throw-error-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/onparse-infiniteloop-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/opaque-response-being-preloaded-xhr.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/opaque-response-preloaded-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/opaque-response-preloaded-xhr.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/opaque-script-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/opaque-script-large.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/opaque-script-small.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/opaque-script-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/other.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/override_assert_object_equals.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-cookies-3p-credentialless-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-cookies-3p-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-cookies-3p-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-cookies-3p-window.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-cookies-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-service-worker-iframe-claim.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-service-worker-nested-iframe-child.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-service-worker-nested-iframe-parent.html create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/partitioned-service-worker-third-party-iframe-getRegistrations.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-service-worker-third-party-iframe-matchAll.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-service-worker-third-party-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-service-worker-third-party-window.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-storage-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/partitioned-utils.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/pass-through-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/pass.txt create mode 100644 test/wpt/tests/service-workers/service-worker/resources/performance-timeline-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-blob-url.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-dictionary-transferables-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-echo-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-fetched-text.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-msgport-to-client-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-on-load-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-to-client-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-transferables-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/postmessage-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/range-request-to-different-origins-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/range-request-with-different-cors-modes-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/redirect-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/redirect.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/referer-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/referrer-policy-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/register-closed-window-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/register-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/register-rewrite-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/registration-tests-mime-types.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/registration-tests-scope.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/registration-tests-script-url.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/registration-tests-script.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/registration-tests-security-error.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/registration-worker.js create mode 100644 
test/wpt/tests/service-workers/service-worker/resources/reject-install-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/reply-to-message.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/request-end-to-end-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/request-headers.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/resource-timing-iframe.sub.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/resource-timing-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/respond-then-throw-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/respond-with-body-accessed-response-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/respond-with-body-accessed-response-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/respond-with-body-accessed-response.jsonp create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sample-worker-interceptor.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sample.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sample.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sample.txt create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sandboxed-iframe-fetch-event-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sandboxed-iframe-fetch-event-iframe.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sandboxed-iframe-fetch-event-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/sandboxed-iframe-navigator-serviceworker-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/scope1/module-worker-importing-redirect-to-scope2.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/scope1/module-worker-importing-scope2.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/scope1/redirect.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/scope2/import-scripts-echo.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/scope2/imported-module-script.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/scope2/simple.txt create mode 100644 test/wpt/tests/service-workers/service-worker/resources/scope2/worker_interception_redirect_webworker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/secure-context-service-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/secure-context/sender.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/secure-context/window.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/service-worker-csp-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/service-worker-header.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/service-worker-interception-dynamic-import-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/service-worker-interception-network-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/service-worker-interception-service-worker.js create mode 
100644 test/wpt/tests/service-workers/service-worker/resources/service-worker-interception-static-import-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/silence.oga create mode 100644 test/wpt/tests/service-workers/service-worker/resources/simple-intercept-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/simple-intercept-worker.js.headers create mode 100644 test/wpt/tests/service-workers/service-worker/resources/simple.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/simple.txt create mode 100644 test/wpt/tests/service-workers/service-worker/resources/skip-waiting-installed-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/skip-waiting-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/square.png create mode 100644 test/wpt/tests/service-workers/service-worker/resources/square.png.sub.headers create mode 100644 test/wpt/tests/service-workers/service-worker/resources/stalling-service-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/subdir/blank.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/subdir/import-scripts-echo.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/subdir/simple.txt create mode 100644 test/wpt/tests/service-workers/service-worker/resources/subdir/worker_interception_redirect_webworker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/success.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/svg-target-reftest-001-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/svg-target-reftest-001.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/svg-target-reftest-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/test-helpers.sub.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/test-request-headers-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/test-request-headers-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/test-request-mode-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/test-request-mode-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/testharness-helpers.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/trickle.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/type-check-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/unregister-controller-page.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/unregister-immediately-helpers.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/unregister-rewrite-worker.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-claim-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-during-installation-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-during-installation-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-fetch-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-max-aged-worker-imported-script.py 
create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-max-aged-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-missing-import-scripts-imported-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-missing-import-scripts-main-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-nocookie-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-recovery-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-registration-with-type.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-smaller-body-after-update-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-smaller-body-before-update-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-worker-from-file.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update-worker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update/update-after-oneday.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/update_shell.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/vtt-frame.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/wait-forever-in-install-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/websocket-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/websocket.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/window-opener.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/windowclient-navigate-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/worker-client-id-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/worker-fetching-cross-origin.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/worker-interception-redirect-serviceworker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/worker-interception-redirect-webworker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/worker-load-interceptor.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/worker-testharness.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/worker_interception_redirect_webworker.py create mode 100644 test/wpt/tests/service-workers/service-worker/resources/xhr-content-length-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/xhr-iframe.html create mode 100644 test/wpt/tests/service-workers/service-worker/resources/xhr-response-url-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/xsl-base-url-iframe.xml create mode 100644 test/wpt/tests/service-workers/service-worker/resources/xsl-base-url-worker.js create mode 100644 test/wpt/tests/service-workers/service-worker/resources/xslt-pass.xsl create mode 100644 test/wpt/tests/service-workers/service-worker/respond-with-body-accessed-response.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/same-site-cookies.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/sandboxed-iframe-fetch-event.https.html create mode 100644 
test/wpt/tests/service-workers/service-worker/sandboxed-iframe-navigator-serviceworker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/secure-context.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/service-worker-csp-connect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/service-worker-csp-default.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/service-worker-csp-script.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/service-worker-header.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/serviceworker-message-event-historical.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/serviceworkerobject-scripturl.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/skip-waiting-installed.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/skip-waiting-using-registration.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/skip-waiting-without-client.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/skip-waiting-without-using-registration.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/skip-waiting.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/state.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/svg-target-reftest.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/synced-state.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/README.md create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/resources/direct.txt create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/resources/simple-test-for-condition-main-resource.html create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/resources/simple.html create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/resources/static-router-sw.js create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/resources/test-helpers.sub.js create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/static-router-main-resource.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/tentative/static-router/static-router-subresource.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/uncontrolled-page.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/unregister-controller.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/unregister-immediately-before-installed.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/unregister-immediately-during-extendable-events.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/unregister-immediately.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/unregister-then-register-new-script.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/unregister-then-register.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/unregister.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-after-navigation-fetch-event.https.html create mode 100644 
test/wpt/tests/service-workers/service-worker/update-after-navigation-redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-after-oneday.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-bytecheck-cors-import.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-bytecheck.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-import-scripts.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-missing-import-scripts.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-module-request-mode.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-no-cache-request-headers.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-not-allowed.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-on-navigation.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-recovery.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-registration-with-type.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update-result.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/update.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/waiting.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/websocket-in-service-worker.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/websocket.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/webvtt-cross-origin.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/windowclient-navigate.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/worker-client-id.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/worker-in-sandboxed-iframe-by-csp-fetch-event.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/worker-interception-redirect.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/worker-interception.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/xhr-content-length.https.window.js create mode 100644 test/wpt/tests/service-workers/service-worker/xhr-response-url.https.html create mode 100644 test/wpt/tests/service-workers/service-worker/xsl-base-url.https.html create mode 100644 test/wpt/tests/storage/META.yml create mode 100644 test/wpt/tests/storage/README.md create mode 100644 test/wpt/tests/storage/buckets/META.yml create mode 100644 test/wpt/tests/storage/buckets/bucket-quota-indexeddb.tentative.https.any.js create mode 100644 test/wpt/tests/storage/buckets/bucket-storage-policy.tentative.https.any.js create mode 100644 test/wpt/tests/storage/buckets/resources/cached-resource.txt create mode 100644 test/wpt/tests/storage/buckets/resources/util.js create mode 100644 test/wpt/tests/storage/estimate-indexeddb.https.any.js create mode 100644 test/wpt/tests/storage/estimate-parallel.https.any.js create mode 100644 test/wpt/tests/storage/estimate-usage-details-caches.https.tentative.any.js create mode 100644 test/wpt/tests/storage/estimate-usage-details-indexeddb.https.tentative.any.js create mode 100644 test/wpt/tests/storage/estimate-usage-details-service-workers.https.tentative.window.js create mode 100644 
test/wpt/tests/storage/estimate-usage-details.https.tentative.any.js create mode 100644 test/wpt/tests/storage/helpers.js create mode 100644 test/wpt/tests/storage/idlharness.https.any.js create mode 100644 test/wpt/tests/storage/opaque-origin.https.window.js create mode 100644 test/wpt/tests/storage/partitioned-estimate-usage-details-caches.tentative.https.sub.html create mode 100644 test/wpt/tests/storage/partitioned-estimate-usage-details-indexeddb.tentative.https.sub.html create mode 100644 test/wpt/tests/storage/partitioned-estimate-usage-details-service-workers.tentative.https.sub.html create mode 100644 test/wpt/tests/storage/permission-query.https.any.js create mode 100644 test/wpt/tests/storage/persist-permission-manual.https.html create mode 100644 test/wpt/tests/storage/persisted.https.any.js create mode 100644 test/wpt/tests/storage/quotachange-in-detached-iframe.tentative.https.html create mode 100644 test/wpt/tests/storage/resources/partitioned-estimate-usage-details-caches-helper-frame.html create mode 100644 test/wpt/tests/storage/resources/partitioned-estimate-usage-details-indexeddb-helper-frame.html create mode 100644 test/wpt/tests/storage/resources/partitioned-estimate-usage-details-service-workers-helper-frame.html create mode 100644 test/wpt/tests/storage/resources/worker.js create mode 100644 test/wpt/tests/storage/storagemanager-estimate.https.any.js create mode 100644 test/wpt/tests/storage/storagemanager-persist-persisted-match.https.any.js create mode 100644 test/wpt/tests/storage/storagemanager-persist.https.window.js create mode 100644 test/wpt/tests/storage/storagemanager-persist.https.worker.js create mode 100644 test/wpt/tests/storage/storagemanager-persisted.https.any.js create mode 100644 test/wpt/tests/websockets/Close-1000-reason.any.js create mode 100644 test/wpt/tests/websockets/Close-1000-verify-code.any.js create mode 100644 test/wpt/tests/websockets/Close-1000.any.js create mode 100644 test/wpt/tests/websockets/Close-1005-verify-code.any.js create mode 100644 test/wpt/tests/websockets/Close-1005.any.js create mode 100644 test/wpt/tests/websockets/Close-2999-reason.any.js create mode 100644 test/wpt/tests/websockets/Close-3000-reason.any.js create mode 100644 test/wpt/tests/websockets/Close-3000-verify-code.any.js create mode 100644 test/wpt/tests/websockets/Close-4999-reason.any.js create mode 100644 test/wpt/tests/websockets/Close-Reason-124Bytes.any.js create mode 100644 test/wpt/tests/websockets/Close-delayed.any.js create mode 100644 test/wpt/tests/websockets/Close-onlyReason.any.js create mode 100644 test/wpt/tests/websockets/Close-readyState-Closed.any.js create mode 100644 test/wpt/tests/websockets/Close-readyState-Closing.any.js create mode 100644 test/wpt/tests/websockets/Close-reason-unpaired-surrogates.any.js create mode 100644 test/wpt/tests/websockets/Close-server-initiated-close.any.js create mode 100644 test/wpt/tests/websockets/Close-undefined.any.js create mode 100644 test/wpt/tests/websockets/Create-asciiSep-protocol-string.any.js create mode 100644 test/wpt/tests/websockets/Create-blocked-port.any.js create mode 100644 test/wpt/tests/websockets/Create-extensions-empty.any.js create mode 100644 test/wpt/tests/websockets/Create-http-urls.any.js create mode 100644 test/wpt/tests/websockets/Create-invalid-urls.any.js create mode 100644 test/wpt/tests/websockets/Create-non-absolute-url.any.js create mode 100644 test/wpt/tests/websockets/Create-nonAscii-protocol-string.any.js create mode 100644 
test/wpt/tests/websockets/Create-on-worker-shutdown.any.js create mode 100644 test/wpt/tests/websockets/Create-protocol-with-space.any.js create mode 100644 test/wpt/tests/websockets/Create-protocols-repeated-case-insensitive.any.js create mode 100644 test/wpt/tests/websockets/Create-protocols-repeated.any.js create mode 100644 test/wpt/tests/websockets/Create-url-with-space.any.js create mode 100644 test/wpt/tests/websockets/Create-url-with-windows-1252-encoding.html create mode 100644 test/wpt/tests/websockets/Create-valid-url-array-protocols.any.js create mode 100644 test/wpt/tests/websockets/Create-valid-url-binaryType-blob.any.js create mode 100644 test/wpt/tests/websockets/Create-valid-url-protocol-empty.any.js create mode 100644 test/wpt/tests/websockets/Create-valid-url-protocol-setCorrectly.any.js create mode 100644 test/wpt/tests/websockets/Create-valid-url-protocol-string.any.js create mode 100644 test/wpt/tests/websockets/Create-valid-url-protocol.any.js create mode 100644 test/wpt/tests/websockets/Create-valid-url.any.js create mode 100644 test/wpt/tests/websockets/META.yml create mode 100644 test/wpt/tests/websockets/README.md create mode 100644 test/wpt/tests/websockets/Send-0byte-data.any.js create mode 100644 test/wpt/tests/websockets/Send-65K-data.any.js create mode 100644 test/wpt/tests/websockets/Send-before-open.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-65K-arraybuffer.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybuffer.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-float32.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-float64.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-int16-offset.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-int32.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-int8.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-uint16-offset-length.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-uint32-offset.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-uint8-offset-length.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-arraybufferview-uint8-offset.any.js create mode 100644 test/wpt/tests/websockets/Send-binary-blob.any.js create mode 100644 test/wpt/tests/websockets/Send-data.any.js create mode 100644 test/wpt/tests/websockets/Send-data.worker.js create mode 100644 test/wpt/tests/websockets/Send-null.any.js create mode 100644 test/wpt/tests/websockets/Send-paired-surrogates.any.js create mode 100644 test/wpt/tests/websockets/Send-unicode-data.any.js create mode 100644 test/wpt/tests/websockets/Send-unpaired-surrogates.any.js create mode 100644 test/wpt/tests/websockets/back-forward-cache-with-closed-websocket-connection-ccns.tentative.window.js create mode 100644 test/wpt/tests/websockets/back-forward-cache-with-closed-websocket-connection.window.js create mode 100644 test/wpt/tests/websockets/back-forward-cache-with-open-websocket-connection-ccns.tentative.window.js create mode 100644 test/wpt/tests/websockets/back-forward-cache-with-open-websocket-connection.window.js create mode 100644 test/wpt/tests/websockets/basic-auth.any.js create mode 100644 test/wpt/tests/websockets/binary/001.html create mode 100644 test/wpt/tests/websockets/binary/002.html create mode 100644 test/wpt/tests/websockets/binary/004.html create mode 100644 
test/wpt/tests/websockets/binary/005.html create mode 100644 test/wpt/tests/websockets/binaryType-wrong-value.any.js create mode 100644 test/wpt/tests/websockets/bufferedAmount-unchanged-by-sync-xhr.any.js create mode 100644 test/wpt/tests/websockets/close-invalid.any.js create mode 100644 test/wpt/tests/websockets/closing-handshake/002.html create mode 100644 test/wpt/tests/websockets/closing-handshake/003.html create mode 100644 test/wpt/tests/websockets/closing-handshake/004.html create mode 100644 test/wpt/tests/websockets/constants.sub.js create mode 100644 test/wpt/tests/websockets/constructor.any.js create mode 100644 test/wpt/tests/websockets/constructor/001.html create mode 100644 test/wpt/tests/websockets/constructor/004.html create mode 100644 test/wpt/tests/websockets/constructor/005.html create mode 100644 test/wpt/tests/websockets/constructor/006.html create mode 100644 test/wpt/tests/websockets/constructor/007.html create mode 100644 test/wpt/tests/websockets/constructor/008.html create mode 100644 test/wpt/tests/websockets/constructor/009.html create mode 100644 test/wpt/tests/websockets/constructor/010.html create mode 100644 test/wpt/tests/websockets/constructor/011.html create mode 100644 test/wpt/tests/websockets/constructor/012.html create mode 100644 test/wpt/tests/websockets/constructor/013.html create mode 100644 test/wpt/tests/websockets/constructor/014.html create mode 100644 test/wpt/tests/websockets/constructor/016.html create mode 100644 test/wpt/tests/websockets/constructor/017.html create mode 100644 test/wpt/tests/websockets/constructor/018.html create mode 100644 test/wpt/tests/websockets/constructor/019.html create mode 100644 test/wpt/tests/websockets/constructor/020.html create mode 100644 test/wpt/tests/websockets/constructor/021.html create mode 100644 test/wpt/tests/websockets/constructor/022.html create mode 100644 test/wpt/tests/websockets/cookies/001.html create mode 100644 test/wpt/tests/websockets/cookies/002.html create mode 100644 test/wpt/tests/websockets/cookies/003.html create mode 100644 test/wpt/tests/websockets/cookies/004.html create mode 100644 test/wpt/tests/websockets/cookies/005.html create mode 100644 test/wpt/tests/websockets/cookies/006.html create mode 100644 test/wpt/tests/websockets/cookies/007.html create mode 100644 test/wpt/tests/websockets/cookies/support/set-cookie.py create mode 100644 test/wpt/tests/websockets/cookies/support/websocket-cookies-helper.sub.js create mode 100644 test/wpt/tests/websockets/cookies/third-party-cookie-accepted.https.html create mode 100644 test/wpt/tests/websockets/eventhandlers.any.js create mode 100644 test/wpt/tests/websockets/extended-payload-length.html create mode 100644 test/wpt/tests/websockets/handlers/basic_auth_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/delayed-passive-close_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/echo-cookie_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/echo-query_v13_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/echo-query_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/echo_close_data_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/echo_exit_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/echo_raw_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/echo_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/empty-message_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/handshake_no_extensions_wsh.py create mode 100644 
test/wpt/tests/websockets/handlers/handshake_no_protocol_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/handshake_protocol_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/handshake_sleep_2_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/invalid_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/msg_channel_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/origin_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/protocol_array_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/protocol_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/receive-backpressure_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/receive-many-with-backpressure_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/referrer_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/send-backpressure_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/set-cookie-secure_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/set-cookie_http_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/set-cookie_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/set-cookies-samesite_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/simple_handshake_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/sleep_10_v13_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/stash_responder_blocking_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/stash_responder_wsh.py create mode 100644 test/wpt/tests/websockets/handlers/wrong_accept_key_wsh.py create mode 100644 test/wpt/tests/websockets/idlharness.any.js create mode 100644 test/wpt/tests/websockets/interfaces/CloseEvent/clean-close.html create mode 100644 test/wpt/tests/websockets/interfaces/CloseEvent/constructor.html create mode 100644 test/wpt/tests/websockets/interfaces/CloseEvent/historical.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-arraybuffer.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-blob.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-defineProperty-getter.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-defineProperty-setter.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-deleting.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-getting.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-initial.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-large.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-readonly.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/bufferedAmount/bufferedAmount-unicode.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/close/close-basic.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/close/close-connecting.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/close/close-multiple.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/close/close-nested.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/close/close-replace.html create mode 100644 
test/wpt/tests/websockets/interfaces/WebSocket/close/close-return.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/constants/001.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/constants/002.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/constants/003.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/constants/004.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/constants/005.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/constants/006.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/001.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/002.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/003.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/004.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/006.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/007.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/008.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/009.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/010.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/011.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/012.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/013.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/014.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/015.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/016.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/017.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/018.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/019.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/events/020.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/extensions/001.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/protocol/protocol-initial.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/001.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/002.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/003.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/004.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/005.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/006.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/007.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/readyState/008.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/001.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/002.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/003.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/004.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/005.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/006.html create mode 100644 
test/wpt/tests/websockets/interfaces/WebSocket/send/007.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/008.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/009.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/010.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/011.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/send/012.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/url/001.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/url/002.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/url/003.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/url/004.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/url/005.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/url/006.html create mode 100644 test/wpt/tests/websockets/interfaces/WebSocket/url/resolve.html create mode 100644 test/wpt/tests/websockets/keeping-connection-open/001.html create mode 100644 test/wpt/tests/websockets/mixed-content.https.any.js create mode 100644 test/wpt/tests/websockets/multi-globals/message-received.html create mode 100644 test/wpt/tests/websockets/multi-globals/support/incumbent.sub.html create mode 100644 test/wpt/tests/websockets/multi-globals/support/relevant.html create mode 100644 test/wpt/tests/websockets/multi-globals/url-parsing/current/current.html create mode 100644 test/wpt/tests/websockets/multi-globals/url-parsing/incumbent/incumbent.html create mode 100644 test/wpt/tests/websockets/multi-globals/url-parsing/url-parsing.html create mode 100644 test/wpt/tests/websockets/opening-handshake/001.html create mode 100644 test/wpt/tests/websockets/opening-handshake/002.html create mode 100644 test/wpt/tests/websockets/opening-handshake/003-sets-origin.worker.js create mode 100644 test/wpt/tests/websockets/opening-handshake/003.html create mode 100644 test/wpt/tests/websockets/opening-handshake/005.html create mode 100644 test/wpt/tests/websockets/referrer.any.js create mode 100644 test/wpt/tests/websockets/remove-own-iframe-during-onerror.window.js create mode 100644 test/wpt/tests/websockets/resources/websockets-test-helpers.sub.js create mode 100644 test/wpt/tests/websockets/security/001.html create mode 100644 test/wpt/tests/websockets/security/002.html create mode 100644 test/wpt/tests/websockets/security/check.py create mode 100644 test/wpt/tests/websockets/send-many-64K-messages-with-backpressure.any.js create mode 100644 test/wpt/tests/websockets/stream/tentative/README.md create mode 100644 test/wpt/tests/websockets/stream/tentative/abort.any.js create mode 100644 test/wpt/tests/websockets/stream/tentative/backpressure-receive.any.js create mode 100644 test/wpt/tests/websockets/stream/tentative/backpressure-send.any.js create mode 100644 test/wpt/tests/websockets/stream/tentative/close.any.js create mode 100644 test/wpt/tests/websockets/stream/tentative/constructor.any.js create mode 100644 test/wpt/tests/websockets/stream/tentative/resources/url-constants.js create mode 100644 test/wpt/tests/websockets/unload-a-document/001-1.html create mode 100644 test/wpt/tests/websockets/unload-a-document/001-2.html create mode 100644 test/wpt/tests/websockets/unload-a-document/001.html create mode 100644 test/wpt/tests/websockets/unload-a-document/002-1.html create mode 100644 test/wpt/tests/websockets/unload-a-document/002-2.html create mode 100644 
test/wpt/tests/websockets/unload-a-document/002.html create mode 100644 test/wpt/tests/websockets/unload-a-document/003.html create mode 100644 test/wpt/tests/websockets/unload-a-document/004.html create mode 100644 test/wpt/tests/websockets/unload-a-document/005-1.html create mode 100644 test/wpt/tests/websockets/unload-a-document/005.html create mode 100644 test/wpt/tests/wpt create mode 100644 test/wpt/tests/wpt.py create mode 100644 test/wpt/tests/xhr/META.yml create mode 100644 test/wpt/tests/xhr/README.md create mode 100644 test/wpt/tests/xhr/XMLHttpRequest-withCredentials.any.js create mode 100644 test/wpt/tests/xhr/abort-after-receive.any.js create mode 100644 test/wpt/tests/xhr/abort-after-send.any.js create mode 100644 test/wpt/tests/xhr/abort-after-stop.window.js create mode 100644 test/wpt/tests/xhr/abort-after-timeout.any.js create mode 100644 test/wpt/tests/xhr/abort-during-done.window.js create mode 100644 test/wpt/tests/xhr/abort-during-headers-received.window.js create mode 100644 test/wpt/tests/xhr/abort-during-loading.window.js create mode 100644 test/wpt/tests/xhr/abort-during-open.any.js create mode 100644 test/wpt/tests/xhr/abort-during-readystatechange.any.js create mode 100644 test/wpt/tests/xhr/abort-during-unsent.any.js create mode 100644 test/wpt/tests/xhr/abort-during-upload.any.js create mode 100644 test/wpt/tests/xhr/abort-event-abort.any.js create mode 100644 test/wpt/tests/xhr/abort-event-listeners.any.js create mode 100644 test/wpt/tests/xhr/abort-event-loadend.any.js create mode 100644 test/wpt/tests/xhr/abort-event-order.htm create mode 100644 test/wpt/tests/xhr/abort-upload-event-abort.any.js create mode 100644 test/wpt/tests/xhr/abort-upload-event-loadend.any.js create mode 100644 test/wpt/tests/xhr/access-control-and-redirects-async-same-origin.any.js create mode 100644 test/wpt/tests/xhr/access-control-and-redirects-async.any.js create mode 100644 test/wpt/tests/xhr/access-control-and-redirects.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-access-control-origin-header-data-url.htm create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-access-control-origin-header.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-async.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-non-cors-safelisted-method-async.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-non-cors-safelisted-method.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-preflight-cache-invalidation-by-header.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-preflight-cache-invalidation-by-method.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-preflight-cache-timeout.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-preflight-cache.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow-star.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-allow.any.js create mode 100644 test/wpt/tests/xhr/access-control-basic-cors-safelisted-request-headers.htm create mode 100644 test/wpt/tests/xhr/access-control-basic-cors-safelisted-response-headers.htm create mode 100644 test/wpt/tests/xhr/access-control-basic-denied.htm create mode 100644 test/wpt/tests/xhr/access-control-basic-get-fail-non-simple.htm create mode 100644 test/wpt/tests/xhr/access-control-basic-non-cors-safelisted-content-type.htm create mode 100644 test/wpt/tests/xhr/access-control-basic-post-success-no-content-type.htm 
create mode 100644 test/wpt/tests/xhr/access-control-basic-post-with-non-cors-safelisted-content-type.htm create mode 100644 test/wpt/tests/xhr/access-control-basic-preflight-denied.htm create mode 100644 test/wpt/tests/xhr/access-control-expose-headers-on-redirect.html create mode 100644 test/wpt/tests/xhr/access-control-preflight-async-header-denied.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-async-method-denied.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-async-not-supported.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-credential-async.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-credential-sync.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-headers-async.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-headers-sync.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-allow-headers-returns-star.any.js create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-header-lowercase.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-header-returns-origin.any.js create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-header-sorted.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-headers-origin.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-invalid-status-301.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-invalid-status-400.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-invalid-status-501.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-request-must-not-contain-cookie.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-sync-header-denied.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-sync-method-denied.htm create mode 100644 test/wpt/tests/xhr/access-control-preflight-sync-not-supported.htm create mode 100644 test/wpt/tests/xhr/access-control-recursive-failed-request.htm create mode 100644 test/wpt/tests/xhr/access-control-response-with-body-sync.htm create mode 100644 test/wpt/tests/xhr/access-control-response-with-body.htm create mode 100644 test/wpt/tests/xhr/access-control-response-with-exposed-headers.htm create mode 100644 test/wpt/tests/xhr/access-control-sandboxed-iframe-allow-origin-null.htm create mode 100644 test/wpt/tests/xhr/access-control-sandboxed-iframe-allow.htm create mode 100644 test/wpt/tests/xhr/access-control-sandboxed-iframe-denied-without-wildcard.htm create mode 100644 test/wpt/tests/xhr/access-control-sandboxed-iframe-denied.htm create mode 100644 test/wpt/tests/xhr/allow-lists-starting-with-comma.htm create mode 100644 test/wpt/tests/xhr/anonymous-mode-unsupported.htm create mode 100644 test/wpt/tests/xhr/blob-range.any.js create mode 100644 test/wpt/tests/xhr/close-worker-with-xhr-in-progress.html create mode 100644 test/wpt/tests/xhr/content-type-unmodified.any.js create mode 100644 test/wpt/tests/xhr/cookies.http.html create mode 100644 test/wpt/tests/xhr/cors-expose-star.sub.any.js create mode 100644 test/wpt/tests/xhr/cors-upload.any.js create mode 100644 test/wpt/tests/xhr/data-uri.htm create mode 100644 test/wpt/tests/xhr/event-abort.any.js create mode 100644 test/wpt/tests/xhr/event-error-order.sub.html create mode 100644 test/wpt/tests/xhr/event-error.sub.any.js create mode 100644 test/wpt/tests/xhr/event-load.any.js create mode 100644 test/wpt/tests/xhr/event-loadend.any.js create mode 100644 
test/wpt/tests/xhr/event-loadstart-upload.any.js create mode 100644 test/wpt/tests/xhr/event-loadstart.any.js create mode 100644 test/wpt/tests/xhr/event-progress.any.js create mode 100644 test/wpt/tests/xhr/event-readystate-sync-open.any.js create mode 100644 test/wpt/tests/xhr/event-readystatechange-loaded.any.js create mode 100644 test/wpt/tests/xhr/event-timeout-order.any.js create mode 100644 test/wpt/tests/xhr/event-timeout.any.js create mode 100644 test/wpt/tests/xhr/event-upload-progress-crossorigin.any.js create mode 100644 test/wpt/tests/xhr/event-upload-progress.any.js create mode 100644 test/wpt/tests/xhr/firing-events-http-content-length.html create mode 100644 test/wpt/tests/xhr/firing-events-http-no-content-length.html create mode 100644 test/wpt/tests/xhr/folder.txt create mode 100644 test/wpt/tests/xhr/formdata.html create mode 100644 test/wpt/tests/xhr/formdata/append-formelement.html create mode 100644 test/wpt/tests/xhr/formdata/append.any.js create mode 100644 test/wpt/tests/xhr/formdata/constructor-formelement.html create mode 100644 test/wpt/tests/xhr/formdata/constructor-submitter.html create mode 100644 test/wpt/tests/xhr/formdata/constructor.any.js create mode 100644 test/wpt/tests/xhr/formdata/delete-formelement.html create mode 100644 test/wpt/tests/xhr/formdata/delete.any.js create mode 100644 test/wpt/tests/xhr/formdata/foreach.any.js create mode 100644 test/wpt/tests/xhr/formdata/get-formelement.html create mode 100644 test/wpt/tests/xhr/formdata/get.any.js create mode 100644 test/wpt/tests/xhr/formdata/has-formelement.html create mode 100644 test/wpt/tests/xhr/formdata/has.any.js create mode 100644 test/wpt/tests/xhr/formdata/iteration.any.js create mode 100644 test/wpt/tests/xhr/formdata/set-blob.any.js create mode 100644 test/wpt/tests/xhr/formdata/set-formelement.html create mode 100644 test/wpt/tests/xhr/formdata/set.any.js create mode 100644 test/wpt/tests/xhr/getallresponseheaders-cookies.htm create mode 100644 test/wpt/tests/xhr/getallresponseheaders-status.htm create mode 100644 test/wpt/tests/xhr/getallresponseheaders.htm create mode 100644 test/wpt/tests/xhr/getresponseheader-case-insensitive.htm create mode 100644 test/wpt/tests/xhr/getresponseheader-chunked-trailer.htm create mode 100644 test/wpt/tests/xhr/getresponseheader-cookies-and-more.htm create mode 100644 test/wpt/tests/xhr/getresponseheader-error-state.htm create mode 100644 test/wpt/tests/xhr/getresponseheader-server-date.htm create mode 100644 test/wpt/tests/xhr/getresponseheader-special-characters.htm create mode 100644 test/wpt/tests/xhr/getresponseheader-unsent-opened-state.htm create mode 100644 test/wpt/tests/xhr/getresponseheader.any.js create mode 100644 test/wpt/tests/xhr/header-user-agent-async.htm create mode 100644 test/wpt/tests/xhr/header-user-agent-sync.htm create mode 100644 test/wpt/tests/xhr/headers-normalize-response.htm create mode 100644 test/wpt/tests/xhr/historical.html create mode 100644 test/wpt/tests/xhr/idlharness.any.js create mode 100644 test/wpt/tests/xhr/json.any.js create mode 100644 test/wpt/tests/xhr/loadstart-and-state.html create mode 100644 test/wpt/tests/xhr/open-after-abort.htm create mode 100644 test/wpt/tests/xhr/open-after-setrequestheader.htm create mode 100644 test/wpt/tests/xhr/open-after-stop.window.js create mode 100644 test/wpt/tests/xhr/open-during-abort-event.htm create mode 100644 test/wpt/tests/xhr/open-during-abort-processing.htm create mode 100644 test/wpt/tests/xhr/open-during-abort.htm create mode 100644 
test/wpt/tests/xhr/open-method-bogus.htm create mode 100644 test/wpt/tests/xhr/open-method-case-insensitive.htm create mode 100644 test/wpt/tests/xhr/open-method-case-sensitive.htm create mode 100644 test/wpt/tests/xhr/open-method-insecure.htm create mode 100644 test/wpt/tests/xhr/open-method-responsetype-set-sync.htm create mode 100644 test/wpt/tests/xhr/open-open-send.htm create mode 100644 test/wpt/tests/xhr/open-open-sync-send.htm create mode 100644 test/wpt/tests/xhr/open-parameters-toString.htm create mode 100644 test/wpt/tests/xhr/open-referer.htm create mode 100644 test/wpt/tests/xhr/open-send-during-abort.htm create mode 100644 test/wpt/tests/xhr/open-send-open.htm create mode 100644 test/wpt/tests/xhr/open-sync-open-send.htm create mode 100644 test/wpt/tests/xhr/open-url-about-blank-window.htm create mode 100644 test/wpt/tests/xhr/open-url-base-inserted-after-open.htm create mode 100644 test/wpt/tests/xhr/open-url-base-inserted.htm create mode 100644 test/wpt/tests/xhr/open-url-base.htm create mode 100644 test/wpt/tests/xhr/open-url-encoding.htm create mode 100644 test/wpt/tests/xhr/open-url-fragment.htm create mode 100644 test/wpt/tests/xhr/open-url-javascript-window-2.htm create mode 100644 test/wpt/tests/xhr/open-url-javascript-window.htm create mode 100644 test/wpt/tests/xhr/open-url-multi-window-2.htm create mode 100644 test/wpt/tests/xhr/open-url-multi-window-3.htm create mode 100644 test/wpt/tests/xhr/open-url-multi-window-4.htm create mode 100644 test/wpt/tests/xhr/open-url-multi-window-5.htm create mode 100644 test/wpt/tests/xhr/open-url-multi-window-6.htm create mode 100644 test/wpt/tests/xhr/open-url-multi-window.htm create mode 100644 test/wpt/tests/xhr/open-url-redirected-sharedworker-origin.htm create mode 100644 test/wpt/tests/xhr/open-url-redirected-worker-origin.htm create mode 100644 test/wpt/tests/xhr/open-url-worker-origin.htm create mode 100644 test/wpt/tests/xhr/open-url-worker-simple.htm create mode 100644 test/wpt/tests/xhr/open-user-password-non-same-origin.htm create mode 100644 test/wpt/tests/xhr/over-1-meg.any.js create mode 100644 test/wpt/tests/xhr/overridemimetype-blob.html create mode 100644 test/wpt/tests/xhr/overridemimetype-done-state.any.js create mode 100644 test/wpt/tests/xhr/overridemimetype-edge-cases.window.js create mode 100644 test/wpt/tests/xhr/overridemimetype-headers-received-state-force-shiftjis.htm create mode 100644 test/wpt/tests/xhr/overridemimetype-invalid-mime-type.htm create mode 100644 test/wpt/tests/xhr/overridemimetype-loading-state.htm create mode 100644 test/wpt/tests/xhr/overridemimetype-open-state-force-utf-8.htm create mode 100644 test/wpt/tests/xhr/overridemimetype-open-state-force-xml.htm create mode 100644 test/wpt/tests/xhr/overridemimetype-unsent-state-force-shiftjis.any.js create mode 100644 test/wpt/tests/xhr/preserve-ua-header-on-redirect.htm create mode 100644 test/wpt/tests/xhr/progress-events-response-data-gzip.htm create mode 100644 test/wpt/tests/xhr/progressevent-constructor.html create mode 100644 test/wpt/tests/xhr/progressevent-interface.html create mode 100644 test/wpt/tests/xhr/request-content-length.any.js create mode 100644 test/wpt/tests/xhr/resources/accept-language.py create mode 100644 test/wpt/tests/xhr/resources/accept.py create mode 100644 test/wpt/tests/xhr/resources/access-control-allow-lists.py create mode 100644 test/wpt/tests/xhr/resources/access-control-allow-with-body.py create mode 100644 test/wpt/tests/xhr/resources/access-control-auth-basic.py create mode 100644 
test/wpt/tests/xhr/resources/access-control-basic-allow-no-credentials.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-allow-star.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-allow.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-cors-safelisted-request-headers.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-cors-safelisted-response-headers.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-denied.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-options-not-supported.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-preflight-cache-invalidation.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-preflight-cache-timeout.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-preflight-cache.py create mode 100644 test/wpt/tests/xhr/resources/access-control-basic-put-allow.py create mode 100644 test/wpt/tests/xhr/resources/access-control-cookie.py create mode 100644 test/wpt/tests/xhr/resources/access-control-origin-header.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-denied.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-request-allow-headers-returns-star.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-request-header-lowercase.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-request-header-returns-origin.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-request-header-sorted.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-request-headers-origin.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-request-invalid-status.py create mode 100644 test/wpt/tests/xhr/resources/access-control-preflight-request-must-not-contain-cookie.py create mode 100644 test/wpt/tests/xhr/resources/access-control-sandboxed-iframe.html create mode 100644 test/wpt/tests/xhr/resources/auth1/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth10/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth11/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth2/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth2/corsenabled.py create mode 100644 test/wpt/tests/xhr/resources/auth3/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth4/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth5/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth6/auth.py create mode 100644 test/wpt/tests/xhr/resources/auth7/corsenabled.py create mode 100644 test/wpt/tests/xhr/resources/auth8/corsenabled-no-authorize.py create mode 100644 test/wpt/tests/xhr/resources/auth9/auth.py create mode 100644 test/wpt/tests/xhr/resources/authentication.py create mode 100644 test/wpt/tests/xhr/resources/bad-chunk-encoding.py create mode 100644 test/wpt/tests/xhr/resources/base.xml create mode 100644 test/wpt/tests/xhr/resources/chunked.py create mode 100644 test/wpt/tests/xhr/resources/conditional.py create mode 100644 test/wpt/tests/xhr/resources/content.py create mode 100644 test/wpt/tests/xhr/resources/corsenabled.py create mode 100644 test/wpt/tests/xhr/resources/delay.py create mode 100644 test/wpt/tests/xhr/resources/echo-content-cors.py create mode 100644 test/wpt/tests/xhr/resources/echo-content-type.py create mode 100644 test/wpt/tests/xhr/resources/echo-headers.py create mode 100644 
test/wpt/tests/xhr/resources/echo-method.py create mode 100644 test/wpt/tests/xhr/resources/empty-div-utf8-html.py create mode 100644 test/wpt/tests/xhr/resources/folder.txt create mode 100644 test/wpt/tests/xhr/resources/form.py create mode 100644 test/wpt/tests/xhr/resources/get-set-cookie.py create mode 100644 test/wpt/tests/xhr/resources/gzip.py create mode 100644 test/wpt/tests/xhr/resources/header-content-length-twice.asis create mode 100644 test/wpt/tests/xhr/resources/header-content-length.asis create mode 100644 test/wpt/tests/xhr/resources/header-user-agent.py create mode 100644 test/wpt/tests/xhr/resources/headers-basic.asis create mode 100644 test/wpt/tests/xhr/resources/headers-double-empty.asis create mode 100644 test/wpt/tests/xhr/resources/headers-some-are-empty.asis create mode 100644 test/wpt/tests/xhr/resources/headers-www-authenticate.asis create mode 100644 test/wpt/tests/xhr/resources/headers.asis create mode 100644 test/wpt/tests/xhr/resources/headers.py create mode 100644 test/wpt/tests/xhr/resources/image.gif create mode 100644 test/wpt/tests/xhr/resources/img-utf8-html.py create mode 100644 test/wpt/tests/xhr/resources/img.jpg create mode 100644 test/wpt/tests/xhr/resources/infinite-redirects.py create mode 100644 test/wpt/tests/xhr/resources/init.htm create mode 100644 test/wpt/tests/xhr/resources/inspect-headers.py create mode 100644 test/wpt/tests/xhr/resources/invalid-utf8-html.py create mode 100644 test/wpt/tests/xhr/resources/last-modified.py create mode 100644 test/wpt/tests/xhr/resources/no-custom-header-on-preflight.py create mode 100644 test/wpt/tests/xhr/resources/nocors/folder.txt create mode 100644 test/wpt/tests/xhr/resources/over-1-meg.txt create mode 100644 test/wpt/tests/xhr/resources/parse-headers.py create mode 100644 test/wpt/tests/xhr/resources/pass.txt create mode 100644 test/wpt/tests/xhr/resources/redirect-cors.py create mode 100644 test/wpt/tests/xhr/resources/redirect.py create mode 100644 test/wpt/tests/xhr/resources/requri.py create mode 100644 test/wpt/tests/xhr/resources/reset-token.py create mode 100644 test/wpt/tests/xhr/resources/responseType-document-in-worker.js create mode 100644 test/wpt/tests/xhr/resources/responseXML-unavailable-in-worker.js create mode 100644 test/wpt/tests/xhr/resources/send-after-setting-document-domain-window-1.htm create mode 100644 test/wpt/tests/xhr/resources/send-after-setting-document-domain-window-2.htm create mode 100644 test/wpt/tests/xhr/resources/send-after-setting-document-domain-window-helper.js create mode 100644 test/wpt/tests/xhr/resources/shift-jis-html.py create mode 100644 test/wpt/tests/xhr/resources/status.py create mode 100644 test/wpt/tests/xhr/resources/top.txt create mode 100644 test/wpt/tests/xhr/resources/trickle.py create mode 100644 test/wpt/tests/xhr/resources/upload.py create mode 100644 test/wpt/tests/xhr/resources/utf16-bom.json create mode 100644 test/wpt/tests/xhr/resources/utf16.txt create mode 100644 test/wpt/tests/xhr/resources/well-formed.xml create mode 100644 test/wpt/tests/xhr/resources/win-1252-html.py create mode 100644 test/wpt/tests/xhr/resources/win-1252-xml.py create mode 100644 test/wpt/tests/xhr/resources/workerxhr-origin-referrer.js create mode 100644 test/wpt/tests/xhr/resources/workerxhr-simple.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-event-order.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-aborted.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-abortedonmain.js create mode 
100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-overrides.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-overridesexpires.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-runner.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-simple.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-synconmain.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-synconworker.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout-twice.js create mode 100644 test/wpt/tests/xhr/resources/xmlhttprequest-timeout.js create mode 100644 test/wpt/tests/xhr/resources/zlib.py create mode 100644 test/wpt/tests/xhr/response-body-errors.any.js create mode 100644 test/wpt/tests/xhr/response-data-arraybuffer.htm create mode 100644 test/wpt/tests/xhr/response-data-blob.htm create mode 100644 test/wpt/tests/xhr/response-data-deflate.htm create mode 100644 test/wpt/tests/xhr/response-data-gzip.htm create mode 100644 test/wpt/tests/xhr/response-data-progress.htm create mode 100644 test/wpt/tests/xhr/response-invalid-responsetype.htm create mode 100644 test/wpt/tests/xhr/response-json.htm create mode 100644 test/wpt/tests/xhr/response-method.htm create mode 100644 test/wpt/tests/xhr/responseText-status.html create mode 100644 test/wpt/tests/xhr/responseType-document-in-worker.html create mode 100644 test/wpt/tests/xhr/responseXML-unavailable-in-worker.html create mode 100644 test/wpt/tests/xhr/responsedocument-decoding.htm create mode 100644 test/wpt/tests/xhr/responsetext-decoding.htm create mode 100644 test/wpt/tests/xhr/responsetype.any.js create mode 100644 test/wpt/tests/xhr/responseurl.html create mode 100644 test/wpt/tests/xhr/responsexml-basic.htm create mode 100644 test/wpt/tests/xhr/responsexml-document-properties.htm create mode 100644 test/wpt/tests/xhr/responsexml-get-twice.htm create mode 100644 test/wpt/tests/xhr/responsexml-invalid-type.html create mode 100644 test/wpt/tests/xhr/responsexml-media-type.htm create mode 100644 test/wpt/tests/xhr/responsexml-non-document-types.htm create mode 100644 test/wpt/tests/xhr/responsexml-non-well-formed.htm create mode 100644 test/wpt/tests/xhr/security-consideration.sub.html create mode 100644 test/wpt/tests/xhr/send-accept-language.htm create mode 100644 test/wpt/tests/xhr/send-accept.htm create mode 100644 test/wpt/tests/xhr/send-after-setting-document-domain.htm create mode 100644 test/wpt/tests/xhr/send-authentication-basic-cors-not-enabled.htm create mode 100644 test/wpt/tests/xhr/send-authentication-basic-cors.htm create mode 100644 test/wpt/tests/xhr/send-authentication-basic-repeat-no-args.htm create mode 100644 test/wpt/tests/xhr/send-authentication-basic-setrequestheader-and-arguments.htm create mode 100644 test/wpt/tests/xhr/send-authentication-basic-setrequestheader-existing-session.htm create mode 100644 test/wpt/tests/xhr/send-authentication-basic-setrequestheader.htm create mode 100644 test/wpt/tests/xhr/send-authentication-basic.htm create mode 100644 test/wpt/tests/xhr/send-authentication-competing-names-passwords.htm create mode 100644 test/wpt/tests/xhr/send-authentication-cors-basic-setrequestheader.htm create mode 100644 test/wpt/tests/xhr/send-authentication-cors-setrequestheader-no-cred.htm create mode 100644 test/wpt/tests/xhr/send-authentication-existing-session-manual.htm create mode 100644 test/wpt/tests/xhr/send-authentication-prompt-2-manual.htm create mode 100644 
test/wpt/tests/xhr/send-authentication-prompt-manual.htm create mode 100644 test/wpt/tests/xhr/send-blob-with-no-mime-type.html create mode 100644 test/wpt/tests/xhr/send-conditional-cors.htm create mode 100644 test/wpt/tests/xhr/send-conditional.htm create mode 100644 test/wpt/tests/xhr/send-content-type-charset.htm create mode 100644 test/wpt/tests/xhr/send-content-type-string.htm create mode 100644 test/wpt/tests/xhr/send-data-arraybuffer.any.js create mode 100644 test/wpt/tests/xhr/send-data-arraybufferview.any.js create mode 100644 test/wpt/tests/xhr/send-data-blob.htm create mode 100644 test/wpt/tests/xhr/send-data-es-object.any.js create mode 100644 test/wpt/tests/xhr/send-data-formdata.any.js create mode 100644 test/wpt/tests/xhr/send-data-sharedarraybuffer.any.js create mode 100644 test/wpt/tests/xhr/send-data-string-invalid-unicode.any.js create mode 100644 test/wpt/tests/xhr/send-data-unexpected-tostring.htm create mode 100644 test/wpt/tests/xhr/send-entity-body-basic.htm create mode 100644 test/wpt/tests/xhr/send-entity-body-document-bogus.htm create mode 100644 test/wpt/tests/xhr/send-entity-body-document.htm create mode 100644 test/wpt/tests/xhr/send-entity-body-empty.htm create mode 100644 test/wpt/tests/xhr/send-entity-body-get-head-async.htm create mode 100644 test/wpt/tests/xhr/send-entity-body-get-head.htm create mode 100644 test/wpt/tests/xhr/send-entity-body-none.htm create mode 100644 test/wpt/tests/xhr/send-network-error-async-events.sub.htm create mode 100644 test/wpt/tests/xhr/send-network-error-sync-events.sub.htm create mode 100644 test/wpt/tests/xhr/send-no-response-event-loadend.htm create mode 100644 test/wpt/tests/xhr/send-no-response-event-loadstart.htm create mode 100644 test/wpt/tests/xhr/send-no-response-event-order.htm create mode 100644 test/wpt/tests/xhr/send-non-same-origin.htm create mode 100644 test/wpt/tests/xhr/send-receive-utf16.htm create mode 100644 test/wpt/tests/xhr/send-redirect-bogus-sync.htm create mode 100644 test/wpt/tests/xhr/send-redirect-bogus.htm create mode 100644 test/wpt/tests/xhr/send-redirect-infinite-sync.htm create mode 100644 test/wpt/tests/xhr/send-redirect-infinite.htm create mode 100644 test/wpt/tests/xhr/send-redirect-no-location.htm create mode 100644 test/wpt/tests/xhr/send-redirect-post-upload.htm create mode 100644 test/wpt/tests/xhr/send-redirect-to-cors.htm create mode 100644 test/wpt/tests/xhr/send-redirect-to-non-cors.htm create mode 100644 test/wpt/tests/xhr/send-redirect.htm create mode 100644 test/wpt/tests/xhr/send-response-event-order.htm create mode 100644 test/wpt/tests/xhr/send-response-upload-event-loadend.htm create mode 100644 test/wpt/tests/xhr/send-response-upload-event-loadstart.htm create mode 100644 test/wpt/tests/xhr/send-response-upload-event-progress.htm create mode 100644 test/wpt/tests/xhr/send-send.any.js create mode 100644 test/wpt/tests/xhr/send-sync-blocks-async.htm create mode 100644 test/wpt/tests/xhr/send-sync-no-response-event-load.htm create mode 100644 test/wpt/tests/xhr/send-sync-no-response-event-loadend.htm create mode 100644 test/wpt/tests/xhr/send-sync-no-response-event-order.htm create mode 100644 test/wpt/tests/xhr/send-sync-response-event-order.htm create mode 100644 test/wpt/tests/xhr/send-sync-timeout.htm create mode 100644 test/wpt/tests/xhr/send-timeout-events.htm create mode 100644 test/wpt/tests/xhr/send-usp.any.js create mode 100644 test/wpt/tests/xhr/setrequestheader-after-send.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-allow-empty-value.htm create 
mode 100644 test/wpt/tests/xhr/setrequestheader-allow-whitespace-in-value.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-before-open.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-bogus-name.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-bogus-value.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-case-insensitive.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-combining.window.js create mode 100644 test/wpt/tests/xhr/setrequestheader-content-type.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-header-allowed.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-header-forbidden.htm create mode 100644 test/wpt/tests/xhr/setrequestheader-open-setrequestheader.htm create mode 100644 test/wpt/tests/xhr/status-async.htm create mode 100644 test/wpt/tests/xhr/status-basic.htm create mode 100644 test/wpt/tests/xhr/status-error.htm create mode 100644 test/wpt/tests/xhr/status.h2.window.js create mode 100644 test/wpt/tests/xhr/sync-no-progress.any.js create mode 100644 test/wpt/tests/xhr/sync-no-timeout.any.js create mode 100644 test/wpt/tests/xhr/sync-xhr-and-window-onload.html create mode 100644 test/wpt/tests/xhr/sync-xhr-supported-by-feature-policy.html create mode 100644 test/wpt/tests/xhr/template-element.html create mode 100644 test/wpt/tests/xhr/thrown-error-in-events.html create mode 100644 test/wpt/tests/xhr/timeout-cors-async.htm create mode 100644 test/wpt/tests/xhr/timeout-multiple-fetches.html create mode 100644 test/wpt/tests/xhr/timeout-sync.htm create mode 100644 test/wpt/tests/xhr/xhr-authorization-redirect.any.js create mode 100644 test/wpt/tests/xhr/xhr-timeout-longtask.any.js create mode 100644 test/wpt/tests/xhr/xmlhttprequest-basic.htm create mode 100644 test/wpt/tests/xhr/xmlhttprequest-eventtarget.htm create mode 100644 test/wpt/tests/xhr/xmlhttprequest-network-error-sync.htm create mode 100644 test/wpt/tests/xhr/xmlhttprequest-network-error.htm create mode 100644 test/wpt/tests/xhr/xmlhttprequest-sync-block-defer-scripts-subframe.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-sync-block-defer-scripts.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-sync-block-scripts.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-sync-default-feature-policy.sub.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-sync-not-hang-scriptloader-subframe.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-sync-not-hang-scriptloader.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-aborted.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-abortedonmain.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-overrides.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-overridesexpires.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-reused.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-simple.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-synconmain.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-twice.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-worker-aborted.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-worker-overrides.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-worker-overridesexpires.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-worker-simple.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-timeout-worker-synconworker.html create mode 100644 
test/wpt/tests/xhr/xmlhttprequest-timeout-worker-twice.html create mode 100644 test/wpt/tests/xhr/xmlhttprequest-unsent.htm create mode 100644 types/README.md create mode 100644 types/agent.d.ts create mode 100644 types/api.d.ts create mode 100644 types/balanced-pool.d.ts create mode 100644 types/cache.d.ts create mode 100644 types/client.d.ts create mode 100644 types/connector.d.ts create mode 100644 types/content-type.d.ts create mode 100644 types/cookies.d.ts create mode 100644 types/diagnostics-channel.d.ts create mode 100644 types/dispatcher.d.ts create mode 100644 types/errors.d.ts create mode 100644 types/fetch.d.ts create mode 100644 types/file.d.ts create mode 100644 types/filereader.d.ts create mode 100644 types/formdata.d.ts create mode 100644 types/global-dispatcher.d.ts create mode 100644 types/global-origin.d.ts create mode 100644 types/handlers.d.ts create mode 100644 types/header.d.ts create mode 100644 types/index.d.ts create mode 100644 types/interceptors.d.ts create mode 100644 types/mock-agent.d.ts create mode 100644 types/mock-client.d.ts create mode 100644 types/mock-errors.d.ts create mode 100644 types/mock-interceptor.d.ts create mode 100644 types/mock-pool.d.ts create mode 100644 types/patch.d.ts create mode 100644 types/pool-stats.d.ts create mode 100644 types/pool.d.ts create mode 100644 types/proxy-agent.d.ts create mode 100644 types/readable.d.ts create mode 100644 types/retry-handler.d.ts create mode 100644 types/webidl.d.ts create mode 100644 types/websocket.d.ts diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..4ad0cf5 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,8 @@ +# Ignore everything but the stuff following the `*` with the `!` +# See https://docs.docker.com/engine/reference/builder/#dockerignore-file + +* +!package.json +!lib +!deps +!build diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..c7a0d1f --- /dev/null +++ b/.editorconfig @@ -0,0 +1,9 @@ +# https://editorconfig.org/ + +root = true + +[*] +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md new file mode 100644 index 0000000..8ff7029 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -0,0 +1,34 @@ +--- +name: Bug Report +about: Report an issue +title: '' +labels: bug +assignees: '' + +--- + +## Bug Description + + + +## Reproducible By + + + +## Expected Behavior + + + +## Logs & Screenshots + + + +## Environment + + + +### Additional context + + diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md new file mode 100644 index 0000000..0c3a4ff --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -0,0 +1,28 @@ +--- +name: Feature Request +about: Make a suggestion on a feature or improvement for the project +title: '' +labels: enhancement +assignees: '' + +--- + +## This would solve... + + + +## The implementation should look like... + + + +## I have also considered... + + + +## Additional context + + diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..2620ffb --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,53 @@ + + +## This relates to... 
+ + + +## Rationale + + + +## Changes + + + +### Features + + + +### Bug Fixes + + + +### Breaking Changes and Deprecations + + + +## Status + + + + +- [ ] I have read and agreed to the [Developer's Certificate of Origin][cert] +- [ ] Tested +- [ ] Benchmarked (**optional**) +- [ ] Documented +- [ ] Review ready +- [ ] In review +- [ ] Merge ready + +[cert]: https://github.com/nodejs/undici/blob/main/CONTRIBUTING.md diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..18b9fbf --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,23 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 10 + + - package-ecosystem: docker + directory: /build + schedule: + interval: daily + + - package-ecosystem: pip + directory: /test/wpt/tests/resources/test + schedule: + interval: daily diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml new file mode 100644 index 0000000..281bdc6 --- /dev/null +++ b/.github/workflows/bench.yml @@ -0,0 +1,43 @@ +name: Benchmarks +on: + - push + - pull_request + +permissions: + contents: read + +jobs: + benchmark_current: + name: benchmark current + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + with: + persist-credentials: false + ref: ${{ github.base_ref }} + - name: Setup Node + uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + with: + node-version: lts/* + - name: Install Modules + run: npm i + - name: Run Benchmark + run: npm run bench + + benchmark_branch: + name: benchmark branch + runs-on: ubuntu-latest + steps: + - name: Checkout Code + uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + with: + persist-credentials: false + - name: Setup Node + uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + with: + node-version: lts/* + - name: Install Modules + run: npm i + - name: Run Benchmark + run: npm run bench diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000..3c44e66 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,78 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. 
+# +name: "CodeQL" + +on: + push: + branches: ["main"] + pull_request: + # The branches below must be a subset of the branches above + branches: ["main"] + schedule: + - cron: "0 0 * * 1" + +permissions: + contents: read + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: ["javascript", "python", "typescript"] + # CodeQL supports [ $supported-codeql-languages ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Harden Runner + uses: step-security/harden-runner@1b05615854632b887b69ae1be8cbefe72d3ae423 # v2.6.0 + with: + egress-policy: audit + + - name: Checkout repository + uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@74483a38d39275f33fcff5f35b679b5ca4a26a99 # v2.3.3 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@74483a38d39275f33fcff5f35b679b5ca4a26a99 # v2.3.3 + + # â„¹ï¸ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@74483a38d39275f33fcff5f35b679b5ca4a26a99 # v2.3.3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml new file mode 100644 index 0000000..0e356c7 --- /dev/null +++ b/.github/workflows/dependency-review.yml @@ -0,0 +1,27 @@ +# Dependency Review Action +# +# This Action will scan dependency manifest files that change as part of a Pull Request, +# surfacing known-vulnerable versions of the packages declared or updated in the PR. +# Once installed, if the workflow run is marked as required, +# PRs introducing known-vulnerable packages will be blocked from merging. 
+# +# Source repository: https://github.com/actions/dependency-review-action +name: 'Dependency Review' +on: [pull_request] + +permissions: + contents: read + +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - name: Harden Runner + uses: step-security/harden-runner@1b05615854632b887b69ae1be8cbefe72d3ae423 # v2.6.0 + with: + egress-policy: audit + + - name: 'Checkout Repository' + uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + - name: 'Dependency Review' + uses: actions/dependency-review-action@6c5ccdad469c9f8a2996bfecaec55a631a347034 # v3.1.0 diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml new file mode 100644 index 0000000..29d7490 --- /dev/null +++ b/.github/workflows/fuzz.yml @@ -0,0 +1,39 @@ +name: Fuzzing + +on: [push, pull_request] + +permissions: + contents: read + +jobs: + fuzzing: + name: Fuzz + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + with: + persist-credentials: false + + - name: Setup Node + uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + with: + node-version: lts/* + + - name: Install + run: | + npm install + + - name: Run fuzzing + timeout-minutes: 10 + run: | + npm run fuzz + + - uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 + if: ${{ failure() }} + with: + name: undici-fuzz-results-${{ github.sha }} + path: | + corpus/ + crash-* + fuzz-results-*.json diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..2eb2b6b --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,17 @@ +name: Lint +on: [push, pull_request] +permissions: + contents: read + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + with: + persist-credentials: false + - uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + with: + node-version: lts/* + - run: npm install + - run: npm run lint diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml new file mode 100644 index 0000000..4c3a77e --- /dev/null +++ b/.github/workflows/nodejs.yml @@ -0,0 +1,45 @@ +# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions + + +name: Node CI + +on: + push: + branches: + - current + - next + - 'v*' + pull_request: + +jobs: + build: + name: Test + uses: pkgjs/action/.github/workflows/node-test.yaml@v0.1.7 + with: + runs-on: ubuntu-latest, windows-latest + test-command: npm run coverage:ci + timeout-minutes: 15 + post-test-steps: | + - name: Coverage Report + uses: codecov/codecov-action@v3 + include: | + - runs-on: ubuntu-latest + node-version: 16.8 + exclude: | + - runs-on: windows-latest + node-version: 14 + - runs-on: windows-latest + node-version: 16 + automerge: + if: > + github.event_name == 'pull_request' && github.event.pull_request.user.login == 'dependabot[bot]' + needs: build + runs-on: ubuntu-latest + permissions: + pull-requests: write + contents: write + steps: + - uses: fastify/github-action-merge-dependabot@59fc8817458fac20df8884576cfe69dbb77c9a07 # v3.9.1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/publish-undici-types.yml b/.github/workflows/publish-undici-types.yml new file mode 100644 index 0000000..3f8fea3 --- 
/dev/null +++ b/.github/workflows/publish-undici-types.yml @@ -0,0 +1,26 @@ +name: Publish undici-types + +on: + push: + tags: + - 'v*' + workflow_dispatch: + +permissions: + contents: read + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + - uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + with: + node-version: '16.x' + registry-url: 'https://registry.npmjs.org' + - run: npm install + - run: node scripts/generate-undici-types-package-json.js + - run: npm publish + working-directory: './types' + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml new file mode 100644 index 0000000..f52ad55 --- /dev/null +++ b/.github/workflows/scorecard.yml @@ -0,0 +1,56 @@ +# This workflow uses actions that are not certified by GitHub. They are provided +# by a third-party and are governed by separate terms of service, privacy +# policy, and support documentation. + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '16 10 * * 2' + push: + branches: [ "main" ] + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + + steps: + - name: "Checkout code" + uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + with: + persist-credentials: false + + - name: "Run analysis" + uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1 + with: + results_file: results.sarif + results_format: sarif + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: "Upload artifact" + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. 
+ - name: "Upload to code-scanning" + uses: github/codeql-action/upload-sarif@74483a38d39275f33fcff5f35b679b5ca4a26a99 # v2.22.5 + with: + sarif_file: results.sarif diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..acd1b69 --- /dev/null +++ b/.gitignore @@ -0,0 +1,81 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# TypeScript v1 declaration files +typings/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env + +# next.js build output +.next + +# lock files +package-lock.json +yarn.lock + +# IDE files +.idea +.vscode + +*0x +*clinic* + +# Fuzzing +corpus/ +crash-* +fuzz-results-*.json + +# Bundle output +undici-fetch.js +/test/imports/undici-import.js diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 0000000..20d0d06 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" + +npm run lint diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 0000000..e69de29 diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..344e7f6 --- /dev/null +++ b/.npmignore @@ -0,0 +1,2 @@ +lib/llhttp/llhttp_simd.wasm +lib/llhttp/llhttp.wasm diff --git a/.taprc b/.taprc new file mode 100644 index 0000000..61f7051 --- /dev/null +++ b/.taprc @@ -0,0 +1,7 @@ +ts: false +jsx: false +flow: false +coverage: false +expose-gc: true +timeout: 60 +check-coverage: false diff --git a/CNAME b/CNAME new file mode 100644 index 0000000..27d813e --- /dev/null +++ b/CNAME @@ -0,0 +1 @@ +undici.nodejs.org \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..cb674bc --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,6 @@ +# Code of Conduct + +Undici is committed to upholding the Node.js Code of Conduct. + +The Node.js Code of Conduct document can be found at +https://github.com/nodejs/admin/blob/main/CODE_OF_CONDUCT.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..3a7f3ff --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,201 @@ +# Contributing to Undici + +* [Guides](#guides) + * [Update `llhttp`](#update-llhttp) + * [Lint](#lint) + * [Test](#test) + * [Coverage](#coverage) + * [Update `WPTs`](#update-wpts) +* [Developer's Certificate of Origin 1.1](#developers-certificate-of-origin) + * [Moderation Policy](#moderation-policy) + + +## Guides + + +### Update `llhttp` + +The HTTP parser used by `undici` is a WebAssembly build of [`llhttp`](https://github.com/nodejs/llhttp). + +While the project itself provides a way to compile targeting WebAssembly, at the moment we embed the sources +directly and compile the module in `undici`. 
+
+The `deps/llhttp/include` folder contains the C header files, while the `deps/llhttp/src` folder contains
+the C source files needed to compile the module.
+
+The `lib/llhttp` folder contains the `.js` transpiled assets required to implement a parser.
+
+The following are the steps required to perform an update.
+
+#### Clone the [llhttp](https://github.com/nodejs/llhttp) project
+
+```bash
+git clone git@github.com:nodejs/llhttp.git
+
+cd llhttp
+```
+
+#### Checkout a `llhttp` release
+
+```bash
+git checkout <llhttp-release-tag>
+```
+
+#### Install the `llhttp` dependencies
+
+```bash
+npm i
+```
+
+#### Run the wasm build script
+
+> This requires [docker](https://www.docker.com/) installed on your machine.
+
+```bash
+npm run build-wasm
+```
+
+#### Copy the sources to `undici`
+
+```bash
+cp build/wasm/*.js <your-undici-checkout>/lib/llhttp/
+
+cp build/wasm/*.js.map <your-undici-checkout>/lib/llhttp/
+
+cp build/wasm/*.d.ts <your-undici-checkout>/lib/llhttp/
+
+cp src/native/api.c src/native/http.c build/c/llhttp.c <your-undici-checkout>/deps/llhttp/src/
+
+cp src/native/api.h build/llhttp.h <your-undici-checkout>/deps/llhttp/include/
+```
+
+#### Build the WebAssembly module in `undici`
+
+> This requires [docker](https://www.docker.com/) installed on your machine.
+
+```bash
+cd <your-undici-checkout>
+
+npm run build:wasm
+```
+
+#### Commit the contents of lib/llhttp
+
+Create a commit which includes all of the updated files in lib/llhttp.
+
+
+### Update `WPTs`
+
+`undici` runs a subset of the [`web-platform-tests`](https://github.com/web-platform-tests/wpt).
+
+Here are the steps to update them.
+
+<details>
+<summary>Skip the tutorial</summary>
+
+```bash
+git clone --depth 1 --single-branch --branch epochs/daily --filter=blob:none --sparse https://github.com/web-platform-tests/wpt.git test/wpt/tests
+cd test/wpt/tests
+
+git sparse-checkout add /resources
+git sparse-checkout add /interfaces
+git sparse-checkout add /common
+git sparse-checkout add /fetch
+git sparse-checkout add /FileAPI
+git sparse-checkout add /xhr
+git sparse-checkout add /websockets
+git sparse-checkout add /mimesniff
+git sparse-checkout add /storage
+git sparse-checkout add /service-workers
+```
+
+</details>
+ +#### Sparse-clone the [wpt](https://github.com/web-platform-tests/wpt) repo + +```bash +git clone --depth 1 --single-branch --branch epochs/daily --filter=blob:none --sparse https://github.com/web-platform-tests/wpt.git test/wpt/tests + +cd test/wpt/tests + +``` + +#### Checkout the tests + +Only run the commands for the folder(s) you want to update. + +```bash +git sparse-checkout add /fetch +git sparse-checkout add /FileAPI +git sparse-checkout add /xhr +git sparse-checkout add /websockets +git sparse-checkout add /resources +git sparse-checkout add /common + +# etc +``` + +#### Run the tests + +Run the tests to ensure that any new failures are marked as such. + +You can mark tests as failing in their corresponding [status](./test/wpt/status) file. + +```bash +npm run test:wpt +``` + + + +### Lint + +```bash +npm run lint +``` + + +### Test + +```bash +npm run test +``` + + +### Coverage + +```bash +npm run coverage +``` + + +## Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + + +### Moderation Policy + +The [Node.js Moderation Policy] applies to this project. + +[Node.js Moderation Policy]: +https://github.com/nodejs/admin/blob/main/Moderation-Policy.md diff --git a/GOVERNANCE.md b/GOVERNANCE.md new file mode 100644 index 0000000..3e88d4b --- /dev/null +++ b/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Undici Working Group + +The Node.js Undici project is governed by a Working Group (WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#collaborators). + +### Collaborators + +The undici GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. 
+ +Modifications of the contents of the undici repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#collaborators). The list shall be in an +alphabetical order. + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on Zoom. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. 
WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..e7323bb --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Matteo Collina and Undici contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/MAINTAINERS.md b/MAINTAINERS.md new file mode 100644 index 0000000..b98d904 --- /dev/null +++ b/MAINTAINERS.md @@ -0,0 +1,33 @@ +# Maintainers + +This document details any and all processes relevant to project maintainers. Maintainers should feel empowered to contribute back to this document with any process changes they feel improve the overall experience for themselves and other maintainers. + +## Labels + +Maintainers are encouraged to use the extensive and detailed list of labels for easier repo management. + +* Generally, all issues should be labelled. The most general labels are `bug`, `enhancement`, and `Status: help-wanted`. +* Issues specific to a certain aspect of the project should be labeled using one of the specificity labels listed below. For example, a bug in the `Client` class should have the `Client` and `bug` label assigned. + * Specificity labels: + * `Agent` + * `Client` + * `Docs` + * `Performance` + * `Pool` + * `Tests` + * `Types` +* Any `question` or `usage help` issues should be converted into Q&A Discussions +* `Status:` labels should be added to all open issues indicating their relative development status. 
+ * Status labels: + * `Status: blocked` + * `Status: help-wanted` + * `Status: in-progress` + * `Status: wontfix` +* Issues and/or pull requests with an agreed upon semver status can be assigned the appropriate `semver-` label. + * Semver labels: + * `semver-major` + * `semver-minor` + * `semver-patch` +* Issues with a low-barrier of entry should be assigned the `good first issue` label. +* Do not use the `invalid` label, instead use `bug` or `Status: wontfix`. +* Duplicate issues should initially be assigned the `duplicate` label. diff --git a/README.md b/README.md new file mode 100644 index 0000000..3ba8989 --- /dev/null +++ b/README.md @@ -0,0 +1,443 @@ +# undici + +[![Node CI](https://github.com/nodejs/undici/actions/workflows/nodejs.yml/badge.svg)](https://github.com/nodejs/undici/actions/workflows/nodejs.yml) [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](http://standardjs.com/) [![npm version](https://badge.fury.io/js/undici.svg)](https://badge.fury.io/js/undici) [![codecov](https://codecov.io/gh/nodejs/undici/branch/main/graph/badge.svg?token=yZL6LtXkOA)](https://codecov.io/gh/nodejs/undici) + +An HTTP/1.1 client, written from scratch for Node.js. + +> Undici means eleven in Italian. 1.1 -> 11 -> Eleven -> Undici. +It is also a Stranger Things reference. + +Have a question about using Undici? Open a [Q&A Discussion](https://github.com/nodejs/undici/discussions/new) or join our official OpenJS [Slack](https://openjs-foundation.slack.com/archives/C01QF9Q31QD) channel. + +## Install + +``` +npm i undici +``` + +## Benchmarks + +The benchmark is a simple `hello world` [example](benchmarks/benchmark.js) using a +number of unix sockets (connections) with a pipelining depth of 10 running on Node 20.6.0. + +### Connections 1 + + +| Tests | Samples | Result | Tolerance | Difference with slowest | +|---------------------|---------|---------------|-----------|-------------------------| +| http - no keepalive | 15 | 5.32 req/sec | ± 2.61 % | - | +| http - keepalive | 10 | 5.35 req/sec | ± 2.47 % | + 0.44 % | +| undici - fetch | 15 | 41.85 req/sec | ± 2.49 % | + 686.04 % | +| undici - pipeline | 40 | 50.36 req/sec | ± 2.77 % | + 845.92 % | +| undici - stream | 15 | 60.58 req/sec | ± 2.75 % | + 1037.72 % | +| undici - request | 10 | 61.19 req/sec | ± 2.60 % | + 1049.24 % | +| undici - dispatch | 20 | 64.84 req/sec | ± 2.81 % | + 1117.81 % | + + +### Connections 50 + +| Tests | Samples | Result | Tolerance | Difference with slowest | +|---------------------|---------|------------------|-----------|-------------------------| +| undici - fetch | 30 | 2107.19 req/sec | ± 2.69 % | - | +| http - no keepalive | 10 | 2698.90 req/sec | ± 2.68 % | + 28.08 % | +| http - keepalive | 10 | 4639.49 req/sec | ± 2.55 % | + 120.17 % | +| undici - pipeline | 40 | 6123.33 req/sec | ± 2.97 % | + 190.59 % | +| undici - stream | 50 | 9426.51 req/sec | ± 2.92 % | + 347.35 % | +| undici - request | 10 | 10162.88 req/sec | ± 2.13 % | + 382.29 % | +| undici - dispatch | 50 | 11191.11 req/sec | ± 2.98 % | + 431.09 % | + + +## Quick Start + +```js +import { request } from 'undici' + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) +console.log('headers', headers) + +for await (const data of body) { + console.log('data', data) +} + +console.log('trailers', trailers) +``` + +## Body Mixins + +The `body` mixins are the most common way to format the request/response body. 
Mixins include: + +- [`.formData()`](https://fetch.spec.whatwg.org/#dom-body-formdata) +- [`.json()`](https://fetch.spec.whatwg.org/#dom-body-json) +- [`.text()`](https://fetch.spec.whatwg.org/#dom-body-text) + +Example usage: + +```js +import { request } from 'undici' + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) +console.log('headers', headers) +console.log('data', await body.json()) +console.log('trailers', trailers) +``` + +_Note: Once a mixin has been called then the body cannot be reused, thus calling additional mixins on `.body`, e.g. `.body.json(); .body.text()` will result in an error `TypeError: unusable` being thrown and returned through the `Promise` rejection._ + +Should you need to access the `body` in plain-text after using a mixin, the best practice is to use the `.text()` mixin first and then manually parse the text to the desired format. + +For more information about their behavior, please reference the body mixin from the [Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin). + +## Common API Methods + +This section documents our most commonly used API methods. Additional APIs are documented in their own files within the [docs](./docs/) folder and are accessible via the navigation list on the left side of the docs site. + +### `undici.request([url, options]): Promise` + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`RequestOptions`](./docs/api/Dispatcher.md#parameter-requestoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **method** `String` - Default: `PUT` if `options.body`, otherwise `GET` + * **maxRedirections** `Integer` - Default: `0` + +Returns a promise with the result of the `Dispatcher.request` method. + +Calls `options.dispatcher.request(options)`. + +See [Dispatcher.request](./docs/api/Dispatcher.md#dispatcherrequestoptions-callback) for more details. + +### `undici.stream([url, options, ]factory): Promise` + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`StreamOptions`](./docs/api/Dispatcher.md#parameter-streamoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **method** `String` - Default: `PUT` if `options.body`, otherwise `GET` + * **maxRedirections** `Integer` - Default: `0` +* **factory** `Dispatcher.stream.factory` + +Returns a promise with the result of the `Dispatcher.stream` method. + +Calls `options.dispatcher.stream(options, factory)`. + +See [Dispatcher.stream](docs/api/Dispatcher.md#dispatcherstreamoptions-factory-callback) for more details. + +### `undici.pipeline([url, options, ]handler): Duplex` + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`PipelineOptions`](docs/api/Dispatcher.md#parameter-pipelineoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **method** `String` - Default: `PUT` if `options.body`, otherwise `GET` + * **maxRedirections** `Integer` - Default: `0` +* **handler** `Dispatcher.pipeline.handler` + +Returns: `stream.Duplex` + +Calls `options.dispatch.pipeline(options, handler)`. + +See [Dispatcher.pipeline](docs/api/Dispatcher.md#dispatcherpipelineoptions-handler) for more details. + +### `undici.connect([url, options]): Promise` + +Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT). 
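+
+A minimal usage sketch (the target URL and the data written to the socket are placeholders, assuming a local server that accepts `CONNECT` requests):
+
+```js
+import { connect } from 'undici'
+
+// Establish an HTTP CONNECT tunnel and use the raw socket directly.
+const { statusCode, socket } = await connect('http://localhost:3000')
+console.log('tunnel established', statusCode)
+
+socket.write('ping')
+socket.end()
+```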
+ +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`ConnectOptions`](docs/api/Dispatcher.md#parameter-connectoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **maxRedirections** `Integer` - Default: `0` +* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional) + +Returns a promise with the result of the `Dispatcher.connect` method. + +Calls `options.dispatch.connect(options)`. + +See [Dispatcher.connect](docs/api/Dispatcher.md#dispatcherconnectoptions-callback) for more details. + +### `undici.fetch(input[, init]): Promise` + +Implements [fetch](https://fetch.spec.whatwg.org/#fetch-method). + +* https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch +* https://fetch.spec.whatwg.org/#fetch-method + +Only supported on Node 16.8+. + +Basic usage example: + +```js +import { fetch } from 'undici' + + +const res = await fetch('https://example.com') +const json = await res.json() +console.log(json) +``` + +You can pass an optional dispatcher to `fetch` as: + +```js +import { fetch, Agent } from 'undici' + +const res = await fetch('https://example.com', { + // Mocks are also supported + dispatcher: new Agent({ + keepAliveTimeout: 10, + keepAliveMaxTimeout: 10 + }) +}) +const json = await res.json() +console.log(json) +``` + +#### `request.body` + +A body can be of the following types: + +- ArrayBuffer +- ArrayBufferView +- AsyncIterables +- Blob +- Iterables +- String +- URLSearchParams +- FormData + +In this implementation of fetch, ```request.body``` now accepts ```Async Iterables```. It is not present in the [Fetch Standard.](https://fetch.spec.whatwg.org) + +```js +import { fetch } from 'undici' + +const data = { + async *[Symbol.asyncIterator]() { + yield 'hello' + yield 'world' + }, +} + +await fetch('https://example.com', { body: data, method: 'POST', duplex: 'half' }) +``` + +#### `request.duplex` + +- half + +In this implementation of fetch, `request.duplex` must be set if `request.body` is `ReadableStream` or `Async Iterables`. And fetch requests are currently always be full duplex. More detail refer to [Fetch Standard.](https://fetch.spec.whatwg.org/#dom-requestinit-duplex) + +#### `response.body` + +Nodejs has two kinds of streams: [web streams](https://nodejs.org/dist/latest-v16.x/docs/api/webstreams.html), which follow the API of the WHATWG web standard found in browsers, and an older Node-specific [streams API](https://nodejs.org/api/stream.html). `response.body` returns a readable web stream. If you would prefer to work with a Node stream you can convert a web stream using `.fromWeb()`. + +```js +import { fetch } from 'undici' +import { Readable } from 'node:stream' + +const response = await fetch('https://example.com') +const readableWebStream = response.body +const readableNodeStream = Readable.fromWeb(readableWebStream) +``` + +#### Specification Compliance + +This section documents parts of the [Fetch Standard](https://fetch.spec.whatwg.org) that Undici does +not support or does not fully implement. + +##### Garbage Collection + +* https://fetch.spec.whatwg.org/#garbage-collection + +The [Fetch Standard](https://fetch.spec.whatwg.org) allows users to skip consuming the response body by relying on +[garbage collection](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Memory_Management#garbage_collection) to release connection resources. Undici does not do the same. Therefore, it is important to always either consume or cancel the response body. 
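+
+For instance, if only the status code is needed, the body can be cancelled explicitly rather than consumed (an illustrative sketch, complementing the examples below):
+
+```js
+import { fetch } from 'undici'
+
+const res = await fetch('https://example.com')
+console.log(res.status)
+// Release the underlying connection instead of leaving it to the garbage collector.
+await res.body.cancel()
+```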
+ +Garbage collection in Node is less aggressive and deterministic +(due to the lack of clear idle periods that browsers have through the rendering refresh rate) +which means that leaving the release of connection resources to the garbage collector can lead +to excessive connection usage, reduced performance (due to less connection re-use), and even +stalls or deadlocks when running out of connections. + +```js +// Do +const headers = await fetch(url) + .then(async res => { + for await (const chunk of res.body) { + // force consumption of body + } + return res.headers + }) + +// Do not +const headers = await fetch(url) + .then(res => res.headers) +``` + +However, if you want to get only headers, it might be better to use `HEAD` request method. Usage of this method will obviate the need for consumption or cancelling of the response body. See [MDN - HTTP - HTTP request methods - HEAD](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD) for more details. + +```js +const headers = await fetch(url, { method: 'HEAD' }) + .then(res => res.headers) +``` + +##### Forbidden and Safelisted Header Names + +* https://fetch.spec.whatwg.org/#cors-safelisted-response-header-name +* https://fetch.spec.whatwg.org/#forbidden-header-name +* https://fetch.spec.whatwg.org/#forbidden-response-header-name +* https://github.com/wintercg/fetch/issues/6 + +The [Fetch Standard](https://fetch.spec.whatwg.org) requires implementations to exclude certain headers from requests and responses. In browser environments, some headers are forbidden so the user agent remains in full control over them. In Undici, these constraints are removed to give more control to the user. + +### `undici.upgrade([url, options]): Promise` + +Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details. + +Arguments: + +* **url** `string | URL | UrlObject` +* **options** [`UpgradeOptions`](docs/api/Dispatcher.md#parameter-upgradeoptions) + * **dispatcher** `Dispatcher` - Default: [getGlobalDispatcher](#undicigetglobaldispatcher) + * **maxRedirections** `Integer` - Default: `0` +* **callback** `(error: Error | null, data: UpgradeData) => void` (optional) + +Returns a promise with the result of the `Dispatcher.upgrade` method. + +Calls `options.dispatcher.upgrade(options)`. + +See [Dispatcher.upgrade](docs/api/Dispatcher.md#dispatcherupgradeoptions-callback) for more details. + +### `undici.setGlobalDispatcher(dispatcher)` + +* dispatcher `Dispatcher` + +Sets the global dispatcher used by Common API Methods. + +### `undici.getGlobalDispatcher()` + +Gets the global dispatcher used by Common API Methods. + +Returns: `Dispatcher` + +### `undici.setGlobalOrigin(origin)` + +* origin `string | URL | undefined` + +Sets the global origin used in `fetch`. + +If `undefined` is passed, the global origin will be reset. This will cause `Response.redirect`, `new Request()`, and `fetch` to throw an error when a relative path is passed. + +```js +setGlobalOrigin('http://localhost:3000') + +const response = await fetch('/api/ping') + +console.log(response.url) // http://localhost:3000/api/ping +``` + +### `undici.getGlobalOrigin()` + +Gets the global origin used in `fetch`. 
+ +Returns: `URL` + +### `UrlObject` + +* **port** `string | number` (optional) +* **path** `string` (optional) +* **pathname** `string` (optional) +* **hostname** `string` (optional) +* **origin** `string` (optional) +* **protocol** `string` (optional) +* **search** `string` (optional) + +## Specification Compliance + +This section documents parts of the HTTP/1.1 specification that Undici does +not support or does not fully implement. + +### Expect + +Undici does not support the `Expect` request header field. The request +body is always immediately sent and the `100 Continue` response will be +ignored. + +Refs: https://tools.ietf.org/html/rfc7231#section-5.1.1 + +### Pipelining + +Undici will only use pipelining if configured with a `pipelining` factor +greater than `1`. + +Undici always assumes that connections are persistent and will immediately +pipeline requests, without checking whether the connection is persistent. +Hence, automatic fallback to HTTP/1.0 or HTTP/1.1 without pipelining is +not supported. + +Undici will immediately pipeline when retrying requests after a failed +connection. However, Undici will not retry the first remaining requests in +the prior pipeline and instead error the corresponding callback/promise/stream. + +Undici will abort all running requests in the pipeline when any of them are +aborted. + +* Refs: https://tools.ietf.org/html/rfc2616#section-8.1.2.2 +* Refs: https://tools.ietf.org/html/rfc7230#section-6.3.2 + +### Manual Redirect + +Since it is not possible to manually follow an HTTP redirect on the server-side, +Undici returns the actual response instead of an `opaqueredirect` filtered one +when invoked with a `manual` redirect. This aligns `fetch()` with the other +implementations in Deno and Cloudflare Workers. + +Refs: https://fetch.spec.whatwg.org/#atomic-http-redirect-handling + +## Workarounds + +### Network address family autoselection. + +If you experience problem when connecting to a remote server that is resolved by your DNS servers to a IPv6 (AAAA record) +first, there are chances that your local router or ISP might have problem connecting to IPv6 networks. In that case +undici will throw an error with code `UND_ERR_CONNECT_TIMEOUT`. + +If the target server resolves to both a IPv6 and IPv4 (A records) address and you are using a compatible Node version +(18.3.0 and above), you can fix the problem by providing the `autoSelectFamily` option (support by both `undici.request` +and `undici.Agent`) which will enable the family autoselection algorithm when establishing the connection. + +## Collaborators + +* [__Daniele Belardi__](https://github.com/dnlup), +* [__Ethan Arrowood__](https://github.com/ethan-arrowood), +* [__Matteo Collina__](https://github.com/mcollina), +* [__Matthew Aitken__](https://github.com/KhafraDev), +* [__Robert Nagy__](https://github.com/ronag), +* [__Szymon Marczak__](https://github.com/szmarczak), +* [__Tomas Della Vedova__](https://github.com/delvedor), + +### Releasers + +* [__Ethan Arrowood__](https://github.com/ethan-arrowood), +* [__Matteo Collina__](https://github.com/mcollina), +* [__Robert Nagy__](https://github.com/ronag), +* [__Matthew Aitken__](https://github.com/KhafraDev), + +## License + +MIT diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..dc5499a --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,2 @@ +If you believe you have found a security issue in the software in this +repository, please consult https://github.com/nodejs/node/blob/HEAD/SECURITY.md. 
diff --git a/benchmarks/benchmark-http2.js b/benchmarks/benchmark-http2.js new file mode 100644 index 0000000..d8555de --- /dev/null +++ b/benchmarks/benchmark-http2.js @@ -0,0 +1,306 @@ +'use strict' + +const { connect } = require('http2') +const { createSecureContext } = require('tls') +const os = require('os') +const path = require('path') +const { readFileSync } = require('fs') +const { table } = require('table') +const { Writable } = require('stream') +const { WritableStream } = require('stream/web') +const { isMainThread } = require('worker_threads') + +const { Pool, Client, fetch, Agent, setGlobalDispatcher } = require('..') + +const ca = readFileSync(path.join(__dirname, '..', 'test', 'fixtures', 'ca.pem'), 'utf8') +const servername = 'agent1' + +const iterations = (parseInt(process.env.SAMPLES, 10) || 10) + 1 +const errorThreshold = parseInt(process.env.ERROR_THRESHOLD, 10) || 3 +const connections = parseInt(process.env.CONNECTIONS, 10) || 50 +const pipelining = parseInt(process.env.PIPELINING, 10) || 10 +const parallelRequests = parseInt(process.env.PARALLEL, 10) || 100 +const headersTimeout = parseInt(process.env.HEADERS_TIMEOUT, 10) || 0 +const bodyTimeout = parseInt(process.env.BODY_TIMEOUT, 10) || 0 +const dest = {} + +if (process.env.PORT) { + dest.port = process.env.PORT + dest.url = `https://localhost:${process.env.PORT}` +} else { + dest.url = 'https://localhost' + dest.socketPath = path.join(os.tmpdir(), 'undici.sock') +} + +const httpsBaseOptions = { + ca, + servername, + protocol: 'https:', + hostname: 'localhost', + method: 'GET', + path: '/', + query: { + frappucino: 'muffin', + goat: 'scone', + pond: 'moose', + foo: ['bar', 'baz', 'bal'], + bool: true, + numberKey: 256 + }, + ...dest +} + +const http2ClientOptions = { + secureContext: createSecureContext({ ca }), + servername +} + +const undiciOptions = { + path: '/', + method: 'GET', + headersTimeout, + bodyTimeout +} + +const Class = connections > 1 ? Pool : Client +const dispatcher = new Class(httpsBaseOptions.url, { + allowH2: true, + pipelining, + connections, + connect: { + rejectUnauthorized: false, + ca, + servername + }, + ...dest +}) + +setGlobalDispatcher(new Agent({ + allowH2: true, + pipelining, + connections, + connect: { + rejectUnauthorized: false, + ca, + servername + } +})) + +class SimpleRequest { + constructor (resolve) { + this.dst = new Writable({ + write (chunk, encoding, callback) { + callback() + } + }).on('finish', resolve) + } + + onConnect (abort) { } + + onHeaders (statusCode, headers, resume) { + this.dst.on('drain', resume) + } + + onData (chunk) { + return this.dst.write(chunk) + } + + onComplete () { + this.dst.end() + } + + onError (err) { + throw err + } +} + +function makeParallelRequests (cb) { + return Promise.all(Array.from(Array(parallelRequests)).map(() => new Promise(cb))) +} + +function printResults (results) { + // Sort results by least performant first, then compare relative performances and also printing padding + let last + + const rows = Object.entries(results) + // If any failed, put on the top of the list, otherwise order by mean, ascending + .sort((a, b) => (!a[1].success ? -1 : b[1].mean - a[1].mean)) + .map(([name, result]) => { + if (!result.success) { + return [name, result.size, 'Errored', 'N/A', 'N/A'] + } + + // Calculate throughput and relative performance + const { size, mean, standardError } = result + const relative = last !== 0 ? 
(last / mean - 1) * 100 : 0 + + // Save the slowest for relative comparison + if (typeof last === 'undefined') { + last = mean + } + + return [ + name, + size, + `${((connections * 1e9) / mean).toFixed(2)} req/sec`, + `± ${((standardError / mean) * 100).toFixed(2)} %`, + relative > 0 ? `+ ${relative.toFixed(2)} %` : '-' + ] + }) + + console.log(results) + + // Add the header row + rows.unshift(['Tests', 'Samples', 'Result', 'Tolerance', 'Difference with slowest']) + + return table(rows, { + columns: { + 0: { + alignment: 'left' + }, + 1: { + alignment: 'right' + }, + 2: { + alignment: 'right' + }, + 3: { + alignment: 'right' + }, + 4: { + alignment: 'right' + } + }, + drawHorizontalLine: (index, size) => index > 0 && index < size, + border: { + bodyLeft: '│', + bodyRight: '│', + bodyJoin: '│', + joinLeft: '|', + joinRight: '|', + joinJoin: '|' + } + }) +} + +const experiments = { + 'http2 - request' () { + return makeParallelRequests(resolve => { + connect(dest.url, http2ClientOptions, (session) => { + const headers = { + ':path': '/', + ':method': 'GET', + ':scheme': 'https', + ':authority': `localhost:${dest.port}` + } + + const request = session.request(headers) + + request.pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ).on('finish', resolve) + }) + }) + }, + 'undici - pipeline' () { + return makeParallelRequests(resolve => { + dispatcher + .pipeline(undiciOptions, data => { + return data.body + }) + .end() + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }, + 'undici - request' () { + return makeParallelRequests(resolve => { + try { + dispatcher.request(undiciOptions).then(({ body }) => { + body + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('error', (err) => { + console.log('undici - request - dispatcher.request - body - error', err) + }) + .on('finish', () => { + resolve() + }) + }) + } catch (err) { + console.error('undici - request - dispatcher.request - requestCount', err) + } + }) + }, + 'undici - stream' () { + return makeParallelRequests(resolve => { + return dispatcher + .stream(undiciOptions, () => { + return new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + }) + .then(resolve) + }) + }, + 'undici - dispatch' () { + return makeParallelRequests(resolve => { + dispatcher.dispatch(undiciOptions, new SimpleRequest(resolve)) + }) + } +} + +if (process.env.PORT) { + // fetch does not support the socket + experiments['undici - fetch'] = () => { + return makeParallelRequests(resolve => { + fetch(dest.url, {}).then(res => { + res.body.pipeTo(new WritableStream({ write () { }, close () { resolve() } })) + }).catch(console.log) + }) + } +} + +async function main () { + const { cronometro } = await import('cronometro') + + cronometro( + experiments, + { + iterations, + errorThreshold, + print: false + }, + (err, results) => { + if (err) { + throw err + } + + console.log(printResults(results)) + dispatcher.destroy() + } + ) +} + +if (isMainThread) { + main() +} else { + module.exports = main +} diff --git a/benchmarks/benchmark-https.js b/benchmarks/benchmark-https.js new file mode 100644 index 0000000..a364f0a --- /dev/null +++ b/benchmarks/benchmark-https.js @@ -0,0 +1,319 @@ +'use strict' + +const https = require('https') +const os = require('os') +const path = require('path') +const { readFileSync } = require('fs') +const { table } = require('table') +const { Writable } = require('stream') +const { 
WritableStream } = require('stream/web') +const { isMainThread } = require('worker_threads') + +const { Pool, Client, fetch, Agent, setGlobalDispatcher } = require('..') + +const ca = readFileSync(path.join(__dirname, '..', 'test', 'fixtures', 'ca.pem'), 'utf8') +const servername = 'agent1' + +const iterations = (parseInt(process.env.SAMPLES, 10) || 10) + 1 +const errorThreshold = parseInt(process.env.ERROR_TRESHOLD, 10) || 3 +const connections = parseInt(process.env.CONNECTIONS, 10) || 50 +const pipelining = parseInt(process.env.PIPELINING, 10) || 10 +const parallelRequests = parseInt(process.env.PARALLEL, 10) || 100 +const headersTimeout = parseInt(process.env.HEADERS_TIMEOUT, 10) || 0 +const bodyTimeout = parseInt(process.env.BODY_TIMEOUT, 10) || 0 +const dest = {} + +if (process.env.PORT) { + dest.port = process.env.PORT + dest.url = `https://localhost:${process.env.PORT}` +} else { + dest.url = 'https://localhost' + dest.socketPath = path.join(os.tmpdir(), 'undici.sock') +} + +const httpsBaseOptions = { + ca, + servername, + protocol: 'https:', + hostname: 'localhost', + method: 'GET', + path: '/', + query: { + frappucino: 'muffin', + goat: 'scone', + pond: 'moose', + foo: ['bar', 'baz', 'bal'], + bool: true, + numberKey: 256 + }, + ...dest +} + +const httpsNoKeepAliveOptions = { + ...httpsBaseOptions, + agent: new https.Agent({ + keepAlive: false, + maxSockets: connections, + // rejectUnauthorized: false, + ca, + servername + }) +} + +const httpsKeepAliveOptions = { + ...httpsBaseOptions, + agent: new https.Agent({ + keepAlive: true, + maxSockets: connections, + // rejectUnauthorized: false, + ca, + servername + }) +} + +const undiciOptions = { + path: '/', + method: 'GET', + headersTimeout, + bodyTimeout +} + +const Class = connections > 1 ? Pool : Client +const dispatcher = new Class(httpsBaseOptions.url, { + pipelining, + connections, + connect: { + // rejectUnauthorized: false, + ca, + servername + }, + ...dest +}) + +setGlobalDispatcher(new Agent({ + pipelining, + connections, + connect: { + // rejectUnauthorized: false, + ca, + servername + } +})) + +class SimpleRequest { + constructor (resolve) { + this.dst = new Writable({ + write (chunk, encoding, callback) { + callback() + } + }).on('finish', resolve) + } + + onConnect (abort) { } + + onHeaders (statusCode, headers, resume) { + this.dst.on('drain', resume) + } + + onData (chunk) { + return this.dst.write(chunk) + } + + onComplete () { + this.dst.end() + } + + onError (err) { + throw err + } +} + +function makeParallelRequests (cb) { + return Promise.all(Array.from(Array(parallelRequests)).map(() => new Promise(cb))) +} + +function printResults (results) { + // Sort results by least performant first, then compare relative performances and also printing padding + let last + + const rows = Object.entries(results) + // If any failed, put on the top of the list, otherwise order by mean, ascending + .sort((a, b) => (!a[1].success ? -1 : b[1].mean - a[1].mean)) + .map(([name, result]) => { + if (!result.success) { + return [name, result.size, 'Errored', 'N/A', 'N/A'] + } + + // Calculate throughput and relative performance + const { size, mean, standardError } = result + const relative = last !== 0 ? (last / mean - 1) * 100 : 0 + + // Save the slowest for relative comparison + if (typeof last === 'undefined') { + last = mean + } + + return [ + name, + size, + `${((connections * 1e9) / mean).toFixed(2)} req/sec`, + `± ${((standardError / mean) * 100).toFixed(2)} %`, + relative > 0 ? 
`+ ${relative.toFixed(2)} %` : '-' + ] + }) + + console.log(results) + + // Add the header row + rows.unshift(['Tests', 'Samples', 'Result', 'Tolerance', 'Difference with slowest']) + + return table(rows, { + columns: { + 0: { + alignment: 'left' + }, + 1: { + alignment: 'right' + }, + 2: { + alignment: 'right' + }, + 3: { + alignment: 'right' + }, + 4: { + alignment: 'right' + } + }, + drawHorizontalLine: (index, size) => index > 0 && index < size, + border: { + bodyLeft: '│', + bodyRight: '│', + bodyJoin: '│', + joinLeft: '|', + joinRight: '|', + joinJoin: '|' + } + }) +} + +const experiments = { + 'https - no keepalive' () { + return makeParallelRequests(resolve => { + https.get(httpsNoKeepAliveOptions, res => { + res + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }) + }, + 'https - keepalive' () { + return makeParallelRequests(resolve => { + https.get(httpsKeepAliveOptions, res => { + res + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }) + }, + 'undici - pipeline' () { + return makeParallelRequests(resolve => { + dispatcher + .pipeline(undiciOptions, data => { + return data.body + }) + .end() + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }, + 'undici - request' () { + return makeParallelRequests(resolve => { + dispatcher.request(undiciOptions).then(({ body }) => { + body + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }) + }, + 'undici - stream' () { + return makeParallelRequests(resolve => { + return dispatcher + .stream(undiciOptions, () => { + return new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + }) + .then(resolve) + }) + }, + 'undici - dispatch' () { + return makeParallelRequests(resolve => { + dispatcher.dispatch(undiciOptions, new SimpleRequest(resolve)) + }) + } +} + +if (process.env.PORT) { + // fetch does not support the socket + experiments['undici - fetch'] = () => { + return makeParallelRequests(resolve => { + fetch(dest.url, {}).then(res => { + res.body.pipeTo(new WritableStream({ write () { }, close () { resolve() } })) + }).catch(console.log) + }) + } +} + +async function main () { + const { cronometro } = await import('cronometro') + + cronometro( + experiments, + { + iterations, + errorThreshold, + print: false + }, + (err, results) => { + if (err) { + throw err + } + + console.log(printResults(results)) + dispatcher.destroy() + } + ) +} + +if (isMainThread) { + main() +} else { + module.exports = main +} diff --git a/benchmarks/benchmark.js b/benchmarks/benchmark.js new file mode 100644 index 0000000..5bf3d2e --- /dev/null +++ b/benchmarks/benchmark.js @@ -0,0 +1,300 @@ +'use strict' + +const http = require('http') +const os = require('os') +const path = require('path') +const { table } = require('table') +const { Writable } = require('stream') +const { WritableStream } = require('stream/web') +const { isMainThread } = require('worker_threads') + +const { Pool, Client, fetch, Agent, setGlobalDispatcher } = require('..') + +const iterations = (parseInt(process.env.SAMPLES, 10) || 10) + 1 +const errorThreshold = parseInt(process.env.ERROR_TRESHOLD, 10) || 3 +const connections = parseInt(process.env.CONNECTIONS, 10) || 50 +const pipelining = parseInt(process.env.PIPELINING, 10) || 10 +const parallelRequests = parseInt(process.env.PARALLEL, 10) || 
100 +const headersTimeout = parseInt(process.env.HEADERS_TIMEOUT, 10) || 0 +const bodyTimeout = parseInt(process.env.BODY_TIMEOUT, 10) || 0 +const dest = {} + +if (process.env.PORT) { + dest.port = process.env.PORT + dest.url = `http://localhost:${process.env.PORT}` +} else { + dest.url = 'http://localhost' + dest.socketPath = path.join(os.tmpdir(), 'undici.sock') +} + +const httpBaseOptions = { + protocol: 'http:', + hostname: 'localhost', + method: 'GET', + path: '/', + query: { + frappucino: 'muffin', + goat: 'scone', + pond: 'moose', + foo: ['bar', 'baz', 'bal'], + bool: true, + numberKey: 256 + }, + ...dest +} + +const httpNoKeepAliveOptions = { + ...httpBaseOptions, + agent: new http.Agent({ + keepAlive: false, + maxSockets: connections + }) +} + +const httpKeepAliveOptions = { + ...httpBaseOptions, + agent: new http.Agent({ + keepAlive: true, + maxSockets: connections + }) +} + +const undiciOptions = { + path: '/', + method: 'GET', + headersTimeout, + bodyTimeout +} + +const Class = connections > 1 ? Pool : Client +const dispatcher = new Class(httpBaseOptions.url, { + pipelining, + connections, + ...dest +}) + +setGlobalDispatcher(new Agent({ + pipelining, + connections, + connect: { + rejectUnauthorized: false + } +})) + +class SimpleRequest { + constructor (resolve) { + this.dst = new Writable({ + write (chunk, encoding, callback) { + callback() + } + }).on('finish', resolve) + } + + onConnect (abort) { } + + onHeaders (statusCode, headers, resume) { + this.dst.on('drain', resume) + } + + onData (chunk) { + return this.dst.write(chunk) + } + + onComplete () { + this.dst.end() + } + + onError (err) { + throw err + } +} + +function makeParallelRequests (cb) { + return Promise.all(Array.from(Array(parallelRequests)).map(() => new Promise(cb))) +} + +function printResults (results) { + // Sort results by least performant first, then compare relative performances and also printing padding + let last + + const rows = Object.entries(results) + // If any failed, put on the top of the list, otherwise order by mean, ascending + .sort((a, b) => (!a[1].success ? -1 : b[1].mean - a[1].mean)) + .map(([name, result]) => { + if (!result.success) { + return [name, result.size, 'Errored', 'N/A', 'N/A'] + } + + // Calculate throughput and relative performance + const { size, mean, standardError } = result + const relative = last !== 0 ? (last / mean - 1) * 100 : 0 + + // Save the slowest for relative comparison + if (typeof last === 'undefined') { + last = mean + } + + return [ + name, + size, + `${((connections * 1e9) / mean).toFixed(2)} req/sec`, + `± ${((standardError / mean) * 100).toFixed(2)} %`, + relative > 0 ? 
`+ ${relative.toFixed(2)} %` : '-' + ] + }) + + console.log(results) + + // Add the header row + rows.unshift(['Tests', 'Samples', 'Result', 'Tolerance', 'Difference with slowest']) + + return table(rows, { + columns: { + 0: { + alignment: 'left' + }, + 1: { + alignment: 'right' + }, + 2: { + alignment: 'right' + }, + 3: { + alignment: 'right' + }, + 4: { + alignment: 'right' + } + }, + drawHorizontalLine: (index, size) => index > 0 && index < size, + border: { + bodyLeft: '│', + bodyRight: '│', + bodyJoin: '│', + joinLeft: '|', + joinRight: '|', + joinJoin: '|' + } + }) +} + +const experiments = { + 'http - no keepalive' () { + return makeParallelRequests(resolve => { + http.get(httpNoKeepAliveOptions, res => { + res + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }) + }, + 'http - keepalive' () { + return makeParallelRequests(resolve => { + http.get(httpKeepAliveOptions, res => { + res + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }) + }, + 'undici - pipeline' () { + return makeParallelRequests(resolve => { + dispatcher + .pipeline(undiciOptions, data => { + return data.body + }) + .end() + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }, + 'undici - request' () { + return makeParallelRequests(resolve => { + dispatcher.request(undiciOptions).then(({ body }) => { + body + .pipe( + new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + ) + .on('finish', resolve) + }) + }) + }, + 'undici - stream' () { + return makeParallelRequests(resolve => { + return dispatcher + .stream(undiciOptions, () => { + return new Writable({ + write (chunk, encoding, callback) { + callback() + } + }) + }) + .then(resolve) + }) + }, + 'undici - dispatch' () { + return makeParallelRequests(resolve => { + dispatcher.dispatch(undiciOptions, new SimpleRequest(resolve)) + }) + } +} + +if (process.env.PORT) { + // fetch does not support the socket + experiments['undici - fetch'] = () => { + return makeParallelRequests(resolve => { + fetch(dest.url).then(res => { + res.body.pipeTo(new WritableStream({ write () { }, close () { resolve() } })) + }).catch(console.log) + }) + } +} + +async function main () { + const { cronometro } = await import('cronometro') + + cronometro( + experiments, + { + iterations, + errorThreshold, + print: false + }, + (err, results) => { + if (err) { + throw err + } + + console.log(printResults(results)) + dispatcher.destroy() + } + ) +} + +if (isMainThread) { + main() +} else { + module.exports = main +} diff --git a/benchmarks/server-http2.js b/benchmarks/server-http2.js new file mode 100644 index 0000000..0be99cd --- /dev/null +++ b/benchmarks/server-http2.js @@ -0,0 +1,49 @@ +'use strict' + +const { unlinkSync, readFileSync } = require('fs') +const { createSecureServer } = require('http2') +const os = require('os') +const path = require('path') +const cluster = require('cluster') + +const key = readFileSync(path.join(__dirname, '..', 'test', 'fixtures', 'key.pem'), 'utf8') +const cert = readFileSync(path.join(__dirname, '..', 'test', 'fixtures', 'cert.pem'), 'utf8') + +const socketPath = path.join(os.tmpdir(), 'undici.sock') + +const port = process.env.PORT || socketPath +const timeout = parseInt(process.env.TIMEOUT, 10) || 1 +const workers = parseInt(process.env.WORKERS) || os.cpus().length + +const sessionTimeout = 600e3 // 10 minutes + +if 
(cluster.isPrimary) { + try { + unlinkSync(socketPath) + } catch (_) { + // Do nothing if the socket does not exist + } + + for (let i = 0; i < workers; i++) { + cluster.fork() + } +} else { + const buf = Buffer.alloc(64 * 1024, '_') + const server = createSecureServer( + { + key, + cert, + allowHTTP1: true, + sessionTimeout + }, + (req, res) => { + setTimeout(() => { + res.end(buf) + }, timeout) + } + ) + + server.keepAliveTimeout = 600e3 + + server.listen(port) +} diff --git a/benchmarks/server-https.js b/benchmarks/server-https.js new file mode 100644 index 0000000..f0275d9 --- /dev/null +++ b/benchmarks/server-https.js @@ -0,0 +1,41 @@ +'use strict' + +const { unlinkSync, readFileSync } = require('fs') +const { createServer } = require('https') +const os = require('os') +const path = require('path') +const cluster = require('cluster') + +const key = readFileSync(path.join(__dirname, '..', 'test', 'fixtures', 'key.pem'), 'utf8') +const cert = readFileSync(path.join(__dirname, '..', 'test', 'fixtures', 'cert.pem'), 'utf8') + +const socketPath = path.join(os.tmpdir(), 'undici.sock') + +const port = process.env.PORT || socketPath +const timeout = parseInt(process.env.TIMEOUT, 10) || 1 +const workers = parseInt(process.env.WORKERS) || os.cpus().length + +if (cluster.isPrimary) { + try { + unlinkSync(socketPath) + } catch (_) { + // Do nothing if the socket does not exist + } + + for (let i = 0; i < workers; i++) { + cluster.fork() + } +} else { + const buf = Buffer.alloc(64 * 1024, '_') + const server = createServer({ + key, + cert, + keepAliveTimeout: 600e3 + }, (req, res) => { + setTimeout(() => { + res.end(buf) + }, timeout) + }) + + server.listen(port) +} diff --git a/benchmarks/server.js b/benchmarks/server.js new file mode 100644 index 0000000..e1a32e8 --- /dev/null +++ b/benchmarks/server.js @@ -0,0 +1,33 @@ +'use strict' + +const { unlinkSync } = require('fs') +const { createServer } = require('http') +const os = require('os') +const path = require('path') +const cluster = require('cluster') + +const socketPath = path.join(os.tmpdir(), 'undici.sock') + +const port = process.env.PORT || socketPath +const timeout = parseInt(process.env.TIMEOUT, 10) || 1 +const workers = parseInt(process.env.WORKERS) || os.cpus().length + +if (cluster.isPrimary) { + try { + unlinkSync(socketPath) + } catch (_) { + // Do nothing if the socket does not exist + } + + for (let i = 0; i < workers; i++) { + cluster.fork() + } +} else { + const buf = Buffer.alloc(64 * 1024, '_') + const server = createServer((req, res) => { + setTimeout(function () { + res.end(buf) + }, timeout) + }).listen(port) + server.keepAliveTimeout = 600e3 +} diff --git a/benchmarks/wait.js b/benchmarks/wait.js new file mode 100644 index 0000000..771f9f2 --- /dev/null +++ b/benchmarks/wait.js @@ -0,0 +1,22 @@ +'use strict' + +const os = require('os') +const path = require('path') +const waitOn = require('wait-on') + +const socketPath = path.join(os.tmpdir(), 'undici.sock') + +let resources +if (process.env.PORT) { + resources = [`http-get://localhost:${process.env.PORT}/`] +} else { + resources = [`http-get://unix:${socketPath}:/`] +} + +waitOn({ + resources, + timeout: 5000 +}).catch((err) => { + console.error(err) + process.exit(1) +}) diff --git a/binary-search/.gitignore b/binary-search/.gitignore new file mode 100644 index 0000000..07e6e47 --- /dev/null +++ b/binary-search/.gitignore @@ -0,0 +1 @@ +/node_modules diff --git a/binary-search/.travis.yml b/binary-search/.travis.yml new file mode 100644 index 0000000..795ac70 --- 
/dev/null +++ b/binary-search/.travis.yml @@ -0,0 +1,6 @@ +language: node_js +node_js: + - '6' +cache: + directories: + - node_modules diff --git a/binary-search/README.md b/binary-search/README.md new file mode 100644 index 0000000..e02805a --- /dev/null +++ b/binary-search/README.md @@ -0,0 +1,46 @@ +binary-search +============= + +This is a really tiny, stupid, simple binary search library for Node.JS. We +wrote it because existing solutions were bloated and incorrect. + +This version is a straight port of the Java version mentioned by Joshua Bloch +in his article, [Nearly All Binary Searches and Merge Sorts are Broken](http://googleresearch.blogspot.com/2006/06/extra-extra-read-all-about-it-nearly.html). + +Thanks to [Conrad Irwin](https://github.com/ConradIrwin) and [Michael +Marino](https://github.com/mgmarino) for, ironically, pointing out bugs. + +Example +------- + +```js +var bs = require("binary-search"); + +bs([1, 2, 3, 4], 3, function(element, needle) { return element - needle; }); +// => 2 + +bs([1, 2, 4, 5], 3, function(element, needle) { return element - needle; }); +// => -3 +``` + +Be advised that passing in a comparator function is *required*. Since you're +probably using one for your sort function anyway, this isn't a big deal. + +The comparator takes a 1st and 2nd argument of element and needle, respectively. + +The comparator also takes a 3rd and 4th argument, the current index and array, +respectively. You shouldn't normally need the index or array to compare values, +but it's there if you do. + +You may also, optionally, specify an input range as the final two parameters, +in case you want to limit the search to a particular range of inputs. However, +be advised that this is generally a bad idea (but sometimes bad ideas are +necessary). + +License +------- + +To the extent possible by law, The Dark Sky Company, LLC has [waived all +copyright and related or neighboring rights][cc0] to this library. 
+ +[cc0]: http://creativecommons.org/publicdomain/zero/1.0/ diff --git a/binary-search/binary-search.d.ts b/binary-search/binary-search.d.ts new file mode 100644 index 0000000..0395d93 --- /dev/null +++ b/binary-search/binary-search.d.ts @@ -0,0 +1,22 @@ +//Typescript type definition for: +//https://github.com/darkskyapp/binary-search +declare module 'binary-search' { + +function binarySearch( + haystack: ArrayLike, + needle: B, + comparator: (a: A, b: B, index?: number, haystack?: A[]) => any, + // Notes about comparator return value: + // * when ab the comparator's returned value should be: + // * positive number or a value such that `+value` is a positive number + // * examples: `1` or the string `"1"` + // * when a===b + // * any value other than the return cases for ab + // * examples: undefined, NaN, 'abc' + low?: number, + high?: number): number; //returns index of found result or number < 0 if not found +export = binarySearch; +} diff --git a/binary-search/index.js b/binary-search/index.js new file mode 100644 index 0000000..bc281ca --- /dev/null +++ b/binary-search/index.js @@ -0,0 +1,45 @@ +module.exports = function(haystack, needle, comparator, low, high) { + var mid, cmp; + + if(low === undefined) + low = 0; + + else { + low = low|0; + if(low < 0 || low >= haystack.length) + throw new RangeError("invalid lower bound"); + } + + if(high === undefined) + high = haystack.length - 1; + + else { + high = high|0; + if(high < low || high >= haystack.length) + throw new RangeError("invalid upper bound"); + } + + while(low <= high) { + // The naive `low + high >>> 1` could fail for array lengths > 2**31 + // because `>>>` converts its operands to int32. `low + (high - low >>> 1)` + // works for array lengths <= 2**32-1 which is also Javascript's max array + // length. + mid = low + ((high - low) >>> 1); + cmp = +comparator(haystack[mid], needle, mid, haystack); + + // Too low. + if(cmp < 0.0) + low = mid + 1; + + // Too high. + else if(cmp > 0.0) + high = mid - 1; + + // Key found. + else + return mid; + } + + // Key not found. 
+ return ~low; +} diff --git a/binary-search/package.json b/binary-search/package.json new file mode 100644 index 0000000..9a91ed5 --- /dev/null +++ b/binary-search/package.json @@ -0,0 +1,28 @@ +{ + "name": "binary-search", + "version": "1.3.6", + "description": "tiny binary search function with comparators", + "license": "CC0-1.0", + "typings": "./binary-search.d.ts", + "author": { + "name": "The Dark Sky Company, LLC", + "email": "support@darkskyapp.com" + }, + "contributors": [ + { + "name": "Darcy Parker", + "web": "https://github.com/darcyparker" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/darkskyapp/binary-search.git" + }, + "devDependencies": { + "chai": "^4.2.0", + "mocha": "^5.2.0" + }, + "scripts": { + "test": "mocha" + } +} diff --git a/binary-search/test.js b/binary-search/test.js new file mode 100644 index 0000000..95a497f --- /dev/null +++ b/binary-search/test.js @@ -0,0 +1,46 @@ +var expect = require("chai").expect; + +describe("binarysearch", function() { + var bs = require("./"), + arr = [1, 2, 2, 2, 3, 5, 9], + cmp = function(a, b) { return a - b; }; + + it("should bail if not passed an array", function() { + expect(function() { bs(undefined, 3, cmp); }).to.throw(TypeError); + }); + + it("should bail if not passed a comparator", function() { + expect(function() { bs(arr, 3, undefined); }).to.throw(TypeError); + }); + + it("should return the index of an item in a sorted array", function() { + expect(bs(arr, 3, cmp)).to.equal(4); + }); + + it("should return the index of where the item would go plus one, negated, if the item is not found", function() { + expect(bs(arr, 4, cmp)).to.equal(-6); + }); + + it("should return any valid index if an item exists multiple times in the array", function() { + expect(bs(arr, 2, cmp)).to.equal(3); + }); + + it("should work even on empty arrays", function() { + expect(bs([], 42, cmp)).to.equal(-1); + }); + + it("should work even on arrays of doubles", function() { + expect(bs([0.0, 0.1, 0.2, 0.3, 0.4], 0.25, cmp)).to.equal(-4); + }); + + it("should pass the index and array parameters to the comparator", function() { + var indexes = [], + indexCmp = function(a, b, i, array) { + expect(array).to.equal(arr); + indexes.push(i); + return cmp(a, b); + }; + bs(arr, 3, indexCmp); + expect(indexes).to.deep.equal([3, 5, 4]) + }); +}); diff --git a/build/Dockerfile b/build/Dockerfile new file mode 100644 index 0000000..5438b73 --- /dev/null +++ b/build/Dockerfile @@ -0,0 +1,18 @@ +FROM node:20-alpine@sha256:4559bc033338938e54d0a3c2f0d7c3ad7d1d13c28c4c405b85c6b3a26f4ce5f7 + +ARG UID=1000 +ARG GID=1000 + +RUN apk add -U clang lld wasi-sdk +RUN mkdir /home/node/undici + +WORKDIR /home/node/undici + +COPY package.json . 
+COPY build build +COPY deps deps +COPY lib lib + +RUN npm i + +USER node diff --git a/build/wasm.js b/build/wasm.js new file mode 100644 index 0000000..fd90ac2 --- /dev/null +++ b/build/wasm.js @@ -0,0 +1,101 @@ +'use strict' + +const { execSync } = require('child_process') +const { writeFileSync, readFileSync } = require('fs') +const { join, resolve } = require('path') + +const ROOT = resolve(__dirname, '../') +const WASM_SRC = resolve(__dirname, '../deps/llhttp') +const WASM_OUT = resolve(__dirname, '../lib/llhttp') +const DOCKERFILE = resolve(__dirname, './Dockerfile') + +let platform = process.env.WASM_PLATFORM +if (!platform && process.argv[2]) { + platform = execSync('docker info -f "{{.OSType}}/{{.Architecture}}"').toString().trim() +} + +if (process.argv[2] === '--prebuild') { + const cmd = `docker build --platform=${platform.toString().trim()} -t llhttp_wasm_builder -f ${DOCKERFILE} ${ROOT}` + + console.log(`> ${cmd}\n\n`) + execSync(cmd, { stdio: 'inherit' }) + + process.exit(0) +} + +if (process.argv[2] === '--docker') { + let cmd = `docker run --rm -it --platform=${platform.toString().trim()}` + if (process.platform === 'linux') { + cmd += ` --user ${process.getuid()}:${process.getegid()}` + } + + cmd += ` --mount type=bind,source=${ROOT}/lib/llhttp,target=/home/node/undici/lib/llhttp llhttp_wasm_builder node build/wasm.js` + console.log(`> ${cmd}\n\n`) + execSync(cmd, { stdio: 'inherit' }) + process.exit(0) +} + +// Gather information about the tools used for the build +const buildInfo = execSync('apk info -v').toString() +if (!buildInfo.includes('wasi-sdk')) { + console.log('Failed to generate build environment information') + process.exit(-1) +} +writeFileSync(join(WASM_OUT, 'wasm_build_env.txt'), buildInfo) + +// Build wasm binary +execSync(`clang \ + --sysroot=/usr/share/wasi-sysroot \ + -target wasm32-unknown-wasi \ + -Ofast \ + -fno-exceptions \ + -fvisibility=hidden \ + -mexec-model=reactor \ + -Wl,-error-limit=0 \ + -Wl,-O3 \ + -Wl,--lto-O3 \ + -Wl,--strip-all \ + -Wl,--allow-undefined \ + -Wl,--export-dynamic \ + -Wl,--export-table \ + -Wl,--export=malloc \ + -Wl,--export=free \ + -Wl,--no-entry \ + ${join(WASM_SRC, 'src')}/*.c \ + -I${join(WASM_SRC, 'include')} \ + -o ${join(WASM_OUT, 'llhttp.wasm')}`, { stdio: 'inherit' }) + +const base64Wasm = readFileSync(join(WASM_OUT, 'llhttp.wasm')).toString('base64') +writeFileSync( + join(WASM_OUT, 'llhttp-wasm.js'), + `module.exports = '${base64Wasm}'\n` +) + +// Build wasm simd binary +execSync(`clang \ + --sysroot=/usr/share/wasi-sysroot \ + -target wasm32-unknown-wasi \ + -msimd128 \ + -Ofast \ + -fno-exceptions \ + -fvisibility=hidden \ + -mexec-model=reactor \ + -Wl,-error-limit=0 \ + -Wl,-O3 \ + -Wl,--lto-O3 \ + -Wl,--strip-all \ + -Wl,--allow-undefined \ + -Wl,--export-dynamic \ + -Wl,--export-table \ + -Wl,--export=malloc \ + -Wl,--export=free \ + -Wl,--no-entry \ + ${join(WASM_SRC, 'src')}/*.c \ + -I${join(WASM_SRC, 'include')} \ + -o ${join(WASM_OUT, 'llhttp_simd.wasm')}`, { stdio: 'inherit' }) + +const base64WasmSimd = readFileSync(join(WASM_OUT, 'llhttp_simd.wasm')).toString('base64') +writeFileSync( + join(WASM_OUT, 'llhttp_simd-wasm.js'), + `module.exports = '${base64WasmSimd}'\n` +) diff --git a/docs/api/Agent.md b/docs/api/Agent.md new file mode 100644 index 0000000..dd5d99b --- /dev/null +++ b/docs/api/Agent.md @@ -0,0 +1,80 @@ +# Agent + +Extends: `undici.Dispatcher` + +Agent allow dispatching requests against multiple different origins. 
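For orientation, a minimal sketch of driving requests through an `Agent` (the origin URL and the option value below are placeholders, not part of the original document):

```js
import { Agent, setGlobalDispatcher, request } from 'undici'

// A single Agent can dispatch to any origin; it lazily creates a Pool per origin.
const agent = new Agent({ keepAliveTimeout: 10_000 })

// Pass it explicitly for a single request...
const { statusCode, body } = await request('http://localhost:3000', { dispatcher: agent })
console.log(statusCode, await body.text())

// ...or install it as the global dispatcher used by undici's top-level helpers.
setGlobalDispatcher(agent)
```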
+ +Requests are not guaranteed to be dispatched in order of invocation. + +## `new undici.Agent([options])` + +Arguments: + +* **options** `AgentOptions` (optional) + +Returns: `Agent` + +### Parameter: `AgentOptions` + +Extends: [`PoolOptions`](Pool.md#parameter-pooloptions) + +* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)` +* **maxRedirections** `Integer` - Default: `0`. The number of HTTP redirection to follow unless otherwise specified in `DispatchOptions`. +* **interceptors** `{ Agent: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time. + +## Instance Properties + +### `Agent.closed` + +Implements [Client.closed](Client.md#clientclosed) + +### `Agent.destroyed` + +Implements [Client.destroyed](Client.md#clientdestroyed) + +## Instance Methods + +### `Agent.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). + +### `Agent.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +### `Agent.dispatch(options, handler: AgentDispatchOptions)` + +Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +#### Parameter: `AgentDispatchOptions` + +Extends: [`DispatchOptions`](Dispatcher.md#parameter-dispatchoptions) + +* **origin** `string | URL` +* **maxRedirections** `Integer`. + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +### `Agent.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `Agent.dispatch(options, handler)` + +Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `Agent.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `Agent.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `Agent.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `Agent.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). diff --git a/docs/api/BalancedPool.md b/docs/api/BalancedPool.md new file mode 100644 index 0000000..290c734 --- /dev/null +++ b/docs/api/BalancedPool.md @@ -0,0 +1,99 @@ +# Class: BalancedPool + +Extends: `undici.Dispatcher` + +A pool of [Pool](Pool.md) instances connected to multiple upstreams. + +Requests are not guaranteed to be dispatched in order of invocation. + +## `new BalancedPool(upstreams [, options])` + +Arguments: + +* **upstreams** `URL | string | string[]` - It should only include the **protocol, hostname, and port**. 
+* **options** `BalancedPoolOptions` (optional) + +### Parameter: `BalancedPoolOptions` + +Extends: [`PoolOptions`](Pool.md#parameter-pooloptions) + +* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Pool(origin, opts)` + +The `PoolOptions` are passed to each of the `Pool` instances being created. +## Instance Properties + +### `BalancedPool.upstreams` + +Returns an array of upstreams that were previously added. + +### `BalancedPool.closed` + +Implements [Client.closed](Client.md#clientclosed) + +### `BalancedPool.destroyed` + +Implements [Client.destroyed](Client.md#clientdestroyed) + +### `Pool.stats` + +Returns [`PoolStats`](PoolStats.md) instance for this pool. + +## Instance Methods + +### `BalancedPool.addUpstream(upstream)` + +Add an upstream. + +Arguments: + +* **upstream** `string` - It should only include the **protocol, hostname, and port**. + +### `BalancedPool.removeUpstream(upstream)` + +Removes an upstream that was previously addded. + +### `BalancedPool.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). + +### `BalancedPool.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +### `BalancedPool.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `BalancedPool.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `BalancedPool.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `BalancedPool.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `BalancedPool.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `BalancedPool.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). + +## Instance Events + +### Event: `'connect'` + +See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect). + +### Event: `'disconnect'` + +See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect). + +### Event: `'drain'` + +See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain). diff --git a/docs/api/CacheStorage.md b/docs/api/CacheStorage.md new file mode 100644 index 0000000..08ee99f --- /dev/null +++ b/docs/api/CacheStorage.md @@ -0,0 +1,30 @@ +# CacheStorage + +Undici exposes a W3C spec-compliant implementation of [CacheStorage](https://developer.mozilla.org/en-US/docs/Web/API/CacheStorage) and [Cache](https://developer.mozilla.org/en-US/docs/Web/API/Cache). + +## Opening a Cache + +Undici exports a top-level CacheStorage instance. You can open a new Cache, or duplicate a Cache with an existing name, by using `CacheStorage.prototype.open`. If you open a Cache with the same name as an already-existing Cache, its list of cached Responses will be shared between both instances. + +```mjs +import { caches } from 'undici' + +const cache_1 = await caches.open('v1') +const cache_2 = await caches.open('v1') + +// Although .open() creates a new instance, +assert(cache_1 !== cache_2) +// The same Response is matched in both. 
+assert.deepStrictEqual(await cache_1.match('/req'), await cache_2.match('/req')) +``` + +## Deleting a Cache + +If a Cache is deleted, the cached Responses/Requests can still be used. + +```mjs +const response = await cache_1.match('/req') +await caches.delete('v1') + +await response.text() // the Response's body +``` diff --git a/docs/api/Client.md b/docs/api/Client.md new file mode 100644 index 0000000..b9e26f0 --- /dev/null +++ b/docs/api/Client.md @@ -0,0 +1,273 @@ +# Class: Client + +Extends: `undici.Dispatcher` + +A basic HTTP/1.1 client, mapped on top a single TCP/TLS connection. Pipelining is disabled by default. + +Requests are not guaranteed to be dispatched in order of invocation. + +## `new Client(url[, options])` + +Arguments: + +* **url** `URL | string` - Should only include the **protocol, hostname, and port**. +* **options** `ClientOptions` (optional) + +Returns: `Client` + +### Parameter: `ClientOptions` + +> âš ï¸ Warning: The `H2` support is experimental. + +* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds. +* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds. +* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Defaults to 10 minutes. +* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds. +* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second. +* **maxHeaderSize** `number | null` (optional) - Default: `--max-http-header-size` or `16384` - The maximum length of request headers in bytes. Defaults to Node.js' --max-http-header-size or 16KiB. +* **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable. +* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections. +* **connect** `ConnectOptions | Function | null` (optional) - Default: `null`. +* **strictContentLength** `Boolean` (optional) - Default: `true` - Whether to treat request content length mismatches as errors. 
If true, an error is thrown when the request content-length header doesn't match the length of the request body. +* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time. +* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version. +* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. +* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation. +* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame. + +#### Parameter: `ConnectOptions` + +Every Tls option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback). +Furthermore, the following options can be passed: + +* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe. +* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: 100. +* **timeout** `number | null` (optional) - In milliseconds, Default `10e3`. +* **servername** `string | null` (optional) +* **keepAlive** `boolean | null` (optional) - Default: `true` - TCP keep-alive enabled +* **keepAliveInitialDelay** `number | null` (optional) - Default: `60000` - TCP keep-alive interval for the socket in milliseconds + +### Example - Basic Client instantiation + +This will instantiate the undici Client, but it will not connect to the origin until something is queued. Consider using `client.connect` to prematurely connect to the origin, or just call `client.request`. + +```js +'use strict' +import { Client } from 'undici' + +const client = new Client('http://localhost:3000') +``` + +### Example - Custom connector + +This will allow you to perform some additional check on the socket that will be used for the next request. + +```js +'use strict' +import { Client, buildConnector } from 'undici' + +const connector = buildConnector({ rejectUnauthorized: false }) +const client = new Client('https://localhost:3000', { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (/* assertion */) { + socket.destroy() + cb(new Error('kaboom')) + } else { + cb(null, socket) + } + }) + } +}) +``` + +## Instance Methods + +### `Client.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). 
+ +### `Client.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). + +Waits until socket is closed before invoking the callback (or returning a promise if no callback is provided). + +### `Client.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `Client.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `Client.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `Client.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `Client.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `Client.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). + +## Instance Properties + +### `Client.closed` + +* `boolean` + +`true` after `client.close()` has been called. + +### `Client.destroyed` + +* `boolean` + +`true` after `client.destroyed()` has been called or `client.close()` has been called and the client shutdown has completed. + +### `Client.pipelining` + +* `number` + +Property to get and set the pipelining factor. + +## Instance Events + +### Event: `'connect'` + +See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect). + +Parameters: + +* **origin** `URL` +* **targets** `Array` + +Emitted when a socket has been created and connected. The client will connect once `client.size > 0`. + +#### Example - Client connect event + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +client.on('connect', (origin) => { + console.log(`Connected to ${origin}`) // should print before the request body statement +}) + +try { + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.setEncoding('utf-8') + body.on('data', console.log) + client.close() + server.close() +} catch (error) { + console.error(error) + client.close() + server.close() +} +``` + +### Event: `'disconnect'` + +See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect). + +Parameters: + +* **origin** `URL` +* **targets** `Array` +* **error** `Error` + +Emitted when socket has disconnected. The error argument of the event is the error which caused the socket to disconnect. The client will reconnect if or once `client.size > 0`. 
+ +#### Example - Client disconnect event + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.destroy() +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +client.on('disconnect', (origin) => { + console.log(`Disconnected from ${origin}`) +}) + +try { + await client.request({ + path: '/', + method: 'GET' + }) +} catch (error) { + console.error(error.message) + client.close() + server.close() +} +``` + +### Event: `'drain'` + +Emitted when pipeline is no longer busy. + +See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain). + +#### Example - Client drain event + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +client.on('drain', () => { + console.log('drain event') + client.close() + server.close() +}) + +const requests = [ + client.request({ path: '/', method: 'GET' }), + client.request({ path: '/', method: 'GET' }), + client.request({ path: '/', method: 'GET' }) +] + +await Promise.all(requests) + +console.log('requests completed') +``` + +### Event: `'error'` + +Invoked for users errors such as throwing in the `onError` handler. diff --git a/docs/api/Connector.md b/docs/api/Connector.md new file mode 100644 index 0000000..56821bd --- /dev/null +++ b/docs/api/Connector.md @@ -0,0 +1,115 @@ +# Connector + +Undici creates the underlying socket via the connector builder. +Normally, this happens automatically and you don't need to care about this, +but if you need to perform some additional check over the currently used socket, +this is the right place. + +If you want to create a custom connector, you must import the `buildConnector` utility. + +#### Parameter: `buildConnector.BuildOptions` + +Every Tls option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_options_callback). +Furthermore, the following options can be passed: + +* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe. +* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: `100`. +* **timeout** `number | null` (optional) - In milliseconds. Default `10e3`. +* **servername** `string | null` (optional) + +Once you call `buildConnector`, it will return a connector function, which takes the following parameters. + +#### Parameter: `connector.Options` + +* **hostname** `string` (required) +* **host** `string` (optional) +* **protocol** `string` (required) +* **port** `string` (required) +* **servername** `string` (optional) +* **localAddress** `string | null` (optional) Local address the socket should connect from. +* **httpSocket** `Socket` (optional) Establish secure connection on a given socket rather than creating a new socket. It can only be sent on TLS update. 
+ +### Basic example + +```js +'use strict' + +import { Client, buildConnector } from 'undici' + +const connector = buildConnector({ rejectUnauthorized: false }) +const client = new Client('https://localhost:3000', { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (/* assertion */) { + socket.destroy() + cb(new Error('kaboom')) + } else { + cb(null, socket) + } + }) + } +}) +``` + +### Example: validate the CA fingerprint + +```js +'use strict' + +import { Client, buildConnector } from 'undici' + +const caFingerprint = 'FO:OB:AR' +const connector = buildConnector({ rejectUnauthorized: false }) +const client = new Client('https://localhost:3000', { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (getIssuerCertificate(socket).fingerprint256 !== caFingerprint) { + socket.destroy() + cb(new Error('Fingerprint does not match or malformed certificate')) + } else { + cb(null, socket) + } + }) + } +}) + +client.request({ + path: '/', + method: 'GET' +}, (err, data) => { + if (err) throw err + + const bufs = [] + data.body.on('data', (buf) => { + bufs.push(buf) + }) + data.body.on('end', () => { + console.log(Buffer.concat(bufs).toString('utf8')) + client.close() + }) +}) + +function getIssuerCertificate (socket) { + let certificate = socket.getPeerCertificate(true) + while (certificate && Object.keys(certificate).length > 0) { + // invalid certificate + if (certificate.issuerCertificate == null) { + return null + } + + // We have reached the root certificate. + // In case of self-signed certificates, `issuerCertificate` may be a circular reference. + if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) { + break + } + + // continue the loop + certificate = certificate.issuerCertificate + } + return certificate +} +``` diff --git a/docs/api/ContentType.md b/docs/api/ContentType.md new file mode 100644 index 0000000..2bcc9f7 --- /dev/null +++ b/docs/api/ContentType.md @@ -0,0 +1,57 @@ +# MIME Type Parsing + +## `MIMEType` interface + +* **type** `string` +* **subtype** `string` +* **parameters** `Map` +* **essence** `string` + +## `parseMIMEType(input)` + +Implements [parse a MIME type](https://mimesniff.spec.whatwg.org/#parse-a-mime-type). + +Parses a MIME type, returning its type, subtype, and any associated parameters. If the parser can't parse an input it returns the string literal `'failure'`. + +```js +import { parseMIMEType } from 'undici' + +parseMIMEType('text/html; charset=gbk') +// { +// type: 'text', +// subtype: 'html', +// parameters: Map(1) { 'charset' => 'gbk' }, +// essence: 'text/html' +// } +``` + +Arguments: + +* **input** `string` + +Returns: `MIMEType|'failure'` + +## `serializeAMimeType(input)` + +Implements [serialize a MIME type](https://mimesniff.spec.whatwg.org/#serialize-a-mime-type). + +Serializes a MIMEType object. 
+ +```js +import { serializeAMimeType } from 'undici' + +serializeAMimeType({ + type: 'text', + subtype: 'html', + parameters: new Map([['charset', 'gbk']]), + essence: 'text/html' +}) +// text/html;charset=gbk + +``` + +Arguments: + +* **mimeType** `MIMEType` + +Returns: `string` diff --git a/docs/api/Cookies.md b/docs/api/Cookies.md new file mode 100644 index 0000000..0cad379 --- /dev/null +++ b/docs/api/Cookies.md @@ -0,0 +1,101 @@ +# Cookie Handling + +## `Cookie` interface + +* **name** `string` +* **value** `string` +* **expires** `Date|number` (optional) +* **maxAge** `number` (optional) +* **domain** `string` (optional) +* **path** `string` (optional) +* **secure** `boolean` (optional) +* **httpOnly** `boolean` (optional) +* **sameSite** `'String'|'Lax'|'None'` (optional) +* **unparsed** `string[]` (optional) Left over attributes that weren't parsed. + +## `deleteCookie(headers, name[, attributes])` + +Sets the expiry time of the cookie to the unix epoch, causing browsers to delete it when received. + +```js +import { deleteCookie, Headers } from 'undici' + +const headers = new Headers() +deleteCookie(headers, 'name') + +console.log(headers.get('set-cookie')) // name=; Expires=Thu, 01 Jan 1970 00:00:00 GMT +``` + +Arguments: + +* **headers** `Headers` +* **name** `string` +* **attributes** `{ path?: string, domain?: string }` (optional) + +Returns: `void` + +## `getCookies(headers)` + +Parses the `Cookie` header and returns a list of attributes and values. + +```js +import { getCookies, Headers } from 'undici' + +const headers = new Headers({ + cookie: 'get=cookies; and=attributes' +}) + +console.log(getCookies(headers)) // { get: 'cookies', and: 'attributes' } +``` + +Arguments: + +* **headers** `Headers` + +Returns: `Record` + +## `getSetCookies(headers)` + +Parses all `Set-Cookie` headers. + +```js +import { getSetCookies, Headers } from 'undici' + +const headers = new Headers({ 'set-cookie': 'undici=getSetCookies; Secure' }) + +console.log(getSetCookies(headers)) +// [ +// { +// name: 'undici', +// value: 'getSetCookies', +// secure: true +// } +// ] + +``` + +Arguments: + +* **headers** `Headers` + +Returns: `Cookie[]` + +## `setCookie(headers, cookie)` + +Appends a cookie to the `Set-Cookie` header. + +```js +import { setCookie, Headers } from 'undici' + +const headers = new Headers() +setCookie(headers, { name: 'undici', value: 'setCookie' }) + +console.log(headers.get('Set-Cookie')) // undici=setCookie +``` + +Arguments: + +* **headers** `Headers` +* **cookie** `Cookie` + +Returns: `void` diff --git a/docs/api/DiagnosticsChannel.md b/docs/api/DiagnosticsChannel.md new file mode 100644 index 0000000..0aa0b9a --- /dev/null +++ b/docs/api/DiagnosticsChannel.md @@ -0,0 +1,204 @@ +# Diagnostics Channel Support + +Stability: Experimental. + +Undici supports the [`diagnostics_channel`](https://nodejs.org/api/diagnostics_channel.html) (currently available only on Node.js v16+). +It is the preferred way to instrument Undici and retrieve internal information. + +The channels available are the following. + +## `undici:request:create` + +This message is published when a new outgoing request is created. 
+ +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => { + console.log('origin', request.origin) + console.log('completed', request.completed) + console.log('method', request.method) + console.log('path', request.path) + console.log('headers') // raw text, e.g: 'bar: bar\r\n' + request.addHeader('hello', 'world') + console.log('headers', request.headers) // e.g. 'bar: bar\r\nhello: world\r\n' +}) +``` + +Note: a request is only loosely completed to a given socket. + + +## `undici:request:bodySent` + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:bodySent').subscribe(({ request }) => { + // request is the same object undici:request:create +}) +``` + +## `undici:request:headers` + +This message is published after the response headers have been received, i.e. the response has been completed. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => { + // request is the same object undici:request:create + console.log('statusCode', response.statusCode) + console.log(response.statusText) + // response.headers are buffers. + console.log(response.headers.map((x) => x.toString())) +}) +``` + +## `undici:request:trailers` + +This message is published after the response body and trailers have been received, i.e. the response has been completed. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => { + // request is the same object undici:request:create + console.log('completed', request.completed) + // trailers are buffers. + console.log(trailers.map((x) => x.toString())) +}) +``` + +## `undici:request:error` + +This message is published if the request is going to error, but it has not errored yet. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:request:error').subscribe(({ request, error }) => { + // request is the same object undici:request:create +}) +``` + +## `undici:client:sendHeaders` + +This message is published right before the first byte of the request is written to the socket. + +*Note*: It will publish the exact headers that will be sent to the server in raw format. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => { + // request is the same object undici:request:create + console.log(`Full headers list ${headers.split('\r\n')}`); +}) +``` + +## `undici:client:beforeConnect` + +This message is published before creating a new connection for **any** request. +You can not assume that this event is related to any specific request. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => { + // const { host, hostname, protocol, port, servername } = connectParams + // connector is a function that creates the socket +}) +``` + +## `undici:client:connected` + +This message is published after a connection is established. 
+ +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:connected').subscribe(({ socket, connectParams, connector }) => { + // const { host, hostname, protocol, port, servername } = connectParams + // connector is a function that creates the socket +}) +``` + +## `undici:client:connectError` + +This message is published if it did not succeed to create new connection + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, socket, connectParams, connector }) => { + // const { host, hostname, protocol, port, servername } = connectParams + // connector is a function that creates the socket + console.log(`Connect failed with ${error.message}`) +}) +``` + +## `undici:websocket:open` + +This message is published after the client has successfully connected to a server. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:open').subscribe(({ address, protocol, extensions }) => { + console.log(address) // address, family, and port + console.log(protocol) // negotiated subprotocols + console.log(extensions) // negotiated extensions +}) +``` + +## `undici:websocket:close` + +This message is published after the connection has closed. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:close').subscribe(({ websocket, code, reason }) => { + console.log(websocket) // the WebSocket object + console.log(code) // the closing status code + console.log(reason) // the closing reason +}) +``` + +## `undici:websocket:socket_error` + +This message is published if the socket experiences an error. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:socket_error').subscribe((error) => { + console.log(error) +}) +``` + +## `undici:websocket:ping` + +This message is published after the client receives a ping frame, if the connection is not closing. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:ping').subscribe(({ payload }) => { + // a Buffer or undefined, containing the optional application data of the frame + console.log(payload) +}) +``` + +## `undici:websocket:pong` + +This message is published after the client receives a pong frame. + +```js +import diagnosticsChannel from 'diagnostics_channel' + +diagnosticsChannel.channel('undici:websocket:pong').subscribe(({ payload }) => { + // a Buffer or undefined, containing the optional application data of the frame + console.log(payload) +}) +``` diff --git a/docs/api/DispatchInterceptor.md b/docs/api/DispatchInterceptor.md new file mode 100644 index 0000000..7dfc260 --- /dev/null +++ b/docs/api/DispatchInterceptor.md @@ -0,0 +1,60 @@ +# Interface: DispatchInterceptor + +Extends: `Function` + +A function that can be applied to the `Dispatcher.Dispatch` function before it is invoked with a dispatch request. + +This allows one to write logic to intercept both the outgoing request, and the incoming response. + +### Parameter: `Dispatcher.Dispatch` + +The base dispatch function you are decorating. 
+ +### ReturnType: `Dispatcher.Dispatch` + +A dispatch function that has been altered to provide additional logic + +### Basic Example + +Here is an example of an interceptor being used to provide a JWT bearer token + +```js +'use strict' + +const insertHeaderInterceptor = dispatch => { + return function InterceptedDispatch(opts, handler){ + opts.headers.push('Authorization', 'Bearer [Some token]') + return dispatch(opts, handler) + } +} + +const client = new Client('https://localhost:3000', { + interceptors: { Client: [insertHeaderInterceptor] } +}) + +``` + +### Basic Example 2 + +Here is a contrived example of an interceptor stripping the headers from a response. + +```js +'use strict' + +const clearHeadersInterceptor = dispatch => { + const { DecoratorHandler } = require('undici') + class ResultInterceptor extends DecoratorHandler { + onHeaders (statusCode, headers, resume) { + return super.onHeaders(statusCode, [], resume) + } + } + return function InterceptedDispatch(opts, handler){ + return dispatch(opts, new ResultInterceptor(handler)) + } +} + +const client = new Client('https://localhost:3000', { + interceptors: { Client: [clearHeadersInterceptor] } +}) + +``` diff --git a/docs/api/Dispatcher.md b/docs/api/Dispatcher.md new file mode 100644 index 0000000..fd463bf --- /dev/null +++ b/docs/api/Dispatcher.md @@ -0,0 +1,887 @@ +# Dispatcher + +Extends: `events.EventEmitter` + +Dispatcher is the core API used to dispatch requests. + +Requests are not guaranteed to be dispatched in order of invocation. + +## Instance Methods + +### `Dispatcher.close([callback]): Promise` + +Closes the dispatcher and gracefully waits for enqueued requests to complete before resolving. + +Arguments: + +* **callback** `(error: Error | null, data: null) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +```js +dispatcher.close() // -> Promise +dispatcher.close(() => {}) // -> void +``` + +#### Example - Request resolves before Client closes + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('undici') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.setEncoding('utf8') + body.on('data', console.log) +} catch (error) {} + +await client.close() + +console.log('Client closed') +server.close() +``` + +### `Dispatcher.connect(options[, callback])` + +Starts two-way communications with the requested resource using [HTTP CONNECT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/CONNECT). 
+ +Arguments: + +* **options** `ConnectOptions` +* **callback** `(err: Error | null, data: ConnectData | null) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +#### Parameter: `ConnectOptions` + +* **path** `string` +* **headers** `UndiciHeaders` (optional) - Default: `null` +* **signal** `AbortSignal | events.EventEmitter | null` (optional) - Default: `null` +* **opaque** `unknown` (optional) - This argument parameter is passed through to `ConnectData` + +#### Parameter: `ConnectData` + +* **statusCode** `number` +* **headers** `Record` +* **socket** `stream.Duplex` +* **opaque** `unknown` + +#### Example - Connect request with echo + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + throw Error('should never get here') +}).listen() + +server.on('connect', (req, socket, head) => { + socket.write('HTTP/1.1 200 Connection established\r\n\r\n') + + let data = head.toString() + socket.on('data', (buf) => { + data += buf.toString() + }) + + socket.on('end', () => { + socket.end(data) + }) +}) + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { socket } = await client.connect({ + path: '/' + }) + const wanted = 'Body' + let data = '' + socket.on('data', d => { data += d }) + socket.on('end', () => { + console.log(`Data received: ${data.toString()} | Data wanted: ${wanted}`) + client.close() + server.close() + }) + socket.write(wanted) + socket.end() +} catch (error) { } +``` + +### `Dispatcher.destroy([error, callback]): Promise` + +Destroy the dispatcher abruptly with the given error. All the pending and running requests will be asynchronously aborted and error. Since this operation is asynchronously dispatched there might still be some progress on dispatched requests. + +Both arguments are optional; the method can be called in four different ways: + +Arguments: + +* **error** `Error | null` (optional) +* **callback** `(error: Error | null, data: null) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +```js +dispatcher.destroy() // -> Promise +dispatcher.destroy(new Error()) // -> Promise +dispatcher.destroy(() => {}) // -> void +dispatcher.destroy(new Error(), () => {}) // -> void +``` + +#### Example - Request is aborted when Client is destroyed + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end() +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const request = client.request({ + path: '/', + method: 'GET' + }) + client.destroy() + .then(() => { + console.log('Client destroyed') + server.close() + }) + await request +} catch (error) { + console.error(error) +} +``` + +### `Dispatcher.dispatch(options, handler)` + +This is the low level API which all the preceding APIs are implemented on top of. +This API is expected to evolve through semver-major versions and is less stable than the preceding higher level APIs. +It is primarily intended for library developers who implement higher level APIs on top of this. 
+ +Arguments: + +* **options** `DispatchOptions` +* **handler** `DispatchHandler` + +Returns: `Boolean` - `false` if dispatcher is busy and further dispatch calls won't make any progress until the `'drain'` event has been emitted. + +#### Parameter: `DispatchOptions` + +* **origin** `string | URL` +* **path** `string` +* **method** `string` +* **reset** `boolean` (optional) - Default: `false` - If `false`, the request will attempt to create a long-living connection by sending the `connection: keep-alive` header,otherwise will attempt to close it immediately after response by sending `connection: close` within the request and closing the socket afterwards. +* **body** `string | Buffer | Uint8Array | stream.Readable | Iterable | AsyncIterable | null` (optional) - Default: `null` +* **headers** `UndiciHeaders | string[]` (optional) - Default: `null`. +* **query** `Record | null` (optional) - Default: `null` - Query string params to be embedded in the request URL. Note that both keys and values of query are encoded using `encodeURIComponent`. If for some reason you need to send them unencoded, embed query params into path directly instead. +* **idempotent** `boolean` (optional) - Default: `true` if `method` is `'HEAD'` or `'GET'` - Whether the requests can be safely retried or not. If `false` the request won't be sent until all preceding requests in the pipeline has completed. +* **blocking** `boolean` (optional) - Default: `false` - Whether the response is expected to take a long time and would end up blocking the pipeline. When this is set to `true` further pipelining will be avoided on the same connection until headers have been received. +* **upgrade** `string | null` (optional) - Default: `null` - Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`. +* **bodyTimeout** `number | null` (optional) - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds. +* **headersTimeout** `number | null` (optional) - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds. +* **throwOnError** `boolean` (optional) - Default: `false` - Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server. +* **expectContinue** `boolean` (optional) - Default: `false` - For H2, it appends the expect: 100-continue header, and halts the request body until a 100-continue is received from the remote server + +#### Parameter: `DispatchHandler` + +* **onConnect** `(abort: () => void, context: object) => void` - Invoked before request is dispatched on socket. May be invoked multiple times when a request is retried when the request at the head of the pipeline fails. +* **onError** `(error: Error) => void` - Invoked when an error has occurred. May not throw. +* **onUpgrade** `(statusCode: number, headers: Buffer[], socket: Duplex) => void` (optional) - Invoked when request is upgraded. Required if `DispatchOptions.upgrade` is defined or `DispatchOptions.method === 'CONNECT'`. +* **onHeaders** `(statusCode: number, headers: Buffer[], resume: () => void, statusText: string) => boolean` - Invoked when statusCode and headers have been received. May be invoked multiple times due to 1xx informational headers. Not required for `upgrade` requests. +* **onData** `(chunk: Buffer) => boolean` - Invoked when response payload data is received. 
Not required for `upgrade` requests. +* **onComplete** `(trailers: Buffer[]) => void` - Invoked when response payload and trailers have been received and the request has completed. Not required for `upgrade` requests. +* **onBodySent** `(chunk: string | Buffer | Uint8Array) => void` - Invoked when a body chunk is sent to the server. Not required. For a stream or iterable body this will be invoked for every chunk. For other body types, it will be invoked once after the body is sent. + +#### Example 1 - Dispatch GET request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +const data = [] + +client.dispatch({ + path: '/', + method: 'GET', + headers: { + 'x-foo': 'bar' + } +}, { + onConnect: () => { + console.log('Connected!') + }, + onError: (error) => { + console.error(error) + }, + onHeaders: (statusCode, headers) => { + console.log(`onHeaders | statusCode: ${statusCode} | headers: ${headers}`) + }, + onData: (chunk) => { + console.log('onData: chunk received') + data.push(chunk) + }, + onComplete: (trailers) => { + console.log(`onComplete | trailers: ${trailers}`) + const res = Buffer.concat(data).toString('utf8') + console.log(`Data: ${res}`) + client.close() + server.close() + } +}) +``` + +#### Example 2 - Dispatch Upgrade Request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end() +}).listen() + +await once(server, 'listening') + +server.on('upgrade', (request, socket, head) => { + console.log('Node.js Server - upgrade event') + socket.write('HTTP/1.1 101 Web Socket Protocol Handshake\r\n') + socket.write('Upgrade: WebSocket\r\n') + socket.write('Connection: Upgrade\r\n') + socket.write('\r\n') + socket.end() +}) + +const client = new Client(`http://localhost:${server.address().port}`) + +client.dispatch({ + path: '/', + method: 'GET', + upgrade: 'websocket' +}, { + onConnect: () => { + console.log('Undici Client - onConnect') + }, + onError: (error) => { + console.log('onError') // shouldn't print + }, + onUpgrade: (statusCode, headers, socket) => { + console.log('Undici Client - onUpgrade') + console.log(`onUpgrade Headers: ${headers}`) + socket.on('data', buffer => { + console.log(buffer.toString('utf8')) + }) + socket.on('end', () => { + client.close() + server.close() + }) + socket.end() + } +}) +``` + +#### Example 3 - Dispatch POST request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + request.on('data', (data) => { + console.log(`Request Data: ${data.toString('utf8')}`) + const body = JSON.parse(data) + body.message = 'World' + response.end(JSON.stringify(body)) + }) +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +const data = [] + +client.dispatch({ + path: '/', + method: 'POST', + headers: { + 'content-type': 'application/json' + }, + body: JSON.stringify({ message: 'Hello' }) +}, { + onConnect: () => { + console.log('Connected!') + }, + onError: (error) => { + console.error(error) + }, + onHeaders: (statusCode, headers) => { + console.log(`onHeaders | statusCode: ${statusCode} | 
headers: ${headers}`) + }, + onData: (chunk) => { + console.log('onData: chunk received') + data.push(chunk) + }, + onComplete: (trailers) => { + console.log(`onComplete | trailers: ${trailers}`) + const res = Buffer.concat(data).toString('utf8') + console.log(`Response Data: ${res}`) + client.close() + server.close() + } +}) +``` + +### `Dispatcher.pipeline(options, handler)` + +For easy use with [stream.pipeline](https://nodejs.org/api/stream.html#stream_stream_pipeline_source_transforms_destination_callback). The `handler` argument should return a `Readable` from which the result will be read. Usually it should just return the `body` argument unless some kind of transformation needs to be performed based on e.g. `headers` or `statusCode`. The `handler` should validate the response and save any required state. If there is an error, it should be thrown. The function returns a `Duplex` which writes to the request and reads from the response. + +Arguments: + +* **options** `PipelineOptions` +* **handler** `(data: PipelineHandlerData) => stream.Readable` + +Returns: `stream.Duplex` + +#### Parameter: PipelineOptions + +Extends: [`RequestOptions`](#parameter-requestoptions) + +* **objectMode** `boolean` (optional) - Default: `false` - Set to `true` if the `handler` will return an object stream. + +#### Parameter: PipelineHandlerData + +* **statusCode** `number` +* **headers** `Record` +* **opaque** `unknown` +* **body** `stream.Readable` +* **context** `object` +* **onInfo** `({statusCode: number, headers: Record}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received. + +#### Example 1 - Pipeline Echo + +```js +import { Readable, Writable, PassThrough, pipeline } from 'stream' +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + request.pipe(response) +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +let res = '' + +pipeline( + new Readable({ + read () { + this.push(Buffer.from('undici')) + this.push(null) + } + }), + client.pipeline({ + path: '/', + method: 'GET' + }, ({ statusCode, headers, body }) => { + console.log(`response received ${statusCode}`) + console.log('headers', headers) + return pipeline(body, new PassThrough(), () => {}) + }), + new Writable({ + write (chunk, _, callback) { + res += chunk.toString() + callback() + }, + final (callback) { + console.log(`Response pipelined to writable: ${res}`) + callback() + } + }), + error => { + if (error) { + console.error(error) + } + + client.close() + server.close() + } +) +``` + +### `Dispatcher.request(options[, callback])` + +Performs a HTTP request. + +Non-idempotent requests will not be pipelined in order +to avoid indirect failures. + +Idempotent requests will be automatically retried if +they fail due to indirect failure from the request +at the head of the pipeline. This does not apply to +idempotent requests with a stream request body. + +All response bodies must always be fully consumed or destroyed. + +Arguments: + +* **options** `RequestOptions` +* **callback** `(error: Error | null, data: ResponseData) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed. 
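To make the promise/callback distinction concrete, a small sketch (assuming `client` is an already-constructed `Client`):

```js
// Promise form: no callback, so a Promise resolving to ResponseData is returned.
const { statusCode, body } = await client.request({ path: '/', method: 'GET' })
console.log(statusCode)
body.resume() // response bodies must always be consumed or destroyed

// Callback form: returns void; the same ResponseData arrives as the second argument.
client.request({ path: '/', method: 'GET' }, (err, data) => {
  if (err) throw err
  data.body.resume()
})
```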
+
+#### Parameter: `RequestOptions`
+
+Extends: [`DispatchOptions`](#parameter-dispatchoptions)
+
+* **opaque** `unknown` (optional) - Default: `null` - Used for passing through context to `ResponseData`.
+* **signal** `AbortSignal | events.EventEmitter | null` (optional) - Default: `null`.
+* **onInfo** `({statusCode: number, headers: Record}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received.
+
+The `RequestOptions.method` property must not be `'CONNECT'`.
+
+#### Parameter: `ResponseData`
+
+* **statusCode** `number`
+* **headers** `Record` - Note that all header keys are lower-cased, e.g. `content-type`.
+* **body** `stream.Readable` which also implements [the body mixin from the Fetch Standard](https://fetch.spec.whatwg.org/#body-mixin).
+* **trailers** `Record` - This object starts out as empty and will be mutated to contain trailers after `body` has emitted `'end'`.
+* **opaque** `unknown`
+* **context** `object`
+
+`body` contains the following additional [body mixin](https://fetch.spec.whatwg.org/#body-mixin) methods and properties:
+
+- `text()`
+- `json()`
+- `arrayBuffer()`
+- `body`
+- `bodyUsed`
+
+`body` cannot be consumed twice. For example, calling `text()` after `json()` throws a `TypeError`.
+
+`body` contains the following additional extensions:
+
+- `dump({ limit: Integer })` - dump the response by reading up to `limit` bytes without killing the socket. The `limit` argument is optional - Default: 262144.
+
+Note that `body` will still be a `Readable` even if it is empty; however, attempting to deserialize an empty body with `json()` will throw. The recommended way to ensure there is a body to deserialize is to check that the status code is not 204 and that the `content-type` header starts with `application/json`.
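+
+That check can be sketched as follows; this is a non-normative illustration, and the origin and path are placeholders:
+
+```js
+import { Client } from 'undici'
+
+const client = new Client('http://localhost:3000') // placeholder origin
+
+const { statusCode, headers, body } = await client.request({
+  path: '/resource', // placeholder path
+  method: 'GET'
+})
+
+const contentType = headers['content-type'] ?? ''
+
+if (statusCode !== 204 && String(contentType).startsWith('application/json')) {
+  // There is a body and it is declared as JSON, so it is safe to deserialize.
+  console.log(await body.json())
+} else {
+  // Still consume the response so the connection can be reused,
+  // reading at most the default dump limit of 262144 bytes.
+  await body.dump()
+}
+```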
+ +#### Example 1 - Basic GET Request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { body, headers, statusCode, trailers } = await client.request({ + path: '/', + method: 'GET' + }) + console.log(`response received ${statusCode}`) + console.log('headers', headers) + body.setEncoding('utf8') + body.on('data', console.log) + body.on('end', () => { + console.log('trailers', trailers) + }) + + client.close() + server.close() +} catch (error) { + console.error(error) +} +``` + +#### Example 2 - Aborting a request + +> Node.js v15+ is required to run this example + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) +const abortController = new AbortController() + +try { + client.request({ + path: '/', + method: 'GET', + signal: abortController.signal + }) +} catch (error) { + console.error(error) // should print an RequestAbortedError + client.close() + server.close() +} + +abortController.abort() +``` + +Alternatively, any `EventEmitter` that emits an `'abort'` event may be used as an abort controller: + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import EventEmitter, { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) +const ee = new EventEmitter() + +try { + client.request({ + path: '/', + method: 'GET', + signal: ee + }) +} catch (error) { + console.error(error) // should print an RequestAbortedError + client.close() + server.close() +} + +ee.emit('abort') +``` + +Destroying the request or response body will have the same effect. + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + body.destroy() +} catch (error) { + console.error(error) // should print an RequestAbortedError + client.close() + server.close() +} +``` + +### `Dispatcher.stream(options, factory[, callback])` + +A faster version of `Dispatcher.request`. This method expects the second argument `factory` to return a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable) stream which the response will be written to. This improves performance by avoiding creating an intermediate [`stream.Readable`](https://nodejs.org/api/stream.html#stream_readable_streams) stream when the user expects to directly pipe the response body to a [`stream.Writable`](https://nodejs.org/api/stream.html#stream_class_stream_writable) stream. 
+ +As demonstrated in [Example 1 - Basic GET stream request](#example-1---basic-get-stream-request), it is recommended to use the `option.opaque` property to avoid creating a closure for the `factory` method. This pattern works well with Node.js Web Frameworks such as [Fastify](https://fastify.io). See [Example 2 - Stream to Fastify Response](#example-2---stream-to-fastify-response) for more details. + +Arguments: + +* **options** `RequestOptions` +* **factory** `(data: StreamFactoryData) => stream.Writable` +* **callback** `(error: Error | null, data: StreamData) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +#### Parameter: `StreamFactoryData` + +* **statusCode** `number` +* **headers** `Record` +* **opaque** `unknown` +* **onInfo** `({statusCode: number, headers: Record}) => void | null` (optional) - Default: `null` - Callback collecting all the info headers (HTTP 100-199) received. + +#### Parameter: `StreamData` + +* **opaque** `unknown` +* **trailers** `Record` +* **context** `object` + +#### Example 1 - Basic GET stream request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' +import { Writable } from 'stream' + +const server = createServer((request, response) => { + response.end('Hello, World!') +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +const bufs = [] + +try { + await client.stream({ + path: '/', + method: 'GET', + opaque: { bufs } + }, ({ statusCode, headers, opaque: { bufs } }) => { + console.log(`response received ${statusCode}`) + console.log('headers', headers) + return new Writable({ + write (chunk, encoding, callback) { + bufs.push(chunk) + callback() + } + }) + }) + + console.log(Buffer.concat(bufs).toString('utf-8')) + + client.close() + server.close() +} catch (error) { + console.error(error) +} +``` + +#### Example 2 - Stream to Fastify Response + +In this example, a (fake) request is made to the fastify server using `fastify.inject()`. This request then executes the fastify route handler which makes a subsequent request to the raw Node.js http server using `undici.dispatcher.stream()`. The fastify response is passed to the `opaque` option so that undici can tap into the underlying writable stream using `response.raw`. This methodology demonstrates how one could use undici and fastify together to create fast-as-possible requests from one backend server to another. + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' +import fastify from 'fastify' + +const nodeServer = createServer((request, response) => { + response.end('Hello, World! 
From Node.js HTTP Server') +}).listen() + +await once(nodeServer, 'listening') + +console.log('Node Server listening') + +const nodeServerUndiciClient = new Client(`http://localhost:${nodeServer.address().port}`) + +const fastifyServer = fastify() + +fastifyServer.route({ + url: '/', + method: 'GET', + handler: (request, response) => { + nodeServerUndiciClient.stream({ + path: '/', + method: 'GET', + opaque: response + }, ({ opaque }) => opaque.raw) + } +}) + +await fastifyServer.listen() + +console.log('Fastify Server listening') + +const fastifyServerUndiciClient = new Client(`http://localhost:${fastifyServer.server.address().port}`) + +try { + const { statusCode, body } = await fastifyServerUndiciClient.request({ + path: '/', + method: 'GET' + }) + + console.log(`response received ${statusCode}`) + body.setEncoding('utf8') + body.on('data', console.log) + + nodeServerUndiciClient.close() + fastifyServerUndiciClient.close() + fastifyServer.close() + nodeServer.close() +} catch (error) { } +``` + +### `Dispatcher.upgrade(options[, callback])` + +Upgrade to a different protocol. Visit [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details. + +Arguments: + +* **options** `UpgradeOptions` + +* **callback** `(error: Error | null, data: UpgradeData) => void` (optional) + +Returns: `void | Promise` - Only returns a `Promise` if no `callback` argument was passed + +#### Parameter: `UpgradeOptions` + +* **path** `string` +* **method** `string` (optional) - Default: `'GET'` +* **headers** `UndiciHeaders` (optional) - Default: `null` +* **protocol** `string` (optional) - Default: `'Websocket'` - A string of comma separated protocols, in descending preference order. +* **signal** `AbortSignal | EventEmitter | null` (optional) - Default: `null` + +#### Parameter: `UpgradeData` + +* **headers** `http.IncomingHeaders` +* **socket** `stream.Duplex` +* **opaque** `unknown` + +#### Example 1 - Basic Upgrade Request + +```js +import { createServer } from 'http' +import { Client } from 'undici' +import { once } from 'events' + +const server = createServer((request, response) => { + response.statusCode = 101 + response.setHeader('connection', 'upgrade') + response.setHeader('upgrade', request.headers.upgrade) + response.end() +}).listen() + +await once(server, 'listening') + +const client = new Client(`http://localhost:${server.address().port}`) + +try { + const { headers, socket } = await client.upgrade({ + path: '/', + }) + socket.on('end', () => { + console.log(`upgrade: ${headers.upgrade}`) // upgrade: Websocket + client.close() + server.close() + }) + socket.end() +} catch (error) { + console.error(error) + client.close() + server.close() +} +``` + +## Instance Events + +### Event: `'connect'` + +Parameters: + +* **origin** `URL` +* **targets** `Array` + +### Event: `'disconnect'` + +Parameters: + +* **origin** `URL` +* **targets** `Array` +* **error** `Error` + +### Event: `'connectionError'` + +Parameters: + +* **origin** `URL` +* **targets** `Array` +* **error** `Error` + +Emitted when dispatcher fails to connect to +origin. + +### Event: `'drain'` + +Parameters: + +* **origin** `URL` + +Emitted when dispatcher is no longer busy. 
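+
+As a brief, non-normative sketch of observing the events above (the origin is a placeholder), listeners can be attached to any dispatcher instance:
+
+```js
+import { Client } from 'undici'
+
+const client = new Client('http://localhost:3000') // placeholder origin
+
+client.on('connect', (origin) => {
+  console.log(`connected to ${origin}`)
+})
+
+client.on('connectionError', (origin, targets, error) => {
+  console.error(`failed to connect to ${origin}:`, error)
+})
+
+client.on('drain', (origin) => {
+  console.log(`${origin} is no longer busy`)
+})
+```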
+ +## Parameter: `UndiciHeaders` + +* `Record | string[] | null` + +Header arguments such as `options.headers` in [`Client.dispatch`](Client.md#clientdispatchoptions-handlers) can be specified in two forms; either as an object specified by the `Record` (`IncomingHttpHeaders`) type, or an array of strings. An array representation of a header list must have an even length or an `InvalidArgumentError` will be thrown. + +Keys are lowercase and values are not modified. + +Response headers will derive a `host` from the `url` of the [Client](Client.md#class-client) instance if no `host` header was previously specified. + +### Example 1 - Object + +```js +{ + 'content-length': '123', + 'content-type': 'text/plain', + connection: 'keep-alive', + host: 'mysite.com', + accept: '*/*' +} +``` + +### Example 2 - Array + +```js +[ + 'content-length', '123', + 'content-type', 'text/plain', + 'connection', 'keep-alive', + 'host', 'mysite.com', + 'accept', '*/*' +] +``` diff --git a/docs/api/Errors.md b/docs/api/Errors.md new file mode 100644 index 0000000..917e45d --- /dev/null +++ b/docs/api/Errors.md @@ -0,0 +1,47 @@ +# Errors + +Undici exposes a variety of error objects that you can use to enhance your error handling. +You can find all the error objects inside the `errors` key. + +```js +import { errors } from 'undici' +``` + +| Error | Error Codes | Description | +| ------------------------------------ | ------------------------------------- | ------------------------------------------------------------------------- | +| `UndiciError` | `UND_ERR` | all errors below are extended from `UndiciError`. | +| `ConnectTimeoutError` | `UND_ERR_CONNECT_TIMEOUT` | socket is destroyed due to connect timeout. | +| `HeadersTimeoutError` | `UND_ERR_HEADERS_TIMEOUT` | socket is destroyed due to headers timeout. | +| `HeadersOverflowError` | `UND_ERR_HEADERS_OVERFLOW` | socket is destroyed due to headers' max size being exceeded. | +| `BodyTimeoutError` | `UND_ERR_BODY_TIMEOUT` | socket is destroyed due to body timeout. | +| `ResponseStatusCodeError` | `UND_ERR_RESPONSE_STATUS_CODE` | an error is thrown when `throwOnError` is `true` for status codes >= 400. | +| `InvalidArgumentError` | `UND_ERR_INVALID_ARG` | passed an invalid argument. | +| `InvalidReturnValueError` | `UND_ERR_INVALID_RETURN_VALUE` | returned an invalid value. | +| `RequestAbortedError` | `UND_ERR_ABORTED` | the request has been aborted by the user | +| `ClientDestroyedError` | `UND_ERR_DESTROYED` | trying to use a destroyed client. | +| `ClientClosedError` | `UND_ERR_CLOSED` | trying to use a closed client. | +| `SocketError` | `UND_ERR_SOCKET` | there is an error with the socket. | +| `NotSupportedError` | `UND_ERR_NOT_SUPPORTED` | encountered unsupported functionality. 
| +| `RequestContentLengthMismatchError` | `UND_ERR_REQ_CONTENT_LENGTH_MISMATCH` | request body does not match content-length header | +| `ResponseContentLengthMismatchError` | `UND_ERR_RES_CONTENT_LENGTH_MISMATCH` | response body does not match content-length header | +| `InformationalError` | `UND_ERR_INFO` | expected error with reason | +| `ResponseExceededMaxSizeError` | `UND_ERR_RES_EXCEEDED_MAX_SIZE` | response body exceed the max size allowed | + +### `SocketError` + +The `SocketError` has a `.socket` property which holds socket metadata: + +```ts +interface SocketInfo { + localAddress?: string + localPort?: number + remoteAddress?: string + remotePort?: number + remoteFamily?: string + timeout?: number + bytesWritten?: number + bytesRead?: number +} +``` + +Be aware that in some cases the `.socket` property can be `null`. diff --git a/docs/api/Fetch.md b/docs/api/Fetch.md new file mode 100644 index 0000000..b5a6242 --- /dev/null +++ b/docs/api/Fetch.md @@ -0,0 +1,27 @@ +# Fetch + +Undici exposes a fetch() method starts the process of fetching a resource from the network. + +Documentation and examples can be found on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/fetch). + +## File + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/File) + +In Node versions v18.13.0 and above and v19.2.0 and above, undici will default to using Node's [File](https://nodejs.org/api/buffer.html#class-file) class. In versions where it's not available, it will default to the undici one. + +## FormData + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/FormData) + +## Response + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Response) + +## Request + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Request) + +## Header + +This API is implemented as per the standard, you can find documentation on [MDN](https://developer.mozilla.org/en-US/docs/Web/API/Headers) diff --git a/docs/api/MockAgent.md b/docs/api/MockAgent.md new file mode 100644 index 0000000..85ae690 --- /dev/null +++ b/docs/api/MockAgent.md @@ -0,0 +1,540 @@ +# Class: MockAgent + +Extends: `undici.Dispatcher` + +A mocked Agent class that implements the Agent API. It allows one to intercept HTTP requests made through undici and return mocked responses instead. + +## `new MockAgent([options])` + +Arguments: + +* **options** `MockAgentOptions` (optional) - It extends the `Agent` options. + +Returns: `MockAgent` + +### Parameter: `MockAgentOptions` + +Extends: [`AgentOptions`](Agent.md#parameter-agentoptions) + +* **agent** `Agent` (optional) - Default: `new Agent([options])` - a custom agent encapsulated by the MockAgent. + +### Example - Basic MockAgent instantiation + +This will instantiate the MockAgent. It will not do anything until registered as the agent to use with requests and mock interceptions are added. 
+ +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() +``` + +### Example - Basic MockAgent instantiation with custom agent + +```js +import { Agent, MockAgent } from 'undici' + +const agent = new Agent() + +const mockAgent = new MockAgent({ agent }) +``` + +## Instance Methods + +### `MockAgent.get(origin)` + +This method creates and retrieves MockPool or MockClient instances which can then be used to intercept HTTP requests. If the number of connections on the mock agent is set to 1, a MockClient instance is returned. Otherwise a MockPool instance is returned. + +For subsequent `MockAgent.get` calls on the same origin, the same mock instance will be returned. + +Arguments: + +* **origin** `string | RegExp | (value) => boolean` - a matcher for the pool origin to be retrieved from the MockAgent. + +| Matcher type | Condition to pass | +|:------------:| -------------------------- | +| `string` | Exact match against string | +| `RegExp` | Regex must pass | +| `Function` | Function must return true | + +Returns: `MockClient | MockPool`. + +| `MockAgentOptions` | Mock instance returned | +| -------------------- | ---------------------- | +| `connections === 1` | `MockClient` | +| `connections` > `1` | `MockPool` | + +#### Example - Basic Mocked Request + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { statusCode, body } = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked Request with local mock agent dispatcher + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: mockAgent }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked Request with local mock pool dispatcher + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: mockPool }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked Request with local mock client dispatcher + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent({ connections: 1 }) + +const mockClient = mockAgent.get('http://localhost:3000') +mockClient.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: mockClient }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Mocked requests with 
multiple intercepts + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') +mockPool.intercept({ path: '/hello'}).reply(200, 'hello') + +const result1 = await request('http://localhost:3000/foo') + +console.log('response received', result1.statusCode) // response received 200 + +for await (const data of result1.body) { + console.log('data', data.toString('utf8')) // data foo +} + +const result2 = await request('http://localhost:3000/hello') + +console.log('response received', result2.statusCode) // response received 200 + +for await (const data of result2.body) { + console.log('data', data.toString('utf8')) // data hello +} +``` +#### Example - Mock different requests within the same file +```js +const { MockAgent, setGlobalDispatcher } = require('undici'); +const agent = new MockAgent(); +agent.disableNetConnect(); +setGlobalDispatcher(agent); +describe('Test', () => { + it('200', async () => { + const mockAgent = agent.get('http://test.com'); + // your test + }); + it('200', async () => { + const mockAgent = agent.get('http://testing.com'); + // your test + }); +}); +``` + +#### Example - Mocked request with query body, headers and trailers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' +}).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } +}) + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo?hello=there&see=ya', { + method: 'POST', + body: 'form1=data1&form2=data2' +}) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // '{"foo":"bar"}' +} + +console.log('trailers', trailers) // { 'content-md5': 'test' } +``` + +#### Example - Mocked request with origin regex + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get(new RegExp('http://localhost:3000')) +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Mocked request with origin function + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get((origin) => origin === 'http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +### `MockAgent.close()` + +Closes the mock agent and waits for registered mock 
pools and clients to also close before resolving. + +Returns: `Promise` + +#### Example - clean up after tests are complete + +```js +import { MockAgent, setGlobalDispatcher } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +await mockAgent.close() +``` + +### `MockAgent.dispatch(options, handlers)` + +Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions). + +### `MockAgent.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +#### Example - MockAgent request + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await mockAgent.request({ + origin: 'http://localhost:3000', + path: '/foo', + method: 'GET' +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +### `MockAgent.deactivate()` + +This method disables mocking in MockAgent. + +Returns: `void` + +#### Example - Deactivate Mocking + +```js +import { MockAgent, setGlobalDispatcher } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.deactivate() +``` + +### `MockAgent.activate()` + +This method enables mocking in a MockAgent instance. When instantiated, a MockAgent is automatically activated. Therefore, this method is only effective after `MockAgent.deactivate` has been called. + +Returns: `void` + +#### Example - Activate Mocking + +```js +import { MockAgent, setGlobalDispatcher } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.deactivate() +// No mocking will occur + +// Later +mockAgent.activate() +``` + +### `MockAgent.enableNetConnect([host])` + +When requests are not matched in a MockAgent intercept, a real HTTP request is attempted. We can control this further through the use of `enableNetConnect`. This is achieved by defining host matchers so only matching requests will be attempted. + +When using a string, it should only include the **hostname and optionally, the port**. In addition, calling this method multiple times with a string will allow all HTTP requests that match these values. 
+ +Arguments: + +* **host** `string | RegExp | (value) => boolean` - (optional) + +Returns: `void` + +#### Example - Allow all non-matching urls to be dispatched in a real HTTP request + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect() + +await request('http://example.com') +// A real request is made +``` + +#### Example - Allow requests matching a host string to make real requests + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect('example-1.com') +mockAgent.enableNetConnect('example-2.com:8080') + +await request('http://example-1.com') +// A real request is made + +await request('http://example-2.com:8080') +// A real request is made + +await request('http://example-3.com') +// Will throw +``` + +#### Example - Allow requests matching a host regex to make real requests + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect(new RegExp('example.com')) + +await request('http://example.com') +// A real request is made +``` + +#### Example - Allow requests matching a host function to make real requests + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +mockAgent.enableNetConnect((value) => value === 'example.com') + +await request('http://example.com') +// A real request is made +``` + +### `MockAgent.disableNetConnect()` + +This method causes all requests to throw when requests are not matched in a MockAgent intercept. + +Returns: `void` + +#### Example - Disable all non-matching requests by throwing an error for each + +```js +import { MockAgent, request } from 'undici' + +const mockAgent = new MockAgent() + +mockAgent.disableNetConnect() + +await request('http://example.com') +// Will throw +``` + +### `MockAgent.pendingInterceptors()` + +This method returns any pending interceptors registered on a mock agent. A pending interceptor meets one of the following criteria: + +- Is registered with neither `.times()` nor `.persist()`, and has not been invoked; +- Is persistent (i.e., registered with `.persist()`) and has not been invoked; +- Is registered with `.times()` and has not been invoked `` of times. + +Returns: `PendingInterceptor[]` (where `PendingInterceptor` is a `MockDispatch` with an additional `origin: string`) + +#### Example - List all pending inteceptors + +```js +const agent = new MockAgent() +agent.disableNetConnect() + +agent + .get('https://example.com') + .intercept({ method: 'GET', path: '/' }) + .reply(200) + +const pendingInterceptors = agent.pendingInterceptors() +// Returns [ +// { +// timesInvoked: 0, +// times: 1, +// persist: false, +// consumed: false, +// pending: true, +// path: '/', +// method: 'GET', +// body: undefined, +// headers: undefined, +// data: { +// error: null, +// statusCode: 200, +// data: '', +// headers: {}, +// trailers: {} +// }, +// origin: 'https://example.com' +// } +// ] +``` + +### `MockAgent.assertNoPendingInterceptors([options])` + +This method throws if the mock agent has any pending interceptors. 
A pending interceptor meets one of the following criteria: + +- Is registered with neither `.times()` nor `.persist()`, and has not been invoked; +- Is persistent (i.e., registered with `.persist()`) and has not been invoked; +- Is registered with `.times()` and has not been invoked `` of times. + +#### Example - Check that there are no pending interceptors + +```js +const agent = new MockAgent() +agent.disableNetConnect() + +agent + .get('https://example.com') + .intercept({ method: 'GET', path: '/' }) + .reply(200) + +agent.assertNoPendingInterceptors() +// Throws an UndiciError with the following message: +// +// 1 interceptor is pending: +// +// ┌─────────┬────────┬───────────────────────┬──────┬─────────────┬────────────┬─────────────┬───────────┠+// │ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │ +// ├─────────┼────────┼───────────────────────┼──────┼─────────────┼────────────┼─────────────┼───────────┤ +// │ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ 'âŒ' │ 0 │ 1 │ +// └─────────┴────────┴───────────────────────┴──────┴─────────────┴────────────┴─────────────┴───────────┘ +``` diff --git a/docs/api/MockClient.md b/docs/api/MockClient.md new file mode 100644 index 0000000..ac54691 --- /dev/null +++ b/docs/api/MockClient.md @@ -0,0 +1,77 @@ +# Class: MockClient + +Extends: `undici.Client` + +A mock client class that implements the same api as [MockPool](MockPool.md). + +## `new MockClient(origin, [options])` + +Arguments: + +* **origin** `string` - It should only include the **protocol, hostname, and port**. +* **options** `MockClientOptions` - It extends the `Client` options. + +Returns: `MockClient` + +### Parameter: `MockClientOptions` + +Extends: `ClientOptions` + +* **agent** `Agent` - the agent to associate this MockClient with. + +### Example - Basic MockClient instantiation + +We can use MockAgent to instantiate a MockClient ready to be used to intercept specified requests. It will not do anything until registered as the agent to use and any mock request are registered. + +```js +import { MockAgent } from 'undici' + +// Connections must be set to 1 to return a MockClient instance +const mockAgent = new MockAgent({ connections: 1 }) + +const mockClient = mockAgent.get('http://localhost:3000') +``` + +## Instance Methods + +### `MockClient.intercept(options)` + +Implements: [`MockPool.intercept(options)`](MockPool.md#mockpoolinterceptoptions) + +### `MockClient.close()` + +Implements: [`MockPool.close()`](MockPool.md#mockpoolclose) + +### `MockClient.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `MockClient.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). 
+ +#### Example - MockClient request + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent({ connections: 1 }) + +const mockClient = mockAgent.get('http://localhost:3000') +mockClient.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await mockClient.request({ + origin: 'http://localhost:3000', + path: '/foo', + method: 'GET' +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` diff --git a/docs/api/MockErrors.md b/docs/api/MockErrors.md new file mode 100644 index 0000000..c1aa3db --- /dev/null +++ b/docs/api/MockErrors.md @@ -0,0 +1,12 @@ +# MockErrors + +Undici exposes a variety of mock error objects that you can use to enhance your mock error handling. +You can find all the mock error objects inside the `mockErrors` key. + +```js +import { mockErrors } from 'undici' +``` + +| Mock Error | Mock Error Codes | Description | +| --------------------- | ------------------------------- | ---------------------------------------------------------- | +| `MockNotMatchedError` | `UND_MOCK_ERR_MOCK_NOT_MATCHED` | The request does not match any registered mock dispatches. | diff --git a/docs/api/MockPool.md b/docs/api/MockPool.md new file mode 100644 index 0000000..96a986f --- /dev/null +++ b/docs/api/MockPool.md @@ -0,0 +1,547 @@ +# Class: MockPool + +Extends: `undici.Pool` + +A mock Pool class that implements the Pool API and is used by MockAgent to intercept real requests and return mocked responses. + +## `new MockPool(origin, [options])` + +Arguments: + +* **origin** `string` - It should only include the **protocol, hostname, and port**. +* **options** `MockPoolOptions` - It extends the `Pool` options. + +Returns: `MockPool` + +### Parameter: `MockPoolOptions` + +Extends: `PoolOptions` + +* **agent** `Agent` - the agent to associate this MockPool with. + +### Example - Basic MockPool instantiation + +We can use MockAgent to instantiate a MockPool ready to be used to intercept specified requests. It will not do anything until registered as the agent to use and any mock request are registered. + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +``` + +## Instance Methods + +### `MockPool.intercept(options)` + +This method defines the interception rules for matching against requests for a MockPool or MockPool. We can intercept multiple times on a single instance, but each intercept is only used once. For example if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()` you will get `MockNotMatchedError` on the second request when you only call `intercept()` once. + +When defining interception rules, all the rules must pass for a request to be intercepted. If a request is not intercepted, a real request will be attempted. + +| Matcher type | Condition to pass | +|:------------:| -------------------------- | +| `string` | Exact match against string | +| `RegExp` | Regex must pass | +| `Function` | Function must return true | + +Arguments: + +* **options** `MockPoolInterceptOptions` - Interception options. + +Returns: `MockInterceptor` corresponding to the input options. + +### Parameter: `MockPoolInterceptOptions` + +* **path** `string | RegExp | (path: string) => boolean` - a matcher for the HTTP request path. 
When a `RegExp` or callback is used, it will match against the request path including all query parameters in alphabetical order. When a `string` is provided, the query parameters can be conveniently specified through the `MockPoolInterceptOptions.query` setting.
+* **method** `string | RegExp | (method: string) => boolean` - (optional) - a matcher for the HTTP request method. Defaults to `GET`.
+* **body** `string | RegExp | (body: string) => boolean` - (optional) - a matcher for the HTTP request body.
+* **headers** `Record<string, string | RegExp | (body: string) => boolean>` - (optional) - a matcher for the HTTP request headers. To be intercepted, a request must match all defined headers. Extra headers not defined here may (or may not) be included in the request and do not affect the interception in any way.
+* **query** `Record | null` - (optional) - a matcher for the HTTP request query string params. Only applies when a `string` was provided for `MockPoolInterceptOptions.path`.
+
+### Return: `MockInterceptor`
+
+We can define the behaviour of an intercepted request with the following options.
+
+* **reply** `(statusCode: number, replyData: string | Buffer | object | MockInterceptor.MockResponseDataHandler, responseOptions?: MockResponseOptions) => MockScope` - define a reply for a matching request. You can define the replyData as a callback to read incoming request data. Default for `responseOptions` is `{}`.
+* **reply** `(callback: MockInterceptor.MockReplyOptionsCallback) => MockScope` - define a reply for a matching request, allowing dynamic mocking of all reply options rather than just the data.
+* **replyWithError** `(error: Error) => MockScope` - define an error for a matching request to throw.
+* **defaultReplyHeaders** `(headers: Record) => MockInterceptor` - define default headers to be included in subsequent replies. These are in addition to headers on a specific reply.
+* **defaultReplyTrailers** `(trailers: Record) => MockInterceptor` - define default trailers to be included in subsequent replies. These are in addition to trailers on a specific reply.
+* **replyContentLength** `() => MockInterceptor` - define automatically calculated `content-length` headers to be included in subsequent replies.
+
+The reply data of an intercepted request may either be a string, buffer, or JavaScript object. Objects are converted to JSON, while strings and buffers are sent as-is.
+
+By default, `reply` and `replyWithError` define the behaviour for the first matching request only. Subsequent requests will not be affected (this can be changed using the returned `MockScope`).
+
+### Parameter: `MockResponseOptions`
+
+* **headers** `Record` - headers to be included on the mocked reply.
+* **trailers** `Record` - trailers to be included on the mocked reply.
+
+### Return: `MockScope`
+
+A `MockScope` is associated with a single `MockInterceptor`. With it, we can configure the default behaviour of an intercepted reply.
+
+* **delay** `(waitInMs: number) => MockScope` - delay the associated reply by a set amount in ms (see the sketch below).
+* **persist** `() => MockScope` - any matching request will always reply with the defined response indefinitely.
+* **times** `(repeatTimes: number) => MockScope` - any matching request will reply with the defined response a fixed number of times. This is overridden by **persist**.
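+
+As a small, non-normative sketch of `delay` (the origin and wait time are arbitrary placeholders):
+
+```js
+import { MockAgent, setGlobalDispatcher, request } from 'undici'
+
+const mockAgent = new MockAgent()
+setGlobalDispatcher(mockAgent)
+
+const mockPool = mockAgent.get('http://localhost:3000')
+
+// The mocked reply is held back for roughly 100 ms before being delivered.
+mockPool.intercept({
+  path: '/foo',
+  method: 'GET'
+}).reply(200, 'foo').delay(100)
+
+const { statusCode, body } = await request('http://localhost:3000/foo')
+
+console.log('response received', statusCode) // response received 200 (after the delay)
+
+for await (const data of body) {
+  console.log('data', data.toString('utf8')) // data foo
+}
+```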
+ +#### Example - Basic Mocked Request + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +// MockPool +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ path: '/foo' }).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Mocked request using reply data callbacks + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/echo', + method: 'GET', + headers: { + 'User-Agent': 'undici', + Host: 'example.com' + } +}).reply(200, ({ headers }) => ({ message: headers.get('message') })) + +const { statusCode, body, headers } = await request('http://localhost:3000', { + headers: { + message: 'hello world!' + } +}) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // { "message":"hello world!" } +} +``` + +#### Example - Mocked request using reply options callback + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/echo', + method: 'GET', + headers: { + 'User-Agent': 'undici', + Host: 'example.com' + } +}).reply(({ headers }) => ({ statusCode: 200, data: { message: headers.get('message') }}))) + +const { statusCode, body, headers } = await request('http://localhost:3000', { + headers: { + message: 'hello world!' + } +}) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // { "message":"hello world!" 
} +} +``` + +#### Example - Basic Mocked requests with multiple intercepts + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).reply(200, 'foo') + +mockPool.intercept({ + path: '/hello', + method: 'GET', +}).reply(200, 'hello') + +const result1 = await request('http://localhost:3000/foo') + +console.log('response received', result1.statusCode) // response received 200 + +for await (const data of result1.body) { + console.log('data', data.toString('utf8')) // data foo +} + +const result2 = await request('http://localhost:3000/hello') + +console.log('response received', result2.statusCode) // response received 200 + +for await (const data of result2.body) { + console.log('data', data.toString('utf8')) // data hello +} +``` + +#### Example - Mocked request with query body, request headers and response headers and trailers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2', + headers: { + 'User-Agent': 'undici', + Host: 'example.com' + } +}).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } +}) + +const { + statusCode, + headers, + trailers, + body +} = await request('http://localhost:3000/foo?hello=there&see=ya', { + method: 'POST', + body: 'form1=data1&form2=data2', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'example.com' + } + }) + +console.log('response received', statusCode) // response received 200 +console.log('headers', headers) // { 'content-type': 'application/json' } + +for await (const data of body) { + console.log('data', data.toString('utf8')) // '{"foo":"bar"}' +} + +console.log('trailers', trailers) // { 'content-md5': 'test' } +``` + +#### Example - Mocked request using different matchers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: /^GET$/, + body: (value) => value === 'form=data', + headers: { + 'User-Agent': 'undici', + Host: /^example.com$/ + } +}).reply(200, 'foo') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { + method: 'GET', + body: 'form=data', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'example.com' + } +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Mocked request with reply with a defined error + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).replyWithError(new Error('kaboom')) + +try { + await request('http://localhost:3000/foo', { + method: 'GET' + }) +} catch (error) { + console.error(error) // Error: kaboom +} +``` + +#### Example - Mocked request with defaultReplyHeaders + +```js +import { MockAgent, 
setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).defaultReplyHeaders({ foo: 'bar' }) + .reply(200, 'foo') + +const { headers } = await request('http://localhost:3000/foo') + +console.log('headers', headers) // headers { foo: 'bar' } +``` + +#### Example - Mocked request with defaultReplyTrailers + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).defaultReplyTrailers({ foo: 'bar' }) + .reply(200, 'foo') + +const { trailers } = await request('http://localhost:3000/foo') + +console.log('trailers', trailers) // trailers { foo: 'bar' } +``` + +#### Example - Mocked request with automatic content-length calculation + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).replyContentLength().reply(200, 'foo') + +const { headers } = await request('http://localhost:3000/foo') + +console.log('headers', headers) // headers { 'content-length': '3' } +``` + +#### Example - Mocked request with automatic content-length calculation on an object + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).replyContentLength().reply(200, { foo: 'bar' }) + +const { headers } = await request('http://localhost:3000/foo') + +console.log('headers', headers) // headers { 'content-length': '13' } +``` + +#### Example - Mocked request with persist enabled + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).reply(200, 'foo').persist() + +const result1 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +const result2 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +// Etc +``` + +#### Example - Mocked request with times enabled + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +mockPool.intercept({ + path: '/foo', + method: 'GET' +}).reply(200, 'foo').times(2) + +const result1 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +const result2 = await request('http://localhost:3000/foo') +// Will match and return mocked data + +const result3 = await request('http://localhost:3000/foo') +// Will not match and make attempt a real request +``` + +#### Example - Mocked request with path callback + +```js +import { MockAgent, setGlobalDispatcher, request } from 'undici' +import querystring from 'querystring' + +const mockAgent = new MockAgent() +setGlobalDispatcher(mockAgent) + +const mockPool = mockAgent.get('http://localhost:3000') + +const matchPath = requestPath => { + 
const [pathname, search] = requestPath.split('?') + const requestQuery = querystring.parse(search) + + if (!pathname.startsWith('/foo')) { + return false + } + + if (!Object.keys(requestQuery).includes('foo') || requestQuery.foo !== 'bar') { + return false + } + + return true +} + +mockPool.intercept({ + path: matchPath, + method: 'GET' +}).reply(200, 'foo') + +const result = await request('http://localhost:3000/foo?foo=bar') +// Will match and return mocked data +``` + +### `MockPool.close()` + +Closes the mock pool and de-registers from associated MockAgent. + +Returns: `Promise` + +#### Example - clean up after tests are complete + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() +const mockPool = mockAgent.get('http://localhost:3000') + +await mockPool.close() +``` + +### `MockPool.dispatch(options, handlers)` + +Implements [`Dispatcher.dispatch(options, handlers)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `MockPool.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +#### Example - MockPool request + +```js +import { MockAgent } from 'undici' + +const mockAgent = new MockAgent() + +const mockPool = mockAgent.get('http://localhost:3000') +mockPool.intercept({ + path: '/foo', + method: 'GET', +}).reply(200, 'foo') + +const { + statusCode, + body +} = await mockPool.request({ + origin: 'http://localhost:3000', + path: '/foo', + method: 'GET' +}) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` diff --git a/docs/api/Pool.md b/docs/api/Pool.md new file mode 100644 index 0000000..8fcabac --- /dev/null +++ b/docs/api/Pool.md @@ -0,0 +1,84 @@ +# Class: Pool + +Extends: `undici.Dispatcher` + +A pool of [Client](Client.md) instances connected to the same upstream target. + +Requests are not guaranteed to be dispatched in order of invocation. + +## `new Pool(url[, options])` + +Arguments: + +* **url** `URL | string` - It should only include the **protocol, hostname, and port**. +* **options** `PoolOptions` (optional) + +### Parameter: `PoolOptions` + +Extends: [`ClientOptions`](Client.md#parameter-clientoptions) + +* **factory** `(origin: URL, opts: Object) => Dispatcher` - Default: `(origin, opts) => new Client(origin, opts)` +* **connections** `number | null` (optional) - Default: `null` - The number of `Client` instances to create. When set to `null`, the `Pool` instance will create an unlimited amount of `Client` instances. +* **interceptors** `{ Pool: DispatchInterceptor[] } }` - Default: `{ Pool: [] }` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). + +## Instance Properties + +### `Pool.closed` + +Implements [Client.closed](Client.md#clientclosed) + +### `Pool.destroyed` + +Implements [Client.destroyed](Client.md#clientdestroyed) + +### `Pool.stats` + +Returns [`PoolStats`](PoolStats.md) instance for this pool. + +## Instance Methods + +### `Pool.close([callback])` + +Implements [`Dispatcher.close([callback])`](Dispatcher.md#dispatcherclosecallback-promise). + +### `Pool.destroy([error, callback])` + +Implements [`Dispatcher.destroy([error, callback])`](Dispatcher.md#dispatcherdestroyerror-callback-promise). 
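+
+As a small, non-normative sketch of shutting a pool down (the origin is a placeholder): `close` waits for in-flight requests, while `destroy` tears the pool down immediately.
+
+```js
+import { Pool } from 'undici'
+
+const pool = new Pool('http://localhost:3000', { connections: 4 }) // placeholder origin
+
+const { statusCode, body } = await pool.request({ path: '/', method: 'GET' })
+console.log('response received', statusCode)
+await body.text() // always consume (or destroy) the body
+
+// Graceful shutdown: waits for queued and in-flight requests to finish.
+await pool.close()
+
+// Alternatively, tear everything down immediately; pending requests are rejected.
+// await pool.destroy(new Error('shutting down'))
+```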
+ +### `Pool.connect(options[, callback])` + +See [`Dispatcher.connect(options[, callback])`](Dispatcher.md#dispatcherconnectoptions-callback). + +### `Pool.dispatch(options, handler)` + +Implements [`Dispatcher.dispatch(options, handler)`](Dispatcher.md#dispatcherdispatchoptions-handler). + +### `Pool.pipeline(options, handler)` + +See [`Dispatcher.pipeline(options, handler)`](Dispatcher.md#dispatcherpipelineoptions-handler). + +### `Pool.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). + +### `Pool.stream(options, factory[, callback])` + +See [`Dispatcher.stream(options, factory[, callback])`](Dispatcher.md#dispatcherstreamoptions-factory-callback). + +### `Pool.upgrade(options[, callback])` + +See [`Dispatcher.upgrade(options[, callback])`](Dispatcher.md#dispatcherupgradeoptions-callback). + +## Instance Events + +### Event: `'connect'` + +See [Dispatcher Event: `'connect'`](Dispatcher.md#event-connect). + +### Event: `'disconnect'` + +See [Dispatcher Event: `'disconnect'`](Dispatcher.md#event-disconnect). + +### Event: `'drain'` + +See [Dispatcher Event: `'drain'`](Dispatcher.md#event-drain). diff --git a/docs/api/PoolStats.md b/docs/api/PoolStats.md new file mode 100644 index 0000000..16b6dc2 --- /dev/null +++ b/docs/api/PoolStats.md @@ -0,0 +1,35 @@ +# Class: PoolStats + +Aggregate stats for a [Pool](Pool.md) or [BalancedPool](BalancedPool.md). + +## `new PoolStats(pool)` + +Arguments: + +* **pool** `Pool` - Pool or BalancedPool from which to return stats. + +## Instance Properties + +### `PoolStats.connected` + +Number of open socket connections in this pool. + +### `PoolStats.free` + +Number of open socket connections in this pool that do not have an active request. + +### `PoolStats.pending` + +Number of pending requests across all clients in this pool. + +### `PoolStats.queued` + +Number of queued requests across all clients in this pool. + +### `PoolStats.running` + +Number of currently active requests across all clients in this pool. + +### `PoolStats.size` + +Number of active, pending, or queued requests across all clients in this pool. diff --git a/docs/api/ProxyAgent.md b/docs/api/ProxyAgent.md new file mode 100644 index 0000000..cebfe68 --- /dev/null +++ b/docs/api/ProxyAgent.md @@ -0,0 +1,126 @@ +# Class: ProxyAgent + +Extends: `undici.Dispatcher` + +A Proxy Agent class that implements the Agent API. It allows the connection through proxy in a simple way. + +## `new ProxyAgent([options])` + +Arguments: + +* **options** `ProxyAgentOptions` (required) - It extends the `Agent` options. + +Returns: `ProxyAgent` + +### Parameter: `ProxyAgentOptions` + +Extends: [`AgentOptions`](Agent.md#parameter-agentoptions) + +* **uri** `string` (required) - It can be passed either by a string or a object containing `uri` as string. +* **token** `string` (optional) - It can be passed by a string of token for authentication. +* **auth** `string` (**deprecated**) - Use token. +* **clientFactory** `(origin: URL, opts: Object) => Dispatcher` (optional) - Default: `(origin, opts) => new Pool(origin, opts)` +* **requestTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the request. See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback). +* **proxyTls** `BuildOptions` (optional) - Options object passed when creating the underlying socket via the connector builder for the proxy server. 
See [TLS](https://nodejs.org/api/tls.html#tlsconnectoptions-callback). + +Examples: + +```js +import { ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +// or +const proxyAgent = new ProxyAgent({ uri: 'my.proxy.server' }) +``` + +#### Example - Basic ProxyAgent instantiation + +This will instantiate the ProxyAgent. It will not do anything until registered as the agent to use with requests. + +```js +import { ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +``` + +#### Example - Basic Proxy Request with global agent dispatcher + +```js +import { setGlobalDispatcher, request, ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +setGlobalDispatcher(proxyAgent) + +const { statusCode, body } = await request('http://localhost:3000/foo') + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Proxy Request with local agent dispatcher + +```js +import { ProxyAgent, request } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') + +const { + statusCode, + body +} = await request('http://localhost:3000/foo', { dispatcher: proxyAgent }) + +console.log('response received', statusCode) // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')) // data foo +} +``` + +#### Example - Basic Proxy Request with authentication + +```js +import { setGlobalDispatcher, request, ProxyAgent } from 'undici'; + +const proxyAgent = new ProxyAgent({ + uri: 'my.proxy.server', + // token: 'Bearer xxxx' + token: `Basic ${Buffer.from('username:password').toString('base64')}` +}); +setGlobalDispatcher(proxyAgent); + +const { statusCode, body } = await request('http://localhost:3000/foo'); + +console.log('response received', statusCode); // response received 200 + +for await (const data of body) { + console.log('data', data.toString('utf8')); // data foo +} +``` + +### `ProxyAgent.close()` + +Closes the proxy agent and waits for registered pools and clients to also close before resolving. + +Returns: `Promise` + +#### Example - clean up after tests are complete + +```js +import { ProxyAgent, setGlobalDispatcher } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') +setGlobalDispatcher(proxyAgent) + +await proxyAgent.close() +``` + +### `ProxyAgent.dispatch(options, handlers)` + +Implements [`Agent.dispatch(options, handlers)`](Agent.md#parameter-agentdispatchoptions). + +### `ProxyAgent.request(options[, callback])` + +See [`Dispatcher.request(options [, callback])`](Dispatcher.md#dispatcherrequestoptions-callback). diff --git a/docs/api/RetryHandler.md b/docs/api/RetryHandler.md new file mode 100644 index 0000000..2323ce4 --- /dev/null +++ b/docs/api/RetryHandler.md @@ -0,0 +1,108 @@ +# Class: RetryHandler + +Extends: `undici.DispatcherHandlers` + +A handler class that implements the retry logic for a request. + +## `new RetryHandler(dispatchOptions, retryHandlers, [retryOptions])` + +Arguments: + +- **options** `Dispatch.DispatchOptions & RetryOptions` (required) - It is an intersection of `Dispatcher.DispatchOptions` and `RetryOptions`. +- **retryHandlers** `RetryHandlers` (required) - Object containing the `dispatch` to be used on every retry, and `handler` for handling the `dispatch` lifecycle. 
+ +Returns: `retryHandler` + +### Parameter: `Dispatch.DispatchOptions & RetryOptions` + +Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions). + +#### `RetryOptions` + +- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed. +- **maxRetries** `number` (optional) - Maximum number of retries. Default: `5` +- **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds) +- **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. Default: `500` (half a second) +- **timeoutFactor** `number` (optional) - Factor to multiply the timeout by for each retry attempt. Default: `2` +- **retryAfter** `boolean` (optional) - It enables automatic retry after the `Retry-After` header is received. Default: `true` +- +- **methods** `string[]` (optional) - Array of HTTP methods to retry. Default: `['GET', 'PUT', 'HEAD', 'OPTIONS', 'DELETE']` +- **statusCodes** `number[]` (optional) - Array of HTTP status codes to retry. Default: `[429, 500, 502, 503, 504]` +- **errorCodes** `string[]` (optional) - Array of Error codes to retry. Default: `['ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN','ENETUNREACH', 'EHOSTDOWN', + +**`RetryContext`** + +- `state`: `RetryState` - Current retry state. It can be mutated. +- `opts`: `Dispatch.DispatchOptions & RetryOptions` - Options passed to the retry handler. + +### Parameter `RetryHandlers` + +- **dispatch** `(options: Dispatch.DispatchOptions, handlers: Dispatch.DispatchHandlers) => Promise` (required) - Dispatch function to be called after every retry. +- **handler** Extends [`Dispatch.DispatchHandlers`](Dispatcher.md#dispatcherdispatchoptions-handler) (required) - Handler function to be called after the request is successful or the retries are exhausted. + +Examples: + +```js +const client = new Client(`http://localhost:${server.address().port}`); +const chunks = []; +const handler = new RetryHandler( + { + ...dispatchOptions, + retryOptions: { + // custom retry function + retry: function (err, state, callback) { + counter++; + + if (err.code && err.code === "UND_ERR_DESTROYED") { + callback(err); + return; + } + + if (err.statusCode === 206) { + callback(err); + return; + } + + setTimeout(() => callback(null), 1000); + }, + }, + }, + { + dispatch: (...args) => { + return client.dispatch(...args); + }, + handler: { + onConnect() {}, + onBodySent() {}, + onHeaders(status, _rawHeaders, resume, _statusMessage) { + // do something with headers + }, + onData(chunk) { + chunks.push(chunk); + return true; + }, + onComplete() {}, + onError() { + // handle error properly + }, + }, + } +); +``` + +#### Example - Basic RetryHandler with defaults + +```js +const client = new Client(`http://localhost:${server.address().port}`); +const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: { + onConnect() {}, + onBodySent() {}, + onHeaders(status, _rawHeaders, resume, _statusMessage) {}, + onData(chunk) {}, + onComplete() {}, + onError(err) {}, + }, +}); +``` diff --git a/docs/api/WebSocket.md b/docs/api/WebSocket.md new file mode 100644 index 0000000..9d374f4 --- /dev/null +++ b/docs/api/WebSocket.md @@ -0,0 +1,43 @@ +# Class: WebSocket + +> âš ï¸ Warning: the WebSocket API is experimental. 
+ +Extends: [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget) + +The WebSocket object provides a way to manage a WebSocket connection to a server, allowing bidirectional communication. The API follows the [WebSocket spec](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) and [RFC 6455](https://datatracker.ietf.org/doc/html/rfc6455). + +## `new WebSocket(url[, protocol])` + +Arguments: + +* **url** `URL | string` - The url's protocol *must* be `ws` or `wss`. +* **protocol** `string | string[] | WebSocketInit` (optional) - Subprotocol(s) to request the server use, or a [`Dispatcher`](./Dispatcher.md). + +### Example: + +This example will not work in browsers or other platforms that don't allow passing an object. + +```mjs +import { WebSocket, ProxyAgent } from 'undici' + +const proxyAgent = new ProxyAgent('my.proxy.server') + +const ws = new WebSocket('wss://echo.websocket.events', { + dispatcher: proxyAgent, + protocols: ['echo', 'chat'] +}) +``` + +If you do not need a custom Dispatcher, it's recommended to use the following pattern: + +```mjs +import { WebSocket } from 'undici' + +const ws = new WebSocket('wss://echo.websocket.events', ['echo', 'chat']) +``` + +## Read More + +- [MDN - WebSocket](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) +- [The WebSocket Specification](https://www.rfc-editor.org/rfc/rfc6455) +- [The WHATWG WebSocket Specification](https://websockets.spec.whatwg.org/) diff --git a/docs/api/api-lifecycle.md b/docs/api/api-lifecycle.md new file mode 100644 index 0000000..d158126 --- /dev/null +++ b/docs/api/api-lifecycle.md @@ -0,0 +1,62 @@ +# Client Lifecycle + +An Undici [Client](Client.md) can be best described as a state machine. The following list is a summary of the various state transitions the `Client` will go through in its lifecycle. This document also contains detailed breakdowns of each state. + +> This diagram is not a perfect representation of the undici Client. Since the Client class is not actually implemented as a state-machine, actual execution may deviate slightly from what is described below. Consider this as a general resource for understanding the inner workings of the Undici client rather than some kind of formal specification. + +## State Transition Overview + +* A `Client` begins in the **idle** state with no socket connection and no requests in queue. + * The *connect* event transitions the `Client` to the **pending** state where requests can be queued prior to processing. + * The *close* and *destroy* events transition the `Client` to the **destroyed** state. Since there are no requests in the queue, the *close* event immediately transitions to the **destroyed** state. +* The **pending** state indicates the underlying socket connection has been successfully established and requests are queueing. + * The *process* event transitions the `Client` to the **processing** state where requests are processed. + * If requests are queued, the *close* event transitions to the **processing** state; otherwise, it transitions to the **destroyed** state. + * The *destroy* event transitions to the **destroyed** state. +* The **processing** state initializes to the **processing.running** state. + * If the current request requires draining, the *needDrain* event transitions the `Client` into the **processing.busy** state which will return to the **processing.running** state with the *drainComplete* event. 
+ * After all queued requests are completed, the *keepalive* event transitions the `Client` back to the **pending** state. If no requests are queued during the timeout, the **close** event transitions the `Client` to the **destroyed** state. + * If the *close* event is fired while the `Client` still has queued requests, the `Client` transitions to the **process.closing** state where it will complete all existing requests before firing the *done* event. + * The *done* event gracefully transitions the `Client` to the **destroyed** state. + * At any point in time, the *destroy* event will transition the `Client` from the **processing** state to the **destroyed** state, destroying any queued requests. +* The **destroyed** state is a final state and the `Client` is no longer functional. + +![A state diagram representing an Undici Client instance](../assets/lifecycle-diagram.png) + +> The diagram was generated using Mermaid.js Live Editor. Modify the state diagram [here](https://mermaid-js.github.io/mermaid-live-editor/#/edit/eyJjb2RlIjoic3RhdGVEaWFncmFtLXYyXG4gICAgWypdIC0tPiBpZGxlXG4gICAgaWRsZSAtLT4gcGVuZGluZyA6IGNvbm5lY3RcbiAgICBpZGxlIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95L2Nsb3NlXG4gICAgXG4gICAgcGVuZGluZyAtLT4gaWRsZSA6IHRpbWVvdXRcbiAgICBwZW5kaW5nIC0tPiBkZXN0cm95ZWQgOiBkZXN0cm95XG5cbiAgICBzdGF0ZSBjbG9zZV9mb3JrIDw8Zm9yaz4-XG4gICAgcGVuZGluZyAtLT4gY2xvc2VfZm9yayA6IGNsb3NlXG4gICAgY2xvc2VfZm9yayAtLT4gcHJvY2Vzc2luZ1xuICAgIGNsb3NlX2ZvcmsgLS0-IGRlc3Ryb3llZFxuXG4gICAgcGVuZGluZyAtLT4gcHJvY2Vzc2luZyA6IHByb2Nlc3NcblxuICAgIHByb2Nlc3NpbmcgLS0-IHBlbmRpbmcgOiBrZWVwYWxpdmVcbiAgICBwcm9jZXNzaW5nIC0tPiBkZXN0cm95ZWQgOiBkb25lXG4gICAgcHJvY2Vzc2luZyAtLT4gZGVzdHJveWVkIDogZGVzdHJveVxuXG4gICAgc3RhdGUgcHJvY2Vzc2luZyB7XG4gICAgICAgIHJ1bm5pbmcgLS0-IGJ1c3kgOiBuZWVkRHJhaW5cbiAgICAgICAgYnVzeSAtLT4gcnVubmluZyA6IGRyYWluQ29tcGxldGVcbiAgICAgICAgcnVubmluZyAtLT4gWypdIDoga2VlcGFsaXZlXG4gICAgICAgIHJ1bm5pbmcgLS0-IGNsb3NpbmcgOiBjbG9zZVxuICAgICAgICBjbG9zaW5nIC0tPiBbKl0gOiBkb25lXG4gICAgICAgIFsqXSAtLT4gcnVubmluZ1xuICAgIH1cbiAgICAiLCJtZXJtYWlkIjp7InRoZW1lIjoiYmFzZSJ9LCJ1cGRhdGVFZGl0b3IiOmZhbHNlfQ) + +## State details + +### idle + +The **idle** state is the initial state of a `Client` instance. While an `origin` is required for instantiating a `Client` instance, the underlying socket connection will not be established until a request is queued using [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers). By calling `Client.dispatch()` directly or using one of the multiple implementations ([`Client.connect()`](Client.md#clientconnectoptions-callback), [`Client.pipeline()`](Client.md#clientpipelineoptions-handler), [`Client.request()`](Client.md#clientrequestoptions-callback), [`Client.stream()`](Client.md#clientstreamoptions-factory-callback), and [`Client.upgrade()`](Client.md#clientupgradeoptions-callback)), the `Client` instance will transition from **idle** to [**pending**](#pending) and then most likely directly to [**processing**](#processing). + +Calling [`Client.close()`](Client.md#clientclosecallback) or [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state since the `Client` instance will have no queued requests in this state. + +### pending + +The **pending** state signifies a non-processing `Client`. Upon entering this state, the `Client` establishes a socket connection and emits the [`'connect'`](Client.md#event-connect) event signalling a connection was successfully established with the `origin` provided during `Client` instantiation. 
The internal queue is initially empty, and requests can start queueing.
+
+Calling [`Client.close()`](Client.md#clientclosecallback) with queued requests transitions the `Client` to the [**processing**](#processing) state. Without queued requests, it transitions to the [**destroyed**](#destroyed) state.
+
+Calling [`Client.destroy()`](Client.md#clientdestroyerror-callback) transitions directly to the [**destroyed**](#destroyed) state regardless of existing requests.
+
+### processing
+
+The **processing** state is a state machine within itself. It initializes to the [**processing.running**](#running) state. The [`Client.dispatch()`](Client.md#clientdispatchoptions-handlers), [`Client.close()`](Client.md#clientclosecallback), and [`Client.destroy()`](Client.md#clientdestroyerror-callback) methods can be called at any time while the `Client` is in this state. `Client.dispatch()` will add more requests to the queue while existing requests continue to be processed, `Client.close()` will transition to the [**processing.closing**](#closing) state, and `Client.destroy()` will transition to [**destroyed**](#destroyed).
+
+#### running
+
+In the **processing.running** sub-state, queued requests are processed in FIFO order. If a request body requires draining, the *needDrain* event transitions to the [**processing.busy**](#busy) sub-state. The *close* event transitions the `Client` to the [**process.closing**](#closing) sub-state. If all queued requests are processed and neither [`Client.close()`](Client.md#clientclosecallback) nor [`Client.destroy()`](Client.md#clientdestroyerror-callback) is called, the [**processing**](#processing) machine triggers a *keepalive* event, transitioning the `Client` back to the [**pending**](#pending) state. The `Client` then waits for the socket connection to time out; once it does, the *timeout* event fires and the `Client` transitions to the [**idle**](#idle) state.
+
+#### busy
+
+This sub-state is only entered when a request body is an instance of [Stream](https://nodejs.org/api/stream.html) and requires draining. The `Client` cannot process additional requests while in this state and must wait until the currently processing request body is completely drained before transitioning back to [**processing.running**](#running).
+
+#### closing
+
+This sub-state is only entered when a `Client` instance has queued requests and the [`Client.close()`](Client.md#clientclosecallback) method is called. In this state, the `Client` instance continues to process requests as usual, with the one exception that no additional requests can be queued. Once all of the queued requests are processed, the `Client` triggers the *done* event and gracefully enters the [**destroyed**](#destroyed) state without an error.
+
+### destroyed
+
+The **destroyed** state is a final state for the `Client` instance. Once in this state, a `Client` is nonfunctional. Calling any other `Client` method will result in a `ClientDestroyedError`.
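+
+As a minimal sketch, assuming a server is listening on `http://localhost:3000`, the following walks a `Client` from **idle** through **processing** to **destroyed** and shows the `ClientDestroyedError` raised afterwards:
+
+```js
+import { Client, errors } from 'undici'
+
+const client = new Client('http://localhost:3000') // idle: no socket yet
+
+// Queueing a request transitions the client to pending and then processing.
+const { statusCode, body } = await client.request({ path: '/', method: 'GET' })
+console.log('status', statusCode)
+await body.text()
+
+// Graceful close: remaining requests finish, then the client is destroyed.
+await client.close()
+
+try {
+  await client.request({ path: '/', method: 'GET' })
+} catch (err) {
+  console.log(err instanceof errors.ClientDestroyedError) // true
+}
+```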
diff --git a/docs/assets/lifecycle-diagram.png b/docs/assets/lifecycle-diagram.png new file mode 100644 index 0000000..4ef17b5 Binary files /dev/null and b/docs/assets/lifecycle-diagram.png differ diff --git a/docs/best-practices/client-certificate.md b/docs/best-practices/client-certificate.md new file mode 100644 index 0000000..4fc84ec --- /dev/null +++ b/docs/best-practices/client-certificate.md @@ -0,0 +1,64 @@ +# Client certificate + +Client certificate authentication can be configured with the `Client`, the required options are passed along through the `connect` option. + +The client certificates must be signed by a trusted CA. The Node.js default is to trust the well-known CAs curated by Mozilla. + +Setting the server option `requestCert: true` tells the server to request the client certificate. + +The server option `rejectUnauthorized: false` allows us to handle any invalid certificate errors in client code. The `authorized` property on the socket of the incoming request will show if the client certificate was valid. The `authorizationError` property will give the reason if the certificate was not valid. + +### Client Certificate Authentication + +```js +const { readFileSync } = require('fs') +const { join } = require('path') +const { createServer } = require('https') +const { Client } = require('undici') + +const serverOptions = { + ca: [ + readFileSync(join(__dirname, 'client-ca-crt.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'server-key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'server-crt.pem'), 'utf8'), + requestCert: true, + rejectUnauthorized: false +} + +const server = createServer(serverOptions, (req, res) => { + // true if client cert is valid + if(req.client.authorized === true) { + console.log('valid') + } else { + console.error(req.client.authorizationError) + } + res.end() +}) + +server.listen(0, function () { + const tls = { + ca: [ + readFileSync(join(__dirname, 'server-ca-crt.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'client-key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'client-crt.pem'), 'utf8'), + rejectUnauthorized: false, + servername: 'agent1' + } + const client = new Client(`https://localhost:${server.address().port}`, { + connect: tls + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + body.on('data', (buf) => {}) + body.on('end', () => { + client.close() + server.close() + }) + }) +}) +``` diff --git a/docs/best-practices/mocking-request.md b/docs/best-practices/mocking-request.md new file mode 100644 index 0000000..6954392 --- /dev/null +++ b/docs/best-practices/mocking-request.md @@ -0,0 +1,136 @@ +# Mocking Request + +Undici has its own mocking [utility](../api/MockAgent.md). It allow us to intercept undici HTTP requests and return mocked values instead. It can be useful for testing purposes. 
+ +Example: + +```js +// bank.mjs +import { request } from 'undici' + +export async function bankTransfer(recipient, amount) { + const { body } = await request('http://localhost:3000/bank-transfer', + { + method: 'POST', + headers: { + 'X-TOKEN-SECRET': 'SuperSecretToken', + }, + body: JSON.stringify({ + recipient, + amount + }) + } + ) + return await body.json() +} +``` + +And this is what the test file looks like: + +```js +// index.test.mjs +import { strict as assert } from 'assert' +import { MockAgent, setGlobalDispatcher, } from 'undici' +import { bankTransfer } from './bank.mjs' + +const mockAgent = new MockAgent(); + +setGlobalDispatcher(mockAgent); + +// Provide the base url to the request +const mockPool = mockAgent.get('http://localhost:3000'); + +// intercept the request +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', + headers: { + 'X-TOKEN-SECRET': 'SuperSecretToken', + }, + body: JSON.stringify({ + recipient: '1234567890', + amount: '100' + }) +}).reply(200, { + message: 'transaction processed' +}) + +const success = await bankTransfer('1234567890', '100') + +assert.deepEqual(success, { message: 'transaction processed' }) + +// if you dont want to check whether the body or the headers contain the same value +// just remove it from interceptor +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', +}).reply(400, { + message: 'bank account not found' +}) + +const badRequest = await bankTransfer('1234567890', '100') + +assert.deepEqual(badRequest, { message: 'bank account not found' }) +``` + +Explore other MockAgent functionality [here](../api/MockAgent.md) + +## Debug Mock Value + +When the interceptor and the request options are not the same, undici will automatically make a real HTTP request. To prevent real requests from being made, use `mockAgent.disableNetConnect()`: + +```js +const mockAgent = new MockAgent(); + +setGlobalDispatcher(mockAgent); +mockAgent.disableNetConnect() + +// Provide the base url to the request +const mockPool = mockAgent.get('http://localhost:3000'); + +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', +}).reply(200, { + message: 'transaction processed' +}) + +const badRequest = await bankTransfer('1234567890', '100') +// Will throw an error +// MockNotMatchedError: Mock dispatch not matched for path '/bank-transfer': +// subsequent request to origin http://localhost:3000 was not allowed (net.connect disabled) +``` + +## Reply with data based on request + +If the mocked response needs to be dynamically derived from the request parameters, you can provide a function instead of an object to `reply`: + +```js +mockPool.intercept({ + path: '/bank-transfer', + method: 'POST', + headers: { + 'X-TOKEN-SECRET': 'SuperSecretToken', + }, + body: JSON.stringify({ + recipient: '1234567890', + amount: '100' + }) +}).reply(200, (opts) => { + // do something with opts + + return { message: 'transaction processed' } +}) +``` + +in this case opts will be + +``` +{ + method: 'POST', + headers: { 'X-TOKEN-SECRET': 'SuperSecretToken' }, + body: '{"recipient":"1234567890","amount":"100"}', + origin: 'http://localhost:3000', + path: '/bank-transfer' +} +``` diff --git a/docs/best-practices/proxy.md b/docs/best-practices/proxy.md new file mode 100644 index 0000000..bf10295 --- /dev/null +++ b/docs/best-practices/proxy.md @@ -0,0 +1,127 @@ +# Connecting through a proxy + +Connecting through a proxy is possible by: + +- Using [AgentProxy](../api/ProxyAgent.md). +- Configuring `Client` or `Pool` constructor. 
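+
+For the first option, a minimal sketch (assuming a proxy listening on `http://localhost:8000` and an upstream server on `http://localhost:3000`) looks like this; see [ProxyAgent](../api/ProxyAgent.md) for the full API:
+
+```js
+import { ProxyAgent, request } from 'undici'
+
+const proxyAgent = new ProxyAgent('http://localhost:8000')
+
+// The request is tunnelled through the proxy to the upstream server.
+const { statusCode, body } = await request('http://localhost:3000/hello?foo=bar', {
+  dispatcher: proxyAgent
+})
+
+console.log(statusCode, await body.json())
+```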
+ +The proxy url should be passed to the `Client` or `Pool` constructor, while the upstream server url +should be added to every request call in the `path`. +For instance, if you need to send a request to the `/hello` route of your upstream server, +the `path` should be `path: 'http://upstream.server:port/hello?foo=bar'`. + +If you proxy requires basic authentication, you can send it via the `proxy-authorization` header. + +### Connect without authentication + +```js +import { Client } from 'undici' +import { createServer } from 'http' +import proxy from 'proxy' + +const server = await buildServer() +const proxyServer = await buildProxy() + +const serverUrl = `http://localhost:${server.address().port}` +const proxyUrl = `http://localhost:${proxyServer.address().port}` + +server.on('request', (req, res) => { + console.log(req.url) // '/hello?foo=bar' + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) +}) + +const client = new Client(proxyUrl) + +const response = await client.request({ + method: 'GET', + path: serverUrl + '/hello?foo=bar' +}) + +response.body.setEncoding('utf8') +let data = '' +for await (const chunk of response.body) { + data += chunk +} +console.log(response.statusCode) // 200 +console.log(JSON.parse(data)) // { hello: 'world' } + +server.close() +proxyServer.close() +client.close() + +function buildServer () { + return new Promise((resolve, reject) => { + const server = createServer() + server.listen(0, () => resolve(server)) + }) +} + +function buildProxy () { + return new Promise((resolve, reject) => { + const server = proxy(createServer()) + server.listen(0, () => resolve(server)) + }) +} +``` + +### Connect with authentication + +```js +import { Client } from 'undici' +import { createServer } from 'http' +import proxy from 'proxy' + +const server = await buildServer() +const proxyServer = await buildProxy() + +const serverUrl = `http://localhost:${server.address().port}` +const proxyUrl = `http://localhost:${proxyServer.address().port}` + +proxyServer.authenticate = function (req, fn) { + fn(null, req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`) +} + +server.on('request', (req, res) => { + console.log(req.url) // '/hello?foo=bar' + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) +}) + +const client = new Client(proxyUrl) + +const response = await client.request({ + method: 'GET', + path: serverUrl + '/hello?foo=bar', + headers: { + 'proxy-authorization': `Basic ${Buffer.from('user:pass').toString('base64')}` + } +}) + +response.body.setEncoding('utf8') +let data = '' +for await (const chunk of response.body) { + data += chunk +} +console.log(response.statusCode) // 200 +console.log(JSON.parse(data)) // { hello: 'world' } + +server.close() +proxyServer.close() +client.close() + +function buildServer () { + return new Promise((resolve, reject) => { + const server = createServer() + server.listen(0, () => resolve(server)) + }) +} + +function buildProxy () { + return new Promise((resolve, reject) => { + const server = proxy(createServer()) + server.listen(0, () => resolve(server)) + }) +} +``` + diff --git a/docs/best-practices/writing-tests.md b/docs/best-practices/writing-tests.md new file mode 100644 index 0000000..57549de --- /dev/null +++ b/docs/best-practices/writing-tests.md @@ -0,0 +1,20 @@ +# Writing tests + +Undici is tuned for a production use case and its default will keep +a socket open for a few seconds after an HTTP 
request is completed to +remove the overhead of opening up a new socket. These settings that makes +Undici shine in production are not a good fit for using Undici in automated +tests, as it will result in longer execution times. + +The following are good defaults that will keep the socket open for only 10ms: + +```js +import { request, setGlobalDispatcher, Agent } from 'undici' + +const agent = new Agent({ + keepAliveTimeout: 10, // milliseconds + keepAliveMaxTimeout: 10 // milliseconds +}) + +setGlobalDispatcher(agent) +``` diff --git a/docsify/sidebar.md b/docsify/sidebar.md new file mode 100644 index 0000000..b7c7d6a --- /dev/null +++ b/docsify/sidebar.md @@ -0,0 +1,28 @@ + + +* [**Home**](/ "Node.js Undici") +* API + * [Dispatcher](/docs/api/Dispatcher.md "Undici API - Dispatcher") + * [Client](/docs/api/Client.md "Undici API - Client") + * [Pool](/docs/api/Pool.md "Undici API - Pool") + * [BalancedPool](/docs/api/BalancedPool.md "Undici API - BalancedPool") + * [Agent](/docs/api/Agent.md "Undici API - Agent") + * [ProxyAgent](/docs/api/ProxyAgent.md "Undici API - ProxyAgent") + * [Connector](/docs/api/Connector.md "Custom connector") + * [Errors](/docs/api/Errors.md "Undici API - Errors") + * [Fetch](/docs/api/Fetch.md "Undici API - Fetch") + * [Cookies](/docs/api/Cookies.md "Undici API - Cookies") + * [MockClient](/docs/api/MockClient.md "Undici API - MockClient") + * [MockPool](/docs/api/MockPool.md "Undici API - MockPool") + * [MockAgent](/docs/api/MockAgent.md "Undici API - MockAgent") + * [MockErrors](/docs/api/MockErrors.md "Undici API - MockErrors") + * [API Lifecycle](/docs/api/api-lifecycle.md "Undici API - Lifecycle") + * [Diagnostics Channel Support](/docs/api/DiagnosticsChannel.md "Diagnostics Channel Support") + * [WebSocket](/docs/api/WebSocket.md "Undici API - WebSocket") + * [MIME Type Parsing](/docs/api/ContentType.md "Undici API - MIME Type Parsing") + * [CacheStorage](/docs/api/CacheStorage.md "Undici API - CacheStorage") +* Best Practices + * [Proxy](/docs/best-practices/proxy.md "Connecting through a proxy") + * [Client Certificate](/docs/best-practices/client-certificate.md "Connect using a client certificate") + * [Writing Tests](/docs/best-practices/writing-tests.md "Using Undici inside tests") + * [Mocking Request](/docs/best-practices/mocking-request.md "Using Undici inside tests") diff --git a/examples/ca-fingerprint/index.js b/examples/ca-fingerprint/index.js new file mode 100644 index 0000000..792c08c --- /dev/null +++ b/examples/ca-fingerprint/index.js @@ -0,0 +1,80 @@ +'use strict' + +const crypto = require('crypto') +const https = require('https') +const { Client, buildConnector } = require('../..') +const pem = require('https-pem') + +const caFingerprint = getFingerprint(pem.cert.toString() + .split('\n') + .slice(1, -1) + .map(line => line.trim()) + .join('') +) + +const server = https.createServer(pem, (req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('hello') +}) + +server.listen(0, function () { + const connector = buildConnector({ rejectUnauthorized: false }) + const client = new Client(`https://localhost:${server.address().port}`, { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (getIssuerCertificate(socket).fingerprint256 !== caFingerprint) { + socket.destroy() + cb(new Error('Fingerprint does not match or malformed certificate')) + } else { + cb(null, socket) + } + }) + } + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + if (err) throw err + + 
const bufs = [] + data.body.on('data', (buf) => { + bufs.push(buf) + }) + data.body.on('end', () => { + console.log(Buffer.concat(bufs).toString('utf8')) + client.close() + server.close() + }) + }) +}) + +function getIssuerCertificate (socket) { + let certificate = socket.getPeerCertificate(true) + while (certificate && Object.keys(certificate).length > 0) { + // invalid certificate + if (certificate.issuerCertificate == null) { + return null + } + + // We have reached the root certificate. + // In case of self-signed certificates, `issuerCertificate` may be a circular reference. + if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) { + break + } + + // continue the loop + certificate = certificate.issuerCertificate + } + return certificate +} + +function getFingerprint (content, inputEncoding = 'base64', outputEncoding = 'hex') { + const shasum = crypto.createHash('sha256') + shasum.update(content, inputEncoding) + const res = shasum.digest(outputEncoding) + return res.toUpperCase().match(/.{1,2}/g).join(':') +} diff --git a/examples/fetch.js b/examples/fetch.js new file mode 100644 index 0000000..7ece2b8 --- /dev/null +++ b/examples/fetch.js @@ -0,0 +1,13 @@ +'use strict' + +const { fetch } = require('../') + +async function main () { + const res = await fetch('http://localhost:3001/') + + const data = await res.text() + console.log('response received', res.status) + console.log('headers', res.headers) + console.log('data', data) +} +main() diff --git a/examples/proxy-agent.js b/examples/proxy-agent.js new file mode 100644 index 0000000..7caf836 --- /dev/null +++ b/examples/proxy-agent.js @@ -0,0 +1,25 @@ +'use strict' + +const { request, setGlobalDispatcher, ProxyAgent } = require('../') + +setGlobalDispatcher(new ProxyAgent('http://localhost:8000/')) + +async function main () { + const { + statusCode, + headers, + trailers, + body + // send the request via the http://localhost:8000/ HTTP proxy + } = await request('http://localhost:3000/undici') + + console.log('response received', statusCode) + console.log('headers', headers) + + for await (const data of body) { + console.log('data', data) + } + + console.log('trailers', trailers) +} +main() diff --git a/examples/proxy/index.js b/examples/proxy/index.js new file mode 100644 index 0000000..5f35049 --- /dev/null +++ b/examples/proxy/index.js @@ -0,0 +1,49 @@ +const { Pool, Client } = require('../../') +const http = require('http') +const proxy = require('./proxy') + +const pool = new Pool('http://localhost:4001', { + connections: 256, + pipelining: 1 +}) + +async function run () { + await Promise.all([ + new Promise(resolve => { + // Proxy + http.createServer((req, res) => { + proxy({ req, res, proxyName: 'example' }, pool).catch(err => { + if (res.headersSent) { + res.destroy(err) + } else { + for (const name of res.getHeaderNames()) { + res.removeHeader(name) + } + res.statusCode = err.statusCode || 500 + res.end() + } + }) + }).listen(4000, resolve) + }), + new Promise(resolve => { + // Upstream + http.createServer((req, res) => { + res.end('hello world') + }).listen(4001, resolve) + }) + ]) + + const client = new Client('http://localhost:4000') + const { body } = await client.request({ + method: 'GET', + path: '/' + }) + + for await (const chunk of body) { + console.log(String(chunk)) + } +} + +run() + +// TODO: Add websocket example. 
diff --git a/examples/proxy/proxy.js b/examples/proxy/proxy.js new file mode 100644 index 0000000..bb9fcc4 --- /dev/null +++ b/examples/proxy/proxy.js @@ -0,0 +1,256 @@ +const net = require('net') +const { pipeline } = require('stream') +const createError = require('http-errors') + +module.exports = async function proxy (ctx, client) { + const { req, socket, proxyName } = ctx + + const headers = getHeaders({ + headers: req.rawHeaders, + httpVersion: req.httpVersion, + socket: req.socket, + proxyName + }) + + if (socket) { + const handler = new WSHandler(ctx) + client.dispatch({ + method: req.method, + path: req.url, + headers, + upgrade: 'Websocket' + }, handler) + return handler.promise + } else { + const handler = new HTTPHandler(ctx) + client.dispatch({ + method: req.method, + path: req.url, + headers, + body: req + }, handler) + return handler.promise + } +} + +class HTTPHandler { + constructor (ctx) { + const { req, res, proxyName } = ctx + + this.proxyName = proxyName + this.req = req + this.res = res + this.resume = null + this.abort = null + this.promise = new Promise((resolve, reject) => { + this.callback = err => err ? reject(err) : resolve() + }) + } + + onConnect (abort) { + if (this.req.aborted) { + abort() + } else { + this.abort = abort + this.res.on('close', abort) + } + } + + onHeaders (statusCode, headers, resume) { + if (statusCode < 200) { + return + } + + this.resume = resume + this.res.on('drain', resume) + this.res.writeHead(statusCode, getHeaders({ + headers, + proxyName: this.proxyName, + httpVersion: this.httpVersion + })) + } + + onData (chunk) { + return this.res.write(chunk) + } + + onComplete () { + this.res.off('close', this.abort) + this.res.off('drain', this.resume) + + this.res.end() + this.callback() + } + + onError (err) { + this.res.off('close', this.abort) + this.res.off('drain', this.resume) + + this.callback(err) + } +} + +class WSHandler { + constructor (ctx) { + const { req, socket, proxyName, head } = ctx + + setupSocket(socket) + + this.proxyName = proxyName + this.httpVersion = req.httpVersion + this.socket = socket + this.head = head + this.abort = null + this.promise = new Promise((resolve, reject) => { + this.callback = err => err ? reject(err) : resolve() + }) + } + + onConnect (abort) { + if (this.socket.destroyed) { + abort() + } else { + this.abort = abort + this.socket.on('close', abort) + } + } + + onUpgrade (statusCode, headers, socket) { + this.socket.off('close', this.abort) + + // TODO: Check statusCode? + + if (this.head && this.head.length) { + socket.unshift(this.head) + } + + setupSocket(socket) + + headers = getHeaders({ + headers, + proxyName: this.proxyName, + httpVersion: this.httpVersion + }) + + let head = '' + for (let n = 0; n < headers.length; n += 2) { + head += `\r\n${headers[n]}: ${headers[n + 1]}` + } + + this.socket.write(`HTTP/1.1 101 Switching Protocols\r\nconnection: upgrade\r\nupgrade: websocket${head}\r\n\r\n`) + + pipeline(socket, this.socket, socket, this.callback) + } + + onError (err) { + this.socket.off('close', this.abort) + + this.callback(err) + } +} + +// This expression matches hop-by-hop headers. +// These headers are meaningful only for a single transport-level connection, +// and must not be retransmitted by proxies or cached. +const HOP_EXPR = /^(te|host|upgrade|trailers|connection|keep-alive|http2-settings|transfer-encoding|proxy-connection|proxy-authenticate|proxy-authorization)$/i + +// Removes hop-by-hop and pseudo headers. +// Updates via and forwarded headers. 
+// Only hop-by-hop headers may be set using the Connection general header. +function getHeaders ({ + headers, + proxyName, + httpVersion, + socket +}) { + let via = '' + let forwarded = '' + let host = '' + let authority = '' + let connection = '' + + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n] + const val = headers[n + 1] + + if (!via && key.length === 3 && key.toLowerCase() === 'via') { + via = val + } else if (!host && key.length === 4 && key.toLowerCase() === 'host') { + host = val + } else if (!forwarded && key.length === 9 && key.toLowerCase() === 'forwarded') { + forwarded = val + } else if (!connection && key.length === 10 && key.toLowerCase() === 'connection') { + connection = val + } else if (!authority && key.length === 10 && key === ':authority') { + authority = val + } + } + + let remove + if (connection && !HOP_EXPR.test(connection)) { + remove = connection.split(/,\s*/) + } + + const result = [] + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n] + const val = headers[n + 1] + + if ( + key.charAt(0) !== ':' && + !HOP_EXPR.test(key) && + (!remove || !remove.includes(key)) + ) { + result.push(key, val) + } + } + + if (socket) { + result.push('forwarded', (forwarded ? forwarded + ', ' : '') + [ + `by=${printIp(socket.localAddress, socket.localPort)}`, + `for=${printIp(socket.remoteAddress, socket.remotePort)}`, + `proto=${socket.encrypted ? 'https' : 'http'}`, + `host=${printIp(authority || host || '')}` + ].join(';')) + } else if (forwarded) { + // The forwarded header should not be included in response. + throw new createError.BadGateway() + } + + if (proxyName) { + if (via) { + if (via.split(',').some(name => name.endsWith(proxyName))) { + throw new createError.LoopDetected() + } + via += ', ' + } + via += `${httpVersion} ${proxyName}` + } + + if (via) { + result.push('via', via) + } + + return result +} + +function setupSocket (socket) { + socket.setTimeout(0) + socket.setNoDelay(true) + socket.setKeepAlive(true, 0) +} + +function printIp (address, port) { + const isIPv6 = net.isIPv6(address) + let str = `${address}` + if (isIPv6) { + str = `[${str}]` + } + if (port) { + str = `${str}:${port}` + } + if (isIPv6 || port) { + str = `"${str}"` + } + return str +} diff --git a/examples/request.js b/examples/request.js new file mode 100644 index 0000000..1b03254 --- /dev/null +++ b/examples/request.js @@ -0,0 +1,18 @@ +'use strict' + +const { request } = require('../') + +async function main () { + const { + statusCode, + headers, + body + } = await request('http://localhost:3001/') + + const data = await body.text() + console.log('response received', statusCode) + console.log('headers', headers) + console.log('data', data) +} + +main() diff --git a/fastify-busboy/.eslintrc.js b/fastify-busboy/.eslintrc.js new file mode 100644 index 0000000..4b904cd --- /dev/null +++ b/fastify-busboy/.eslintrc.js @@ -0,0 +1,27 @@ +module.exports = { + ignorePatterns: [ + 'bench', + 'deps/encoding' + ], + extends: [ + 'standard', + 'eslint:recommended', + 'plugin:n/recommended' + ], + rules: { + 'no-unused-vars': [1, { vars: 'all', args: 'none' }], + 'n/no-missing-require': 1, + 'no-constant-condition': 'off', + 'no-var': 'off', + 'no-redeclare': 1, + 'no-fallthrough': 1, + 'no-control-regex': 1, + 'no-empty': 'off', + 'prefer-const': 'off' + }, + env: { + node: true, + mocha: true, + es6: true + } +} diff --git a/fastify-busboy/.gitattributes b/fastify-busboy/.gitattributes new file mode 100644 index 0000000..49b4f89 --- /dev/null +++ 
b/fastify-busboy/.gitattributes @@ -0,0 +1,2 @@ +* text=false +*.header -crlf diff --git a/fastify-busboy/.github/dependabot.yml b/fastify-busboy/.github/dependabot.yml new file mode 100644 index 0000000..dfa7fa6 --- /dev/null +++ b/fastify-busboy/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + open-pull-requests-limit: 10 + + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 10 diff --git a/fastify-busboy/.github/workflows/ci.yml b/fastify-busboy/.github/workflows/ci.yml new file mode 100644 index 0000000..babd56d --- /dev/null +++ b/fastify-busboy/.github/workflows/ci.yml @@ -0,0 +1,22 @@ +name: CI + +on: + push: + branches: + - main + - master + - next + - 'v*' + paths-ignore: + - 'docs/**' + - '*.md' + pull_request: + paths-ignore: + - 'docs/**' + - '*.md' + +jobs: + test: + uses: fastify/workflows/.github/workflows/plugins-ci.yml@v3 + with: + license-check: true diff --git a/fastify-busboy/.github/workflows/coverage.yml b/fastify-busboy/.github/workflows/coverage.yml new file mode 100644 index 0000000..3d7b943 --- /dev/null +++ b/fastify-busboy/.github/workflows/coverage.yml @@ -0,0 +1,44 @@ +--- + +name: coverage + +on: + push: + branches: + - master + +jobs: + build: + runs-on: ubuntu-latest + name: coverage + + strategy: + matrix: + node-version: [16.x] + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Setup Node ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + always-auth: false + node-version: ${{ matrix.node-version }} + + - name: Run npm install + run: npm install + + - name: Run Tests + run: npm run test:coverage + + - name: Generate LCOV + run: npm run coveralls + + - name: Update Coveralls + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + if: success() diff --git a/fastify-busboy/.github/workflows/linting.yml b/fastify-busboy/.github/workflows/linting.yml new file mode 100644 index 0000000..407f53e --- /dev/null +++ b/fastify-busboy/.github/workflows/linting.yml @@ -0,0 +1,35 @@ +--- + +name: Linting and Types + +on: + pull_request: + branches: + - master + +jobs: + build: + runs-on: ubuntu-latest + name: Linting and Types + + strategy: + matrix: + node-version: [16.x] + + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Setup Node ${{ matrix.node-version }} + uses: actions/setup-node@v4 + with: + always-auth: false + node-version: ${{ matrix.node-version }} + + - name: Run npm install + run: npm install + + - name: Run lint:everything + run: npm run lint:everything diff --git a/fastify-busboy/.gitignore b/fastify-busboy/.gitignore new file mode 100644 index 0000000..6e49526 --- /dev/null +++ b/fastify-busboy/.gitignore @@ -0,0 +1,152 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# 
node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +# Vim swap files +*.swp + +# macOS files +.DS_Store + +# Clinic +.clinic + +# lock files +bun.lockb +package-lock.json +pnpm-lock.yaml +yarn.lock + +# editor files +.vscode +.idea + +/benchmarks/node_modules/ +/benchmarks/package-lock.json diff --git a/fastify-busboy/.taprc b/fastify-busboy/.taprc new file mode 100644 index 0000000..30a802b --- /dev/null +++ b/fastify-busboy/.taprc @@ -0,0 +1,4 @@ +files: + - test/**/*.test.js + +coverage: false \ No newline at end of file diff --git a/fastify-busboy/CHANGELOG.md b/fastify-busboy/CHANGELOG.md new file mode 100644 index 0000000..6d10297 --- /dev/null +++ b/fastify-busboy/CHANGELOG.md @@ -0,0 +1,28 @@ +# Changelog + +Major changes since the last busboy release (0.3.1): + +# 1.1.0 - 09 June, 2022 + +* Fix potential ReDOS-Attack-Vector in Headerparser (#72) +* Improve array parse performances (#69) +* Export Dicer library (#90) + +# 1.0.0 - 04 December, 2021 + +* Prevent malformed headers from crashing the web server (#34) +* Prevent empty parts from hanging the process (#55) +* Use non-deprecated Buffer creation (#8, #10) +* Include TypeScript types in the package itself (#13) +* Make `busboy` importable both as ESM and as CJS module (#61) +* Improve performance (#21, #32, #36) +* Set `autoDestroy` to `false` by default in order to avoid regressions when upgrading from Node.js 12 to Node.js 14 (#9) +* Add option `isPartAFile`, to make the file-detection configurable (#53) +* Add property `bytesRead` on FileStreams (#51) +* Add and expose headerSize limit (#64) +* Throw an error on non-number limit (#7) +* Use the native TextDecoder and the package `text-decoding` for fallback if Node.js does not support the requested encoding (#50) +* Integrate `dicer` dependency into `busboy` itself (#14) +* Convert tests to Mocha (#11, #12, #22, #23) +* Implement better benchmarks (#40, #54) +* Use JavaScript Standard style (#44, #45) diff --git a/fastify-busboy/LICENSE b/fastify-busboy/LICENSE new file mode 100644 index 
0000000..290762e --- /dev/null +++ b/fastify-busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/fastify-busboy/README.md b/fastify-busboy/README.md new file mode 100644 index 0000000..c74e618 --- /dev/null +++ b/fastify-busboy/README.md @@ -0,0 +1,271 @@ +# busboy + +
+ +[![Build Status](https://github.com/fastify/busboy/workflows/ci/badge.svg)](https://github.com/fastify/busboy/actions) +[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/) +[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/nodejs/security-wg/blob/HEAD/processes/responsible_disclosure_template.md) + +
+ +
+ +[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) +[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy) + +
+ +Description +=========== + +A Node.js module for parsing incoming HTML form data. + +This is an officially supported fork by [fastify](https://github.com/fastify/) organization of the amazing library [originally created](https://github.com/mscdex/busboy) by Brian White, +aimed at addressing long-standing issues with it. + +Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup): + +| Library | Version | Mean time in nanoseconds (less is better) | +|-----------------------|---------|-------------------------------------------| +| busboy | 0.3.1 | `340114` | +| @fastify/busboy | 1.0.0 | `270984` | + +[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31. + +Requirements +============ + +* [Node.js](http://nodejs.org/) 10+ + + +Install +======= + + npm i @fastify/busboy + + +Examples +======== + +* Parsing (multipart) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); +const Busboy = require('busboy'); + +http.createServer((req, res) => { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', (fieldname, file, filename, encoding, mimetype) => { + console.log(`File [${fieldname}]: filename: ${filename}, encoding: ${encoding}, mimetype: ${mimetype}`); + file.on('data', data => { + console.log(`File [${fieldname}] got ${data.length} bytes`); + }); + file.on('end', () => { + console.log(`File [${fieldname}] Finished`); + }); + }); + busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + console.log(`Field [${fieldname}]: value: ${inspect(val)}`); + }); + busboy.on('finish', () => { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end(` +
+      <html>
+        <head></head>
+        <body>
+          <form method="POST" enctype="multipart/form-data">
+            <input type="file" name="filefield"><br />
+            <input type="text" name="textfield"><br />
+            <input type="submit">
+          </form>
+        </body>
+      </html>
+ `); + } +}).listen(8000, () => { + console.log('Listening for requests'); +}); + +// Example output, using http://nodejs.org/images/ryan-speaker.jpg as the file: +// +// Listening for requests +// File [filefield]: filename: ryan-speaker.jpg, encoding: binary +// File [filefield] got 11971 bytes +// Field [textfield]: value: 'testing! :-)' +// File [filefield] Finished +// Done parsing form! +``` + +* Save all incoming files to disk: + +```javascript +const http = require('node:http'); +const path = require('node:path'); +const os = require('node:os'); +const fs = require('node:fs'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + var saveTo = path.join(os.tmpdir(), path.basename(fieldname)); + file.pipe(fs.createWriteStream(saveTo)); + }); + busboy.on('finish', function() { + res.writeHead(200, { 'Connection': 'close' }); + res.end("That's all folks!"); + }); + return req.pipe(busboy); + } + res.writeHead(404); + res.end(); +}).listen(8000, function() { + console.log('Listening for requests'); +}); +``` + +* Parsing (urlencoded) with default options: + +```javascript +const http = require('node:http'); +const { inspect } = require('node:util'); + +const Busboy = require('busboy'); + +http.createServer(function(req, res) { + if (req.method === 'POST') { + const busboy = new Busboy({ headers: req.headers }); + busboy.on('file', function(fieldname, file, filename, encoding, mimetype) { + console.log('File [' + fieldname + ']: filename: ' + filename); + file.on('data', function(data) { + console.log('File [' + fieldname + '] got ' + data.length + ' bytes'); + }); + file.on('end', function() { + console.log('File [' + fieldname + '] Finished'); + }); + }); + busboy.on('field', function(fieldname, val, fieldnameTruncated, valTruncated) { + console.log('Field [' + fieldname + ']: value: ' + inspect(val)); + }); + busboy.on('finish', function() { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(busboy); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end('\ +
+               <form method="POST">\
+                <input type="text" name="textfield"><br />\
+                <select name="selectfield"><option value="9001">9001</option></select><br />\
+                <input type="checkbox" name="checkfield">Node.js rules!<br />\
+                <input type="submit">\
+              </form>\
\ + '); + } +}).listen(8000, function() { + console.log('Listening for requests'); +}); + +// Example output: +// +// Listening for requests +// Field [textfield]: value: 'testing! :-)' +// Field [selectfield]: value: '9001' +// Field [checkfield]: value: 'on' +// Done parsing form! +``` + + +API +=== + +_Busboy_ is a _Writable_ stream + +Busboy (special) events +----------------------- + +* **file**(< _string_ >fieldname, < _ReadableStream_ >stream, < _string_ >filename, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new file form field found. `transferEncoding` contains the 'Content-Transfer-Encoding' value for the file stream. `mimeType` contains the 'Content-Type' value for the file stream. + * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * The property `bytesRead` informs about the number of bytes that have been read so far. + +* **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found. + +* **partsLimit**() - Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + +* **filesLimit**() - Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + +* **fieldsLimit**() - Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + + +Busboy methods +-------------- + +* **(constructor)**(< _object_ >config) - Creates and returns a new Busboy instance. + + * The constructor takes the following valid `config` settings: + + * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers. + + * **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false). + + * **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default). + + * **fileHwm** - _integer_ - highWaterMark to use for file streams (Default: ReadableStream default). + + * **defCharset** - _string_ - Default character set to use when one isn't defined (Default: 'utf8'). + + * **preservePath** - _boolean_ - If paths in the multipart 'filename' field shall be preserved. (Default: false). + + * **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has following parameters: + + * fieldName - __string__ The name of the field. + + * contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream` + + * fileName - __string__ The name of a file supplied by the part. 
+ + (Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`) + + * **limits** - _object_ - Various limits on incoming data. Valid properties are: + + * **fieldNameSize** - _integer_ - Max field name size (in bytes) (Default: 100 bytes). + + * **fieldSize** - _integer_ - Max field value size (in bytes) (Default: 1 MiB, which is 1024 x 1024 bytes). + + * **fields** - _integer_ - Max number of non-file fields (Default: Infinity). + + * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes) (Default: Infinity). + + * **files** - _integer_ - For multipart forms, the max number of file fields (Default: Infinity). + + * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files) (Default: Infinity). + + * **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000 + + * **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920. + + * The constructor can throw errors: + + * **Busboy expected an options-Object.** - Busboy expected an Object as first parameters. + + * **Busboy expected an options-Object with headers-attribute.** - The first parameter is lacking of a headers-attribute. + + * **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this Error to prevent a potential security issue by falling silently back to the Busboy-defaults. Potential source for this Error can be the direct use of environment variables without transforming them to the type number. + + * **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse. + + * **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all. 
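+
+The limit options and events above can be combined. Below is a minimal sketch (the `req`/`res` pair is assumed to come from an ordinary `http.createServer` handler) that caps uploads and reacts to the corresponding limit events:
+
+```javascript
+const Busboy = require('@fastify/busboy');
+
+function handleUpload(req, res) {
+  const busboy = new Busboy({
+    headers: req.headers,
+    limits: {
+      files: 3,              // at most three file fields
+      fileSize: 1024 * 1024, // 1 MiB per file
+      fields: 10             // at most ten non-file fields
+    }
+  });
+
+  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
+    // A truncated file emits 'limit'; always consume the stream so 'finish' can fire.
+    file.on('limit', () => console.log(`File [${fieldname}] hit the 1 MiB limit`));
+    file.resume();
+  });
+
+  busboy.on('filesLimit', () => console.log('files limit reached, further files are discarded'));
+  busboy.on('fieldsLimit', () => console.log('fields limit reached, further fields are discarded'));
+  busboy.on('finish', () => {
+    res.writeHead(200, { Connection: 'close' });
+    res.end('Done parsing form!');
+  });
+
+  req.pipe(busboy);
+}
+```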
diff --git a/fastify-busboy/bench/busboy-form-bench-latin1.js b/fastify-busboy/bench/busboy-form-bench-latin1.js new file mode 100644 index 0000000..33634ad --- /dev/null +++ b/fastify-busboy/bench/busboy-form-bench-latin1.js @@ -0,0 +1,32 @@ +'use strict' + +const Busboy = require('busboy'); +const { createMultipartBufferForEncodingBench } = require("./createMultipartBufferForEncodingBench"); + + for (var i = 0, il = 10000; i < il; i++) { // eslint-disable-line no-var + const boundary = '-----------------------------168072824752491622650073', + busboy = new Busboy({ + headers: { + 'content-type': 'multipart/form-data; boundary=' + boundary + } + }), + buffer = createMultipartBufferForEncodingBench(boundary, 100, 'iso-8859-1'), + mb = buffer.length / 1048576; + + let processedData = 0; + busboy.on('file', (field, file, filename, encoding, mimetype) => { + file.resume() + }) + + busboy.on('error', function (err) { + }) + busboy.on('finish', function () { + }) + + const start = +new Date(); + const result = busboy.write(buffer, () => { }); + busboy.end(); + const duration = +new Date - start; + const mbPerSec = (mb / (duration / 1000)).toFixed(2); + console.log(mbPerSec + ' mb/sec'); + } \ No newline at end of file diff --git a/fastify-busboy/bench/busboy-form-bench-utf8.js b/fastify-busboy/bench/busboy-form-bench-utf8.js new file mode 100644 index 0000000..1e6e0b7 --- /dev/null +++ b/fastify-busboy/bench/busboy-form-bench-utf8.js @@ -0,0 +1,32 @@ +'use strict' + +const Busboy = require('busboy'); +const { createMultipartBufferForEncodingBench } = require("./createMultipartBufferForEncodingBench"); + + for (var i = 0, il = 10000; i < il; i++) { // eslint-disable-line no-var + const boundary = '-----------------------------168072824752491622650073', + busboy = new Busboy({ + headers: { + 'content-type': 'multipart/form-data; boundary=' + boundary + } + }), + buffer = createMultipartBufferForEncodingBench(boundary, 100, 'utf-8'), + mb = buffer.length / 1048576; + + let processedData = 0; + busboy.on('file', (field, file, filename, encoding, mimetype) => { + file.resume() + }) + + busboy.on('error', function (err) { + }) + busboy.on('finish', function () { + }) + + const start = +new Date(); + const result = busboy.write(buffer, () => { }); + busboy.end(); + const duration = +new Date - start; + const mbPerSec = (mb / (duration / 1000)).toFixed(2); + console.log(mbPerSec + ' mb/sec'); + } \ No newline at end of file diff --git a/fastify-busboy/bench/createMultipartBufferForEncodingBench.js b/fastify-busboy/bench/createMultipartBufferForEncodingBench.js new file mode 100644 index 0000000..9d20f8f --- /dev/null +++ b/fastify-busboy/bench/createMultipartBufferForEncodingBench.js @@ -0,0 +1,23 @@ +'use strict' + +function createMultipartBufferForEncodingBench(boundary, amount, charset) { + const filename = charset === 'utf-8' ? 'utf-8\'\'%c2%a3%20and%20%e2%82%ac%20rates' : `${charset}\'en\'%A3%20rates`; + const head = '--' + boundary + '\r\n' + + 'content-disposition: form-data; name="field1"\r\n' + + 'content-type: text/plain;charset=' + charset + '; filename*=' + filename + '\r\n' + + '\r\n', tail = '\r\n--' + boundary + '--\r\n', buffer = Buffer.concat([Buffer.from(head), Buffer.from(` +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus pretium leo ex, vitae dignissim felis viverra non. Praesent id quam ac elit tincidunt porttitor sed eget magna. Vivamus nibh ipsum, malesuada in eros sit amet, rutrum mattis leo. Ut nunc justo, ornare a finibus in, consectetur euismod sapien. 
Praesent facilisis, odio consectetur facilisis varius, tellus justo tristique sapien, non porttitor eros massa quis nibh. Nam blandit orci ac efficitur cursus. Nunc non mollis sapien, sit amet pretium odio. Nam vestibulum lectus ac orci egestas aliquet. Duis nec nibh quis augue consequat vulputate a a dui. + +Aenean nec laoreet dolor, commodo aliquam leo. Quisque at placerat sem. In scelerisque cursus dolor, ac aliquam metus malesuada in. Vestibulum lacinia dolor purus, at convallis ipsum iaculis id. Integer bibendum sem neque, at bibendum enim lobortis eu. Cras pretium arcu eget congue cursus. Curabitur blandit ultricies mollis. Sed lacinia quis felis ut fringilla. + +Nulla vitae lobortis metus. Morbi gravida risus tortor, in pulvinar massa lobortis vitae. Etiam vitae massa libero. Sed id tincidunt elit. Quisque congue felis vel aliquam varius. Sed a massa vitae lectus vehicula lacinia vitae ac justo. In commodo sodales nisi finibus vulputate. Suspendisse viverra, est eget fringilla gravida, nulla justo vulputate lorem, at eleifend nisi urna a eros. Sed sit amet ipsum vehicula, venenatis urna ac, interdum felis. + +Cras semper mi magna, nec iaculis neque rhoncus at. In sit amet odio sed libero fringilla commodo. Sed hendrerit pulvinar turpis sed porta. Pellentesque consequat scelerisque sapien nec iaculis. Aenean sed nunc a purus laoreet efficitur id eu orci. Mauris tincidunt auctor congue. Aliquam nisi ligula, facilisis a molestie sed, luctus vitae mauris. Mauris at facilisis elit. Maecenas sodales pretium nisi in sodales. Cras nec blandit enim. Praesent in lacus et nibh varius suscipit in sit amet nibh. + +Nam hendrerit justo eu lectus molestie, sit amet fringilla ipsum semper. Maecenas sit amet nunc elementum, interdum nunc eu, euismod ipsum. Vestibulum ut mauris sapien. Praesent nec felis ex. Fusce vel leo lobortis, mattis sem a, ullamcorper dolor. Aliquam erat volutpat. Fusce feugiat odio ut feugiat volutpat. Vestibulum magna ante, tempor in volutpat ut, gravida vitae justo. Praesent vitae eleifend eros. Integer feugiat molestie dolor, et pretium enim accumsan sit amet. Sed quis suscipit dui. Integer gravida dolor elit, sit amet fringilla odio commodo at. Quisque ut eleifend risus. 
Nunc mollis velit quis lectus laoreet pellentesque.\r\n\r\n`)]); + + const buffers = new Array(amount).fill(buffer); + buffers.push(Buffer.from(tail)); + return Buffer.concat(buffers); +} +exports.createMultipartBufferForEncodingBench = createMultipartBufferForEncodingBench; diff --git a/fastify-busboy/bench/dicer/dicer-bench-multipart-parser.js b/fastify-busboy/bench/dicer/dicer-bench-multipart-parser.js new file mode 100644 index 0000000..d24f599 --- /dev/null +++ b/fastify-busboy/bench/dicer/dicer-bench-multipart-parser.js @@ -0,0 +1,60 @@ +'use strict' + +const Dicer = require('../../deps/dicer/lib/Dicer') + +function createMultipartBuffer(boundary, size) { + const head = + '--' + boundary + '\r\n' + + 'content-disposition: form-data; name="field1"\r\n' + + '\r\n' + , tail = '\r\n--' + boundary + '--\r\n' + , buffer = Buffer.allocUnsafe(size); + + buffer.write(head, 0, 'ascii'); + buffer.write(tail, buffer.length - tail.length, 'ascii'); + return buffer; +} + +for (var i = 0, il = 10; i < il; i++) { // eslint-disable-line no-var + const boundary = '-----------------------------168072824752491622650073', + d = new Dicer({ boundary: boundary }), + mb = 100, + buffer = createMultipartBuffer(boundary, mb * 1024 * 1024), + callbacks = + { + partBegin: -1, + partEnd: -1, + headerField: -1, + headerValue: -1, + partData: -1, + end: -1, + }; + + + d.on('part', function (p) { + callbacks.partBegin++; + p.on('header', function (header) { + /*for (var h in header) + console.log('Part header: k: ' + inspect(h) + ', v: ' + inspect(header[h]));*/ + }); + p.on('data', function (data) { + callbacks.partData++; + //console.log('Part data: ' + inspect(data.toString())); + }); + p.on('end', function () { + //console.log('End of part\n'); + callbacks.partEnd++; + }); + }); + d.on('end', function () { + //console.log('End of parts'); + callbacks.end++; + }); + + const start = +new Date(); + d.write(buffer); + const duration = +new Date - start; + const mbPerSec = (mb / (duration / 1000)).toFixed(2); + + console.log(mbPerSec + ' mb/sec'); +} diff --git a/fastify-busboy/bench/dicer/formidable-bench-multipart-parser.js b/fastify-busboy/bench/dicer/formidable-bench-multipart-parser.js new file mode 100644 index 0000000..0470771 --- /dev/null +++ b/fastify-busboy/bench/dicer/formidable-bench-multipart-parser.js @@ -0,0 +1,71 @@ +'use strict' + +require('../node_modules/formidable/test/common'); +var multipartParser = require('../node_modules/formidable/lib/multipart_parser'), + MultipartParser = multipartParser.MultipartParser, + parser = new MultipartParser(), + boundary = '-----------------------------168072824752491622650073', + mb = 100, + buffer = createMultipartBuffer(boundary, mb * 1024 * 1024), + callbacks = + { partBegin: -1, + partEnd: -1, + headerField: -1, + headerValue: -1, + partData: -1, + end: -1, + }; + + +parser.initWithBoundary(boundary); +parser.onHeaderField = function() { + callbacks.headerField++; +}; + +parser.onHeaderValue = function() { + callbacks.headerValue++; +}; + +parser.onPartBegin = function() { + callbacks.partBegin++; +}; + +parser.onPartData = function() { + callbacks.partData++; +}; + +parser.onPartEnd = function() { + callbacks.partEnd++; +}; + +parser.onEnd = function() { + callbacks.end++; +}; + +var start = +new Date(), + nparsed = parser.write(buffer), + duration = +new Date - start, + mbPerSec = (mb / (duration / 1000)).toFixed(2); + +console.log(mbPerSec+' mb/sec'); + +//assert.equal(nparsed, buffer.length); + +function createMultipartBuffer(boundary, size) { + var 
head = + '--'+boundary+'\r\n' + + 'content-disposition: form-data; name="field1"\r\n' + + '\r\n' + , tail = '\r\n--'+boundary+'--\r\n' + , buffer = Buffer.allocUnsafe(size); + + buffer.write(head, 'ascii', 0); + buffer.write(tail, 'ascii', buffer.length - tail.length); + return buffer; +} + +process.on('exit', function() { + /*for (var k in callbacks) { + assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]); + }*/ +}); diff --git a/fastify-busboy/bench/dicer/multipartser-bench-multipart-parser.js b/fastify-busboy/bench/dicer/multipartser-bench-multipart-parser.js new file mode 100644 index 0000000..40ca00b --- /dev/null +++ b/fastify-busboy/bench/dicer/multipartser-bench-multipart-parser.js @@ -0,0 +1,57 @@ +'use strict' + +var multipartser = require('multipartser'), + boundary = '-----------------------------168072824752491622650073', + parser = multipartser(), + mb = 100, + buffer = createMultipartBuffer(boundary, mb * 1024 * 1024), + callbacks = + { partBegin: -1, + partEnd: -1, + headerField: -1, + headerValue: -1, + partData: -1, + end: -1, + }; + +parser.boundary( boundary ); + +parser.on( 'part', function ( part ) { +}); + +parser.on( 'end', function () { + //console.log( 'completed parsing' ); +}); + +parser.on( 'error', function ( error ) { + console.error( error ); +}); + +var start = +new Date(), + nparsed = parser.data(buffer), + nend = parser.end(), + duration = +new Date - start, + mbPerSec = (mb / (duration / 1000)).toFixed(2); + +console.log(mbPerSec+' mb/sec'); + +//assert.equal(nparsed, buffer.length); + +function createMultipartBuffer(boundary, size) { + var head = + '--'+boundary+'\r\n' + + 'content-disposition: form-data; name="field1"\r\n' + + '\r\n' + , tail = '\r\n--'+boundary+'--\r\n' + , buffer = Buffer.allocUnsafe(size); + + buffer.write(head, 'ascii', 0); + buffer.write(tail, 'ascii', buffer.length - tail.length); + return buffer; +} + +process.on('exit', function() { + /*for (var k in callbacks) { + assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]); + }*/ +}); diff --git a/fastify-busboy/bench/dicer/multiparty-bench-multipart-parser.js b/fastify-busboy/bench/dicer/multiparty-bench-multipart-parser.js new file mode 100644 index 0000000..ab79ec0 --- /dev/null +++ b/fastify-busboy/bench/dicer/multiparty-bench-multipart-parser.js @@ -0,0 +1,78 @@ +'use strict' + +var assert = require('node:assert'), + Form = require('multiparty').Form, + boundary = '-----------------------------168072824752491622650073', + mb = 100, + buffer = createMultipartBuffer(boundary, mb * 1024 * 1024), + callbacks = + { partBegin: -1, + partEnd: -1, + headerField: -1, + headerValue: -1, + partData: -1, + end: -1, + }; + +var form = new Form({ boundary: boundary }); + +hijack('onParseHeaderField', function() { + callbacks.headerField++; +}); + +hijack('onParseHeaderValue', function() { + callbacks.headerValue++; +}); + +hijack('onParsePartBegin', function() { + callbacks.partBegin++; +}); + +hijack('onParsePartData', function() { + callbacks.partData++; +}); + +hijack('onParsePartEnd', function() { + callbacks.partEnd++; +}); + +form.on('finish', function() { + callbacks.end++; +}); + +var start = new Date(); +form.write(buffer, function(err) { + var duration = new Date() - start; + assert.ifError(err); + var mbPerSec = (mb / (duration / 1000)).toFixed(2); + console.log(mbPerSec+' mb/sec'); +}); + +//assert.equal(nparsed, buffer.length); + +function createMultipartBuffer(boundary, size) { + var head = + '--'+boundary+'\r\n' + + 'content-disposition: form-data; 
name="field1"\r\n' + + '\r\n' + , tail = '\r\n--'+boundary+'--\r\n' + , buffer = Buffer.allocUnsafe(size); + + buffer.write(head, 'ascii', 0); + buffer.write(tail, 'ascii', buffer.length - tail.length); + return buffer; +} + +process.on('exit', function() { + /*for (var k in callbacks) { + assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]); + }*/ +}); + +function hijack(name, fn) { + var oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; +} diff --git a/fastify-busboy/bench/dicer/parted-bench-multipart-parser.js b/fastify-busboy/bench/dicer/parted-bench-multipart-parser.js new file mode 100644 index 0000000..e0a4670 --- /dev/null +++ b/fastify-busboy/bench/dicer/parted-bench-multipart-parser.js @@ -0,0 +1,65 @@ +'use strict' + +// A special, edited version of the multipart parser from parted is needed here +// because otherwise it attempts to do some things above and beyond just parsing +// -- like saving to disk and whatnot + +var assert = require('node:assert'); +var Parser = require('./parted-multipart'), + boundary = '-----------------------------168072824752491622650073', + parser = new Parser('boundary=' + boundary), + mb = 100, + buffer = createMultipartBuffer(boundary, mb * 1024 * 1024), + callbacks = + { partBegin: -1, + partEnd: -1, + headerField: -1, + headerValue: -1, + partData: -1, + end: -1, + }; + + +parser.on('header', function() { + //callbacks.headerField++; +}); + +parser.on('data', function() { + //callbacks.partBegin++; +}); + +parser.on('part', function() { + +}); + +parser.on('end', function() { + //callbacks.end++; +}); + +var start = +new Date(), + nparsed = parser.write(buffer), + duration = +new Date - start, + mbPerSec = (mb / (duration / 1000)).toFixed(2); + +console.log(mbPerSec+' mb/sec'); + +//assert.equal(nparsed, buffer.length); + +function createMultipartBuffer(boundary, size) { + var head = + '--'+boundary+'\r\n' + + 'content-disposition: form-data; name="field1"\r\n' + + '\r\n' + , tail = '\r\n--'+boundary+'--\r\n' + , buffer = Buffer.allocUnsafe(size); + + buffer.write(head, 'ascii', 0); + buffer.write(tail, 'ascii', buffer.length - tail.length); + return buffer; +} + +process.on('exit', function() { + /*for (var k in callbacks) { + assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]); + }*/ +}); diff --git a/fastify-busboy/bench/dicer/parted-multipart.js b/fastify-busboy/bench/dicer/parted-multipart.js new file mode 100644 index 0000000..f214ff4 --- /dev/null +++ b/fastify-busboy/bench/dicer/parted-multipart.js @@ -0,0 +1,486 @@ +'use strict' + +/** + * Parted (https://github.com/chjj/parted) + * A streaming multipart state parser. + * Copyright (c) 2011, Christopher Jeffrey. 
(MIT Licensed) + */ + +var fs = require('node:fs') + , path = require('node:path') + , EventEmitter = require('node:events').EventEmitter + , StringDecoder = require('node:string_decoder').StringDecoder + , set = require('qs').set + , each = Array.prototype.forEach; + +/** + * Character Constants + */ + +var DASH = '-'.charCodeAt(0) + , CR = '\r'.charCodeAt(0) + , LF = '\n'.charCodeAt(0) + , COLON = ':'.charCodeAt(0) + , SPACE = ' '.charCodeAt(0); + +/** + * Parser + */ + +var Parser = function(type, options) { + if (!(this instanceof Parser)) { + return new Parser(type, options); + } + + EventEmitter.call(this); + + this.writable = true; + this.readable = true; + + this.options = options || {}; + + var key = grab(type, 'boundary'); + if (!key) { + return this._error('No boundary key found.'); + } + + this.key = Buffer.allocUnsafe('\r\n--' + key); + + this._key = {}; + each.call(this.key, function(ch) { + this._key[ch] = true; + }, this); + + this.state = 'start'; + this.pending = 0; + this.written = 0; + this.writtenDisk = 0; + this.buff = Buffer.allocUnsafe(200); + + this.preamble = true; + this.epilogue = false; + + this._reset(); +}; + +Parser.prototype.__proto__ = EventEmitter.prototype; + +/** + * Parsing + */ + +Parser.prototype.write = function(data) { + if (!this.writable + || this.epilogue) return; + + try { + this._parse(data); + } catch (e) { + this._error(e); + } + + return true; +}; + +Parser.prototype.end = function(data) { + if (!this.writable) return; + + if (data) this.write(data); + + if (!this.epilogue) { + return this._error('Message underflow.'); + } + + return true; +}; + +Parser.prototype._parse = function(data) { + var i = 0 + , len = data.length + , buff = this.buff + , key = this.key + , ch + , val + , j; + + for (; i < len; i++) { + if (this.pos >= 200) { + return this._error('Potential buffer overflow.'); + } + + ch = data[i]; + + switch (this.state) { + case 'start': + switch (ch) { + case DASH: + this.pos = 3; + this.state = 'key'; + break; + default: + break; + } + break; + case 'key': + if (this.pos === key.length) { + this.state = 'key_end'; + i--; + } else if (ch !== key[this.pos]) { + if (this.preamble) { + this.state = 'start'; + i--; + } else { + this.state = 'body'; + val = this.pos - i; + if (val > 0) { + this._write(key.slice(0, val)); + } + i--; + } + } else { + this.pos++; + } + break; + case 'key_end': + switch (ch) { + case CR: + this.state = 'key_line_end'; + break; + case DASH: + this.state = 'key_dash_end'; + break; + default: + return this._error('Expected CR or DASH.'); + } + break; + case 'key_line_end': + switch (ch) { + case LF: + if (this.preamble) { + this.preamble = false; + } else { + this._finish(); + } + this.state = 'header_name'; + this.pos = 0; + break; + default: + return this._error('Expected CR.'); + } + break; + case 'key_dash_end': + switch (ch) { + case DASH: + this.epilogue = true; + this._finish(); + return; + default: + return this._error('Expected DASH.'); + } + case 'header_name': + switch (ch) { + case COLON: + this.header = buff.toString('ascii', 0, this.pos); + this.pos = 0; + this.state = 'header_val'; + break; + default: + buff[this.pos++] = ch | 32; + break; + } + break; + case 'header_val': + switch (ch) { + case CR: + this.state = 'header_val_end'; + break; + case SPACE: + if (this.pos === 0) { + break; + } + // FALL-THROUGH + default: + buff[this.pos++] = ch; + break; + } + break; + case 'header_val_end': + switch (ch) { + case LF: + val = buff.toString('ascii', 0, this.pos); + this._header(this.header, val); 
+ this.pos = 0; + this.state = 'header_end'; + break; + default: + return this._error('Expected LF.'); + } + break; + case 'header_end': + switch (ch) { + case CR: + this.state = 'head_end'; + break; + default: + this.state = 'header_name'; + i--; + break; + } + break; + case 'head_end': + switch (ch) { + case LF: + this.state = 'body'; + i++; + if (i >= len) return; + data = data.slice(i); + i = -1; + len = data.length; + break; + default: + return this._error('Expected LF.'); + } + break; + case 'body': + switch (ch) { + case CR: + if (i > 0) { + this._write(data.slice(0, i)); + } + this.pos = 1; + this.state = 'key'; + data = data.slice(i); + i = 0; + len = data.length; + break; + default: + // boyer-moore-like algorithm + // at felixge's suggestion + while ((j = i + key.length - 1) < len) { + if (this._key[data[j]]) break; + i = j; + } + break; + } + break; + } + } + + if (this.state === 'body') { + this._write(data); + } +}; + +Parser.prototype._header = function(name, val) { + /*if (name === 'content-disposition') { + this.field = grab(val, 'name'); + this.file = grab(val, 'filename'); + + if (this.file) { + this.data = stream(this.file, this.options.path); + } else { + this.decode = new StringDecoder('utf8'); + this.data = ''; + } + }*/ + + return this.emit('header', name, val); +}; + +Parser.prototype._write = function(data) { + /*if (this.data == null) { + return this._error('No disposition.'); + } + + if (this.file) { + this.data.write(data); + this.writtenDisk += data.length; + } else { + this.data += this.decode.write(data); + this.written += data.length; + }*/ + + this.emit('data', data); +}; + +Parser.prototype._reset = function() { + this.pos = 0; + this.decode = null; + this.field = null; + this.data = null; + this.file = null; + this.header = null; +}; + +Parser.prototype._error = function(err) { + this.destroy(); + this.emit('error', typeof err === 'string' + ? new Error(err) + : err); +}; + +Parser.prototype.destroy = function(err) { + this.writable = false; + this.readable = false; + this._reset(); +}; + +Parser.prototype._finish = function() { + var self = this + , field = this.field + , data = this.data + , file = this.file + , part; + + this.pending++; + + this._reset(); + + if (data && data.path) { + part = data.path; + data.end(next); + } else { + part = data; + next(); + } + + function next() { + if (!self.readable) return; + + self.pending--; + + self.emit('part', field, part); + + if (data && data.path) { + self.emit('file', field, part, file); + } + + if (self.epilogue && !self.pending) { + self.emit('end'); + self.destroy(); + } + } +}; + +/** + * Uploads + */ + +Parser.root = process.platform === 'win32' + ? 
'C:/Temp' + : '/tmp'; + +/** + * Middleware + */ + +Parser.middleware = function(options) { + options = options || {}; + return function(req, res, next) { + if (options.ensureBody) { + req.body = {}; + } + + if (req.method === 'GET' + || req.method === 'HEAD' + || req._multipart) return next(); + + req._multipart = true; + + var type = req.headers['content-type']; + + if (type) type = type.split(';', 1)[0].trim().toLowerCase(); + + if (type === 'multipart/form-data') { + Parser.handle(req, res, next, options); + } else { + next(); + } + }; +}; + +/** + * Handler + */ + +Parser.handle = function(req, res, next, options) { + var parser = new Parser(req.headers['content-type'], options) + , diskLimit = options.diskLimit + , limit = options.limit + , parts = {} + , files = {}; + + parser.on('error', function(err) { + req.destroy(); + next(err); + }); + + parser.on('part', function(field, part) { + set(parts, field, part); + }); + + parser.on('file', function(field, path, name) { + set(files, field, { + path: path, + name: name, + toString: function() { + return path; + } + }); + }); + + parser.on('data', function() { + if (this.writtenDisk > diskLimit || this.written > limit) { + this.emit('error', new Error('Overflow.')); + this.destroy(); + } + }); + + parser.on('end', next); + + req.body = parts; + req.files = files; + req.pipe(parser); +}; + +/** + * Helpers + */ + +var isWindows = process.platform === 'win32'; + +var stream = function(name, dir) { + var ext = path.extname(name) || '' + , name = path.basename(name, ext) || '' + , dir = dir || Parser.root + , tag; + + tag = Math.random().toString(36).substring(2); + + name = name.substring(0, 200) + '.' + tag; + name = path.join(dir, name) + ext.substring(0, 6); + name = name.replace(/\0/g, ''); + + if (isWindows) { + name = name.replace(/[:*<>|"?]/g, ''); + } + + return fs.createWriteStream(name); +}; + +var grab = function(str, name) { + if (!str) return; + + var rx = new RegExp('\\b' + name + '\\s*=\\s*("[^"]+"|\'[^\']+\'|[^;,]+)', 'i') + , cap = rx.exec(str); + + if (cap) { + return cap[1].trim().replace(/^['"]|['"]$/g, ''); + } +}; + +/** + * Expose + */ + +module.exports = Parser; diff --git a/fastify-busboy/bench/fastify-busboy-form-bench-latin1.js b/fastify-busboy/bench/fastify-busboy-form-bench-latin1.js new file mode 100644 index 0000000..7ca5f44 --- /dev/null +++ b/fastify-busboy/bench/fastify-busboy-form-bench-latin1.js @@ -0,0 +1,31 @@ +'use strict' + +const Busboy = require('../lib/main'); +const { createMultipartBufferForEncodingBench } = require("./createMultipartBufferForEncodingBench"); + + for (var i = 0, il = 10000; i < il; i++) { // eslint-disable-line no-var + const boundary = '-----------------------------168072824752491622650073', + busboy = new Busboy({ + headers: { + 'content-type': 'multipart/form-data; boundary=' + boundary + } + }), + buffer = createMultipartBufferForEncodingBench(boundary, 100, 'iso-8859-1'), + mb = buffer.length / 1048576; + + busboy.on('file', (field, file, filename, encoding, mimetype) => { + file.resume() + }) + + busboy.on('error', function (err) { + }) + busboy.on('finish', function () { + }) + + const start = +new Date(); + busboy.write(buffer, () => { }); + busboy.end(); + const duration = +new Date - start; + const mbPerSec = (mb / (duration / 1000)).toFixed(2); + console.log(mbPerSec + ' mb/sec'); + } diff --git a/fastify-busboy/bench/fastify-busboy-form-bench-utf8.js b/fastify-busboy/bench/fastify-busboy-form-bench-utf8.js new file mode 100644 index 0000000..6c35071 --- /dev/null +++ 
b/fastify-busboy/bench/fastify-busboy-form-bench-utf8.js @@ -0,0 +1,31 @@ +'use strict' + +const Busboy = require('../lib/main'); +const { createMultipartBufferForEncodingBench } = require("./createMultipartBufferForEncodingBench"); + + for (var i = 0, il = 10000; i < il; i++) { // eslint-disable-line no-var + const boundary = '-----------------------------168072824752491622650073', + busboy = new Busboy({ + headers: { + 'content-type': 'multipart/form-data; boundary=' + boundary + } + }), + buffer = createMultipartBufferForEncodingBench(boundary, 100, 'utf-8'), + mb = buffer.length / 1048576; + + busboy.on('file', (field, file, filename, encoding, mimetype) => { + file.resume() + }) + + busboy.on('error', function (err) { + }) + busboy.on('finish', function () { + }) + + const start = +new Date(); + busboy.write(buffer, () => { }); + busboy.end(); + const duration = +new Date - start; + const mbPerSec = (mb / (duration / 1000)).toFixed(2); + console.log(mbPerSec + ' mb/sec'); + } diff --git a/fastify-busboy/bench/parse-params.js b/fastify-busboy/bench/parse-params.js new file mode 100644 index 0000000..439a372 --- /dev/null +++ b/fastify-busboy/bench/parse-params.js @@ -0,0 +1,21 @@ +'use strict' + +const parseParams = require('../lib/utils/parseParams') +const { Bench } = require('tinybench'); +const bench = new Bench(); + +const simple = 'video/ogg' +const complex = "'text/plain; filename*=utf-8''%c2%a3%20and%20%e2%82%ac%20rates'" + +bench + .add(simple, function () { parseParams(simple) }) + .add(complex, function () { parseParams(complex) }) + .run() + .then((tasks) => { + const errors = tasks.map(t => t.result?.error).filter((t) => t) + if (errors.length) { + errors.map((e) => console.error(e)) + } else { + console.table(bench.table()) + } + }) diff --git a/fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_12.json b/fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_12.json new file mode 100644 index 0000000..69468dd --- /dev/null +++ b/fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_12.json @@ -0,0 +1,10 @@ +{ + "runtimeVersion": "12.22.7, V8 7.8.279.23-node.56", + "benchmarkName": "Busboy comparison", + "benchmarkEntryName": "busboy", + "benchmarkCycles": 10, + "benchmarkCycleSamples": 50, + "warmupCycles": 10, + "meanTimeNs": 1945927.3472222222, + "meanTimeMs": 1.9459273472222223 +} \ No newline at end of file diff --git a/fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_16.json b/fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_16.json new file mode 100644 index 0000000..b4c492a --- /dev/null +++ b/fastify-busboy/benchmarks/_results/Busboy_comparison-busboy-Node_16.json @@ -0,0 +1,10 @@ +{ + "runtimeVersion": "16.13.0, V8 9.4.146.19-node.13", + "benchmarkName": "Busboy comparison", + "benchmarkEntryName": "busboy", + "benchmarkCycles": 2000, + "benchmarkCycleSamples": 50, + "warmupCycles": 1000, + "meanTimeNs": 340114.0411908194, + "meanTimeMs": 0.3401140411908194 +} \ No newline at end of file diff --git a/fastify-busboy/benchmarks/_results/Busboy_comparison-fastify-busboy-Node_16.json b/fastify-busboy/benchmarks/_results/Busboy_comparison-fastify-busboy-Node_16.json new file mode 100644 index 0000000..30f5d1e --- /dev/null +++ b/fastify-busboy/benchmarks/_results/Busboy_comparison-fastify-busboy-Node_16.json @@ -0,0 +1,10 @@ +{ + "runtimeVersion": "16.13.0, V8 9.4.146.19-node.13", + "benchmarkName": "Busboy comparison", + "benchmarkEntryName": "fastify-busboy", + "benchmarkCycles": 2000, + 
"benchmarkCycleSamples": 50, + "warmupCycles": 1000, + "meanTimeNs": 270984.48082281026, + "meanTimeMs": 0.27098448082281024 +} \ No newline at end of file diff --git a/fastify-busboy/benchmarks/busboy/contestants/busboy.js b/fastify-busboy/benchmarks/busboy/contestants/busboy.js new file mode 100644 index 0000000..6cb3414 --- /dev/null +++ b/fastify-busboy/benchmarks/busboy/contestants/busboy.js @@ -0,0 +1,40 @@ +'use strict' + +const Busboy = require('busboy') +const { buffer, boundary } = require('../data') + +function process () { + const busboy = Busboy({ + headers: { + 'content-type': 'multipart/form-data; boundary=' + boundary + } + }) + let processedData = '' + + return new Promise((resolve, reject) => { + busboy.on('file', (field, file, filename, encoding, mimetype) => { + // console.log('read file') + file.on('data', (data) => { + processedData += data.toString() + // console.log(`File [${filename}] got ${data.length} bytes`); + }) + file.on('end', (fieldname) => { + // console.log(`File [${fieldname}] Finished`); + }) + }) + + busboy.on('error', function (err) { + reject(err) + }) + busboy.on('finish', function () { + resolve(processedData) + }) + busboy.write(buffer, () => { }) + + busboy.end() + }) +} + +module.exports = { + process +} diff --git a/fastify-busboy/benchmarks/busboy/contestants/fastify-busboy.js b/fastify-busboy/benchmarks/busboy/contestants/fastify-busboy.js new file mode 100644 index 0000000..6750f77 --- /dev/null +++ b/fastify-busboy/benchmarks/busboy/contestants/fastify-busboy.js @@ -0,0 +1,41 @@ +'use strict' + +const Busboy = require('../../../lib/main') +const { buffer, boundary } = require('../data') + +function process () { + const busboy = new Busboy({ + headers: { + 'content-type': 'multipart/form-data; boundary=' + boundary + } + }) + + let processedData = '' + + return new Promise((resolve, reject) => { + busboy.on('file', (field, file, filename, encoding, mimetype) => { + // console.log('read file') + file.on('data', (data) => { + processedData += data.toString() + // console.log(`File [${filename}] got ${data.length} bytes`); + }) + file.on('end', (fieldname) => { + // console.log(`File [${fieldname}] Finished`); + }) + }) + + busboy.on('error', function (err) { + reject(err) + }) + busboy.on('finish', function () { + resolve(processedData) + }) + busboy.write(buffer, () => { }) + + busboy.end() + }) +} + +module.exports = { + process +} diff --git a/fastify-busboy/benchmarks/busboy/data.js b/fastify-busboy/benchmarks/busboy/data.js new file mode 100644 index 0000000..4fdefae --- /dev/null +++ b/fastify-busboy/benchmarks/busboy/data.js @@ -0,0 +1,34 @@ +'use strict' + +const boundary = '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k' +const randomContent = Buffer.from(makeString(1024 * 500), 'utf8') +const buffer = createMultipartBuffer(boundary) + +function makeString (length) { + let result = '' + const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789' + const charactersLength = characters.length + for (var i = 0; i < length; i++) { // eslint-disable-line no-var + result += characters.charAt(Math.floor(Math.random() * + charactersLength)) + } + return result +} + +function createMultipartBuffer (boundary) { + const payload = [ + '--' + boundary, + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + randomContent, + '--' + boundary + '--' + ].join('\r\n') + return Buffer.from(payload, 'ascii') +} + +module.exports = { + boundary, + 
buffer, + randomContent +} diff --git a/fastify-busboy/benchmarks/busboy/executioner.js b/fastify-busboy/benchmarks/busboy/executioner.js new file mode 100644 index 0000000..524912c --- /dev/null +++ b/fastify-busboy/benchmarks/busboy/executioner.js @@ -0,0 +1,50 @@ +'use strict' + +const { process: processBusboy } = require('./contestants/busboy') +const { process: processFastify } = require('./contestants/fastify-busboy') +const { getCommonBuilder } = require('../common/commonBuilder') +const { validateAccuracy } = require('./validator') +const { resolveContestant } = require('../common/contestantResolver') +const { outputResults } = require('../common/resultUtils') + +const contestants = { + busboy: measureBusboy, + fastify: measureFastify +} + +async function measureBusboy () { + const benchmark = getCommonBuilder() + .benchmarkName('Busboy comparison') + .benchmarkEntryName('busboy') + .asyncFunctionUnderTest(processBusboy) + .build() + const benchmarkResults = await benchmark.executeAsync() + outputResults(benchmark, benchmarkResults) +} + +async function measureFastify () { + const benchmark = getCommonBuilder() + .benchmarkName('Busboy comparison') + .benchmarkEntryName('fastify-busboy') + .asyncFunctionUnderTest(processFastify) + .build() + const benchmarkResults = await benchmark.executeAsync() + outputResults(benchmark, benchmarkResults) +} + +function execute () { + return validateAccuracy(processBusboy()) + .then(() => { + return validateAccuracy(processFastify()) + }) + .then(() => { + const contestant = resolveContestant(contestants) + return contestant() + }).then(() => { + console.log('all done') + }).catch((err) => { + console.error(`Something went wrong: ${err.message}`) + }) +} + +execute() diff --git a/fastify-busboy/benchmarks/busboy/regenerate.cmd b/fastify-busboy/benchmarks/busboy/regenerate.cmd new file mode 100644 index 0000000..87c0768 --- /dev/null +++ b/fastify-busboy/benchmarks/busboy/regenerate.cmd @@ -0,0 +1,17 @@ +rem Make sure to run this in Admin account +rem +call npm run install-node +timeout /t 2 +call nvm use 17.2.0 +timeout /t 2 +call npm run benchmark-all +call nvm use 16.13.1 +timeout /t 2 +call npm run benchmark-all +call nvm use 14.18.2 +timeout /t 2 +call npm run benchmark-all +call nvm use 12.22.7 +timeout /t 2 +call npm run benchmark-all +call npm run combine-results diff --git a/fastify-busboy/benchmarks/busboy/validator.js b/fastify-busboy/benchmarks/busboy/validator.js new file mode 100644 index 0000000..a86cc33 --- /dev/null +++ b/fastify-busboy/benchmarks/busboy/validator.js @@ -0,0 +1,15 @@ +'use strict' + +const { validateEqual } = require('validation-utils') +const { randomContent } = require('./data') + +const EXPECTED_RESULT = randomContent.toString() + +async function validateAccuracy (actualResultPromise) { + const result = await actualResultPromise + validateEqual(result, EXPECTED_RESULT) +} + +module.exports = { + validateAccuracy +} diff --git a/fastify-busboy/benchmarks/common/commonBuilder.js b/fastify-busboy/benchmarks/common/commonBuilder.js new file mode 100644 index 0000000..b5707aa --- /dev/null +++ b/fastify-busboy/benchmarks/common/commonBuilder.js @@ -0,0 +1,46 @@ +'use strict' + +const { validateNotNil } = require('validation-utils') +const { BenchmarkBuilder } = require('photofinish') +const getopts = require('getopts') + +const options = getopts(process.argv.slice(1), { + alias: { + preset: 'p' + }, + default: {} +}) + +const PRESET = { + LOW: (builder) => { + return builder + .warmupCycles(1000) + 
.benchmarkCycles(1000) + }, + + MEDIUM: (builder) => { + return builder + .warmupCycles(1000) + .benchmarkCycles(2000) + }, + + HIGH: (builder) => { + return builder + .warmupCycles(1000) + .benchmarkCycles(10000) + } +} + +function getCommonBuilder () { + const presetId = options.preset || 'MEDIUM' + const preset = validateNotNil(PRESET[presetId.toUpperCase()], `Unknown preset: ${presetId}`) + + const builder = new BenchmarkBuilder() + preset(builder) + return builder + .benchmarkCycleSamples(50) +} + +module.exports = { + getCommonBuilder +} diff --git a/fastify-busboy/benchmarks/common/contestantResolver.js b/fastify-busboy/benchmarks/common/contestantResolver.js new file mode 100644 index 0000000..7cfc90e --- /dev/null +++ b/fastify-busboy/benchmarks/common/contestantResolver.js @@ -0,0 +1,26 @@ +'use strict' + +const getopts = require('getopts') + +const options = getopts(process.argv.slice(1), { + alias: { + contestant: 'c' + }, + default: {} +}) + +function resolveContestant (contestants) { + const contestantId = options.contestant + const contestant = Number.isFinite(contestantId) + ? Object.values(contestants)[contestantId] + : contestants[contestantId] + + if (!contestant) { + throw new Error(`Unknown contestant ${contestantId}`) + } + return contestant +} + +module.exports = { + resolveContestant +} diff --git a/fastify-busboy/benchmarks/common/executionUtils.js b/fastify-busboy/benchmarks/common/executionUtils.js new file mode 100644 index 0000000..8c52ec8 --- /dev/null +++ b/fastify-busboy/benchmarks/common/executionUtils.js @@ -0,0 +1,18 @@ +'use strict' + +const { getCommonBuilder } = require('./commonBuilder') +const { outputResults } = require('./resultUtils') + +function getMeasureFn (constestandId, fn) { + return () => { + const benchmark = getCommonBuilder() + .benchmarkEntryName(constestandId) + .functionUnderTest(fn).build() + const benchmarkResults = benchmark.execute() + outputResults(benchmark, benchmarkResults) + } +} + +module.exports = { + getMeasureFn +} diff --git a/fastify-busboy/benchmarks/common/resultUtils.js b/fastify-busboy/benchmarks/common/resultUtils.js new file mode 100644 index 0000000..ec7bce7 --- /dev/null +++ b/fastify-busboy/benchmarks/common/resultUtils.js @@ -0,0 +1,17 @@ +'use strict' + +const { exportResults } = require('photofinish') + +function outputResults (benchmark, benchmarkResults) { + console.log( + `Mean time for ${ + benchmark.benchmarkEntryName + } is ${benchmarkResults.meanTime.getTimeInNanoSeconds()} nanoseconds` + ) + + exportResults(benchmarkResults, { exportPath: '_results' }) +} + +module.exports = { + outputResults +} diff --git a/fastify-busboy/benchmarks/common/resultsCombinator.js b/fastify-busboy/benchmarks/common/resultsCombinator.js new file mode 100644 index 0000000..253211b --- /dev/null +++ b/fastify-busboy/benchmarks/common/resultsCombinator.js @@ -0,0 +1,54 @@ +'use strict' + +const fs = require('node:fs') +const path = require('node:path') +const getopts = require('getopts') +const systemInformation = require('systeminformation') +const { loadResults } = require('photofinish') + +const options = getopts(process.argv.slice(1), { + alias: { + resultsDir: 'r', + precision: 'p' + }, + default: {} +}) + +const { generateTable } = require('photofinish') + +async function getSpecs () { + const cpuInfo = await systemInformation.cpu() + + return { + cpu: { + brand: cpuInfo.brand, + speed: `${cpuInfo.speed} GHz` + } + } +} + +async function saveTable () { + const baseResultsDir = options.resultsDir + const 
benchmarkResults = await loadResults(baseResultsDir) + + const table = generateTable(benchmarkResults, { + precision: options.precision, + sortBy: [ + { field: 'meanTimeNs', order: 'asc' } + ] + }) + + const specs = await getSpecs() + + console.log(specs) + console.log(table) + + const targetFilePath = path.resolve(baseResultsDir, 'results.md') + fs.writeFileSync( + targetFilePath, + `${table}` + + `\n\n**Specs**: ${specs.cpu.brand} (${specs.cpu.speed})` + ) +} + +saveTable() diff --git a/fastify-busboy/benchmarks/package.json b/fastify-busboy/benchmarks/package.json new file mode 100644 index 0000000..2574b8b --- /dev/null +++ b/fastify-busboy/benchmarks/package.json @@ -0,0 +1,21 @@ +{ + "name": "busboy-benchmarks", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "getopts": "^2.3.0", + "photofinish": "^1.8.0", + "systeminformation": "^5.9.15", + "tslib": "^2.3.1", + "validation-utils": "^7.0.0" + }, + "scripts": { + "install-node": "nvm install 17.2.0 && nvm install 16.13.1 && nvm install 14.18.2 && nvm install 12.22.7", + "benchmark-busboy": "node busboy/executioner.js -c 0", + "benchmark-fastify": "node busboy/executioner.js -c 1", + "benchmark-all": "npm run benchmark-busboy -- -p high && npm run benchmark-fastify -- -p high", + "benchmark-all-medium": "npm run benchmark-busboy -- -p medium && npm run benchmark-fastify -- -p medium", + "benchmark-all-low": "npm run benchmark-busboy -- -p low && npm run benchmark-fastify -- -p low", + "combine-results": "node common/resultsCombinator.js -r _results -p 6" + } +} diff --git a/fastify-busboy/deps/dicer/LICENSE b/fastify-busboy/deps/dicer/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/fastify-busboy/deps/dicer/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
\ No newline at end of file diff --git a/fastify-busboy/deps/dicer/lib/Dicer.js b/fastify-busboy/deps/dicer/lib/Dicer.js new file mode 100644 index 0000000..79da160 --- /dev/null +++ b/fastify-busboy/deps/dicer/lib/Dicer.js @@ -0,0 +1,207 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const inherits = require('node:util').inherits + +const StreamSearch = require('../../streamsearch/sbmh') + +const PartStream = require('./PartStream') +const HeaderParser = require('./HeaderParser') + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + 
+Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer diff --git a/fastify-busboy/deps/dicer/lib/HeaderParser.js b/fastify-busboy/deps/dicer/lib/HeaderParser.js new file mode 100644 index 0000000..65f667b --- /dev/null +++ b/fastify-busboy/deps/dicer/lib/HeaderParser.js @@ -0,0 +1,100 @@ +'use strict' + +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits +const getLimit = require('../../../lib/utils/getLimit') + +const StreamSearch = require('../../streamsearch/sbmh') + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser diff --git a/fastify-busboy/deps/dicer/lib/PartStream.js b/fastify-busboy/deps/dicer/lib/PartStream.js new file mode 100644 index 0000000..c91da1c --- /dev/null +++ b/fastify-busboy/deps/dicer/lib/PartStream.js @@ -0,0 +1,13 @@ +'use strict' + +const inherits = require('node:util').inherits +const ReadableStream = require('node:stream').Readable + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream diff --git a/fastify-busboy/deps/dicer/lib/dicer.d.ts b/fastify-busboy/deps/dicer/lib/dicer.d.ts new file mode 100644 index 0000000..3c5b896 --- /dev/null +++ b/fastify-busboy/deps/dicer/lib/dicer.d.ts @@ -0,0 +1,164 @@ +// Type definitions for dicer 0.2 +// Project: https://github.com/mscdex/dicer +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 2.2 +/// + +import stream = require("stream"); + +// tslint:disable:unified-signatures + +/** + * A very fast streaming multipart parser for node.js. + * Dicer is a WritableStream + * + * Dicer (special) events: + * - on('finish', ()) - Emitted when all parts have been parsed and the Dicer instance has been ended. + * - on('part', (stream: PartStream)) - Emitted when a new part has been found. + * - on('preamble', (stream: PartStream)) - Emitted for preamble if you should happen to need it (can usually be ignored). + * - on('trailer', (data: Buffer)) - Emitted when trailing data was found after the terminating boundary (as with the preamble, this can usually be ignored too). + */ +export class Dicer extends stream.Writable { + /** + * Creates and returns a new Dicer instance with the following valid config settings: + * + * @param config The configuration to use + */ + constructor(config: Dicer.Config); + /** + * Sets the boundary to use for parsing and performs some initialization needed for parsing. + * You should only need to use this if you set headerFirst to true in the constructor and are parsing the boundary from the preamble header. 
+ * + * @param boundary The boundary to use + */ + setBoundary(boundary: string): void; + addListener(event: "finish", listener: () => void): this; + addListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + addListener(event: "trailer", listener: (data: Buffer) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "drain", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: "pipe", listener: (src: stream.Readable) => void): this; + addListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "finish", listener: () => void): this; + on(event: "part", listener: (stream: Dicer.PartStream) => void): this; + on(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + on(event: "trailer", listener: (data: Buffer) => void): this; + on(event: "close", listener: () => void): this; + on(event: "drain", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: "pipe", listener: (src: stream.Readable) => void): this; + on(event: "unpipe", listener: (src: stream.Readable) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "finish", listener: () => void): this; + once(event: "part", listener: (stream: Dicer.PartStream) => void): this; + once(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + once(event: "trailer", listener: (data: Buffer) => void): this; + once(event: "close", listener: () => void): this; + once(event: "drain", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: "pipe", listener: (src: stream.Readable) => void): this; + once(event: "unpipe", listener: (src: stream.Readable) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "finish", listener: () => void): this; + prependListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependListener(event: "trailer", listener: (data: Buffer) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "drain", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "finish", listener: () => void): this; + prependOnceListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + prependOnceListener(event: "trailer", listener: (data: Buffer) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "drain", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: "pipe", listener: (src: stream.Readable) => void): this; + prependOnceListener(event: "unpipe", listener: (src: stream.Readable) => void): 
this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "finish", listener: () => void): this; + removeListener(event: "part", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "preamble", listener: (stream: Dicer.PartStream) => void): this; + removeListener(event: "trailer", listener: (data: Buffer) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "drain", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: "pipe", listener: (src: stream.Readable) => void): this; + removeListener(event: "unpipe", listener: (src: stream.Readable) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; +} + +declare namespace Dicer { + interface Config { + /** + * This is the boundary used to detect the beginning of a new part. + */ + boundary?: string | undefined; + /** + * If true, preamble header parsing will be performed first. + */ + headerFirst?: boolean | undefined; + /** + * The maximum number of header key=>value pairs to parse Default: 2000 (same as node's http). + */ + maxHeaderPairs?: number | undefined; + } + + /** + * PartStream is a _ReadableStream_ + * + * PartStream (special) events: + * - on('header', (header: object)) - An object containing the header for this particular part. Each property value is an array of one or more string values. + */ + interface PartStream extends stream.Readable { + addListener(event: "header", listener: (header: object) => void): this; + addListener(event: "close", listener: () => void): this; + addListener(event: "data", listener: (chunk: Buffer | string) => void): this; + addListener(event: "end", listener: () => void): this; + addListener(event: "readable", listener: () => void): this; + addListener(event: "error", listener: (err: Error) => void): this; + addListener(event: string, listener: (...args: any[]) => void): this; + on(event: "header", listener: (header: object) => void): this; + on(event: "close", listener: () => void): this; + on(event: "data", listener: (chunk: Buffer | string) => void): this; + on(event: "end", listener: () => void): this; + on(event: "readable", listener: () => void): this; + on(event: "error", listener: (err: Error) => void): this; + on(event: string, listener: (...args: any[]) => void): this; + once(event: "header", listener: (header: object) => void): this; + once(event: "close", listener: () => void): this; + once(event: "data", listener: (chunk: Buffer | string) => void): this; + once(event: "end", listener: () => void): this; + once(event: "readable", listener: () => void): this; + once(event: "error", listener: (err: Error) => void): this; + once(event: string, listener: (...args: any[]) => void): this; + prependListener(event: "header", listener: (header: object) => void): this; + prependListener(event: "close", listener: () => void): this; + prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; + prependListener(event: "end", listener: () => void): this; + prependListener(event: "readable", listener: () => void): this; + prependListener(event: "error", listener: (err: Error) => void): this; + prependListener(event: string, listener: (...args: any[]) => void): this; + prependOnceListener(event: "header", listener: (header: object) => void): this; + prependOnceListener(event: "close", listener: () => void): this; + prependOnceListener(event: "data", 
listener: (chunk: Buffer | string) => void): this; + prependOnceListener(event: "end", listener: () => void): this; + prependOnceListener(event: "readable", listener: () => void): this; + prependOnceListener(event: "error", listener: (err: Error) => void): this; + prependOnceListener(event: string, listener: (...args: any[]) => void): this; + removeListener(event: "header", listener: (header: object) => void): this; + removeListener(event: "close", listener: () => void): this; + removeListener(event: "data", listener: (chunk: Buffer | string) => void): this; + removeListener(event: "end", listener: () => void): this; + removeListener(event: "readable", listener: () => void): this; + removeListener(event: "error", listener: (err: Error) => void): this; + removeListener(event: string, listener: (...args: any[]) => void): this; + } +} \ No newline at end of file diff --git a/fastify-busboy/deps/streamsearch/sbmh.js b/fastify-busboy/deps/streamsearch/sbmh.js new file mode 100644 index 0000000..b90c0e8 --- /dev/null +++ b/fastify-busboy/deps/streamsearch/sbmh.js @@ -0,0 +1,228 @@ +'use strict' + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = require('node:events').EventEmitter +const inherits = require('node:util').inherits + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
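+ // Each entry records how far the search window may shift when that byte
+ // appears at the end of the window. As an illustration (the needle value
+ // is not from the original source): for a needle of 'abcd' the shifts are
+ // a -> 3, b -> 2, c -> 1, while every byte not present in the needle keeps
+ // the default shift of needleLength (4).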
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH diff --git a/fastify-busboy/lib/main.d.ts b/fastify-busboy/lib/main.d.ts new file mode 100644 index 0000000..91b6448 --- /dev/null +++ b/fastify-busboy/lib/main.d.ts @@ -0,0 +1,196 @@ +// Definitions by: Jacob Baskin +// BendingBender +// Igor Savin + +/// + +import * as http from 'http'; +import { Readable, Writable } from 'stream'; +export { Dicer } from "../deps/dicer/lib/dicer"; + +export const Busboy: BusboyConstructor; +export default Busboy; + +export interface BusboyConfig { + /** + * These are the HTTP headers of the incoming request, which are used by individual parsers. + */ + headers: BusboyHeaders; + /** + * `highWaterMark` to use for this Busboy instance. + * @default WritableStream default. + */ + highWaterMark?: number | undefined; + /** + * highWaterMark to use for file streams. + * @default ReadableStream default. + */ + fileHwm?: number | undefined; + /** + * Default character set to use when one isn't defined. + * @default 'utf8' + */ + defCharset?: string | undefined; + /** + * Detect if a Part is a file. + * + * By default a file is detected if contentType + * is application/octet-stream or fileName is not + * undefined. + * + * Modify this to handle e.g. Blobs. + */ + isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean; + /** + * If paths in the multipart 'filename' field shall be preserved. + * @default false + */ + preservePath?: boolean | undefined; + /** + * Various limits on incoming data. 
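+ *
+ * Illustrative values only (not defaults of this module):
+ * `{ fieldSize: 1024 * 1024, files: 5, fileSize: 10 * 1024 * 1024 }`
+ * caps each non-file field at 1 MiB, accepts at most five file fields,
+ * and truncates any single file beyond 10 MiB.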
+ */ + limits?: + | { + /** + * Max field name size (in bytes) + * @default 100 bytes + */ + fieldNameSize?: number | undefined; + /** + * Max field value size (in bytes) + * @default 1MB + */ + fieldSize?: number | undefined; + /** + * Max number of non-file fields + * @default Infinity + */ + fields?: number | undefined; + /** + * For multipart forms, the max file size (in bytes) + * @default Infinity + */ + fileSize?: number | undefined; + /** + * For multipart forms, the max number of file fields + * @default Infinity + */ + files?: number | undefined; + /** + * For multipart forms, the max number of parts (fields + files) + * @default Infinity + */ + parts?: number | undefined; + /** + * For multipart forms, the max number of header key=>value pairs to parse + * @default 2000 + */ + headerPairs?: number | undefined; + + /** + * For multipart forms, the max size of a header part + * @default 81920 + */ + headerSize?: number | undefined; + } + | undefined; +} + +export type BusboyHeaders = { 'content-type': string } & http.IncomingHttpHeaders; + +export interface BusboyFileStream extends + Readable { + + truncated: boolean; + + /** + * The number of bytes that have been read so far. + */ + bytesRead: number; +} + +export interface Busboy extends Writable { + addListener(event: Event, listener: BusboyEvents[Event]): this; + + addListener(event: string | symbol, listener: (...args: any[]) => void): this; + + on(event: Event, listener: BusboyEvents[Event]): this; + + on(event: string | symbol, listener: (...args: any[]) => void): this; + + once(event: Event, listener: BusboyEvents[Event]): this; + + once(event: string | symbol, listener: (...args: any[]) => void): this; + + removeListener(event: Event, listener: BusboyEvents[Event]): this; + + removeListener(event: string | symbol, listener: (...args: any[]) => void): this; + + off(event: Event, listener: BusboyEvents[Event]): this; + + off(event: string | symbol, listener: (...args: any[]) => void): this; + + prependListener(event: Event, listener: BusboyEvents[Event]): this; + + prependListener(event: string | symbol, listener: (...args: any[]) => void): this; + + prependOnceListener(event: Event, listener: BusboyEvents[Event]): this; + + prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this; +} + +export interface BusboyEvents { + /** + * Emitted for each new file form field found. + * + * * Note: if you listen for this event, you should always handle the `stream` no matter if you care about the + * file contents or not (e.g. you can simply just do `stream.resume();` if you want to discard the contents), + * otherwise the 'finish' event will never fire on the Busboy instance. However, if you don't care about **any** + * incoming files, you can simply not listen for the 'file' event at all and any/all files will be automatically + * and safely discarded (these discarded files do still count towards `files` and `parts` limits). + * * If a configured file size limit was reached, `stream` will both have a boolean property `truncated` + * (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens. + * + * @param listener.transferEncoding Contains the 'Content-Transfer-Encoding' value for the file stream. + * @param listener.mimeType Contains the 'Content-Type' value for the file stream. 
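+ *
+ * A minimal illustrative listener (the `busboy` instance name is a placeholder):
+ * `busboy.on('file', (fieldname, stream, filename, transferEncoding, mimeType) => { stream.resume(); });`
+ * drains the stream, discarding its contents while still allowing 'finish' to fire.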
+ */ + file: ( + fieldname: string, + stream: BusboyFileStream, + filename: string, + transferEncoding: string, + mimeType: string, + ) => void; + /** + * Emitted for each new non-file field found. + */ + field: ( + fieldname: string, + value: string, + fieldnameTruncated: boolean, + valueTruncated: boolean, + transferEncoding: string, + mimeType: string, + ) => void; + finish: () => void; + /** + * Emitted when specified `parts` limit has been reached. No more 'file' or 'field' events will be emitted. + */ + partsLimit: () => void; + /** + * Emitted when specified `files` limit has been reached. No more 'file' events will be emitted. + */ + filesLimit: () => void; + /** + * Emitted when specified `fields` limit has been reached. No more 'field' events will be emitted. + */ + fieldsLimit: () => void; + error: (error: unknown) => void; +} + +export interface BusboyConstructor { + (options: BusboyConfig): Busboy; + + new(options: BusboyConfig): Busboy; +} + diff --git a/fastify-busboy/lib/main.js b/fastify-busboy/lib/main.js new file mode 100644 index 0000000..8794beb --- /dev/null +++ b/fastify-busboy/lib/main.js @@ -0,0 +1,85 @@ +'use strict' + +const WritableStream = require('node:stream').Writable +const { inherits } = require('node:util') +const Dicer = require('../deps/dicer/lib/Dicer') + +const MultipartParser = require('./types/multipart') +const UrlencodedParser = require('./types/urlencoded') +const parseParams = require('./utils/parseParams') + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports.default = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer diff --git a/fastify-busboy/lib/types/multipart.js b/fastify-busboy/lib/types/multipart.js new file mode 100644 index 0000000..ad242db --- /dev/null +++ b/fastify-busboy/lib/types/multipart.js @@ -0,0 +1,306 @@ +'use strict' + +// TODO: +// * support 1 
nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = require('node:stream') +const { inherits } = require('node:util') + +const Dicer = require('../../deps/dicer/lib/Dicer') + +const parseParams = require('../utils/parseParams') +const decodeText = require('../utils/decodeText') +const basename = require('../utils/basename') +const getLimit = require('../utils/getLimit') + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if (parsed[0]) { + contype = parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if 
(RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (!boy._events.file) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart diff --git a/fastify-busboy/lib/types/urlencoded.js b/fastify-busboy/lib/types/urlencoded.js new file mode 100644 index 0000000..6f5f784 --- /dev/null +++ b/fastify-busboy/lib/types/urlencoded.js @@ -0,0 +1,190 @@ +'use strict' + +const Decoder = require('../utils/Decoder') +const decodeText = require('../utils/decodeText') +const getLimit = require('../utils/getLimit') + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no assignment + ++this._fields + let key; const 
keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
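+ // ('%xx' escapes occupy three raw bytes in the byte counter above but decode
+ // to a single character, so the decoded value is re-measured below before
+ // deciding that the limit was really reached)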
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded diff --git a/fastify-busboy/lib/utils/Decoder.js b/fastify-busboy/lib/utils/Decoder.js new file mode 100644 index 0000000..7917678 --- /dev/null +++ b/fastify-busboy/lib/utils/Decoder.js @@ -0,0 +1,54 @@ +'use strict' + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder diff --git a/fastify-busboy/lib/utils/basename.js b/fastify-busboy/lib/utils/basename.js new file mode 100644 index 0000000..db58819 --- /dev/null +++ b/fastify-busboy/lib/utils/basename.js @@ -0,0 +1,14 @@ +'use strict' + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? 
'' : path) +} diff --git a/fastify-busboy/lib/utils/decodeText.js b/fastify-busboy/lib/utils/decodeText.js new file mode 100644 index 0000000..be35d6b --- /dev/null +++ b/fastify-busboy/lib/utils/decodeText.js @@ -0,0 +1,114 @@ +'use strict' + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? + case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) + } + } +} + +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, + + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, + + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, + + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch (e) { } + } + return typeof data === 'string' + ? 
data + : data.toString() + } +} + +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} + +module.exports = decodeText diff --git a/fastify-busboy/lib/utils/getLimit.js b/fastify-busboy/lib/utils/getLimit.js new file mode 100644 index 0000000..cb64fd6 --- /dev/null +++ b/fastify-busboy/lib/utils/getLimit.js @@ -0,0 +1,16 @@ +'use strict' + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} diff --git a/fastify-busboy/lib/utils/parseParams.js b/fastify-busboy/lib/utils/parseParams.js new file mode 100644 index 0000000..1698e62 --- /dev/null +++ b/fastify-busboy/lib/utils/parseParams.js @@ -0,0 +1,196 @@ +/* eslint-disable object-property-newline */ +'use strict' + +const decodeText = require('./decodeText') + +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g + +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', 
'%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': 
'\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +} + +function encodedReplacer (match) { + return EncodedLookup[match] +} + +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 + +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state = STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? 
STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams diff --git a/fastify-busboy/package.json b/fastify-busboy/package.json new file mode 100644 index 0000000..4be895c --- /dev/null +++ b/fastify-busboy/package.json @@ -0,0 +1,86 @@ +{ + "name": "@fastify/busboy", + "version": "2.1.0", + "private": false, + "author": "Brian White ", + "contributors": [ + { + "name": "Igor Savin", + "email": "kibertoad@gmail.com", + "url": "https://github.com/kibertoad" + }, + { + "name": "Aras Abbasi", + "email": "aras.abbasi@gmail.com", + "url": "https://github.com/uzlopak" + } + ], + "description": "A streaming parser for HTML form data for node.js", + "main": "lib/main", + "type": "commonjs", + "types": "lib/main.d.ts", + "scripts": { + "bench:busboy": "cd benchmarks && npm install && npm run benchmark-fastify", + "bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js", + "coveralls": "nyc report --reporter=lcov", + "lint": "npm run lint:standard", + "lint:everything": "npm run lint && npm run test:types", + "lint:fix": "standard --fix", + "lint:standard": "standard --verbose | snazzy", + "test:mocha": "tap", + "test:types": "tsd", + "test:coverage": "nyc npm run test", + "test": "npm run test:mocha" + }, + "engines": { + "node": ">=14" + }, + "devDependencies": { + "@types/node": "^20.1.0", + "busboy": "^1.0.0", + "photofinish": "^1.8.0", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "tap": "^16.3.8", + "tinybench": "^2.5.1", + "tsd": "^0.29.0", + "typescript": "^5.0.2" + }, + "keywords": [ + "uploads", + "forms", + "multipart", + "form-data" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/fastify/busboy.git" + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": false, + "module": "commonjs", + "target": "ES2017" + } + }, + "standard": { + "globals": [ + "describe", + "it" + ], + "ignore": [ + "bench" + ] + }, + "files": [ + "README.md", + "LICENSE", + "lib/*", + "deps/encoding/*", + "deps/dicer/lib", + "deps/streamsearch/", + "deps/dicer/LICENSE" + ] +} diff --git a/fastify-busboy/test/busboy-constructor.test.js b/fastify-busboy/test/busboy-constructor.test.js new file mode 100644 index 0000000..8607789 --- /dev/null +++ b/fastify-busboy/test/busboy-constructor.test.js @@ -0,0 +1,75 @@ +'use strict' + +const Busboy = require('../lib/main') +const { test } = require('tap') + +test('busboy-constructor - should throw an Error if no options are provided', t => { + t.plan(1) + + t.throws(() => new Busboy(), new Error('Busboy expected an options-Object.')) +}) + +test('busboy-constructor - should throw an Error if options does not contain headers', t => { + t.plan(1) + + t.throws(() => new Busboy({}), new Error('Busboy 
expected an options-Object with headers-attribute.')) +}) + +test('busboy-constructor - if busboy is called without new-operator, still creates a busboy instance', t => { + t.plan(1) + + const busboyInstance = Busboy({ headers: { 'content-type': 'application/x-www-form-urlencoded' } }) + t.type(busboyInstance, Busboy) +}) + +test('busboy-constructor - should throw an Error if content-type is not set', t => { + t.plan(1) + + t.throws(() => new Busboy({ headers: {} }), new Error('Missing Content-Type-header.')) +}) + +test('busboy-constructor - should throw an Error if content-type is unsupported', t => { + t.plan(1) + + t.throws(() => new Busboy({ headers: { 'content-type': 'unsupported' } }), new Error('Unsupported Content-Type.')) +}) + +test('busboy-constructor - should not throw an Error if content-type is urlencoded', t => { + t.plan(1) + + t.doesNotThrow(() => new Busboy({ headers: { 'content-type': 'application/x-www-form-urlencoded' } })) +}) + +test('busboy-constructor - if busboy is called without stream options autoDestroy is set to false', t => { + t.plan(1) + + const busboyInstance = Busboy({ headers: { 'content-type': 'application/x-www-form-urlencoded' } }) + t.equal(busboyInstance._writableState.autoDestroy, false) +}) + +test('busboy-constructor - if busboy is called with invalid value for stream option highWaterMark we should throw', t => { + t.plan(1) + + t.throws(() => Busboy({ highWaterMark: 'not_allowed_value_for_highWaterMark', headers: { 'content-type': 'application/x-www-form-urlencoded' } }), new Error('not_allowed_value_for_highWaterMark')) +}) + +test('busboy-constructor - if busboy is called with stream options and autoDestroy:true, autoDestroy should be set to true', t => { + t.plan(1) + + const busboyInstance = Busboy({ autoDestroy: true, headers: { 'content-type': 'application/x-www-form-urlencoded' } }) + t.equal(busboyInstance._writableState.autoDestroy, true) +}) + +test('busboy-constructor - busboy should be initialized with private attribute _done set as false', t => { + t.plan(1) + + const busboyInstance = Busboy({ headers: { 'content-type': 'application/x-www-form-urlencoded' } }) + t.equal(busboyInstance._done, false) +}) + +test('busboy-constructor - busboy should be initialized with private attribute _finished set as false', t => { + t.plan(1) + + const busboyInstance = Busboy({ headers: { 'content-type': 'application/x-www-form-urlencoded' } }) + t.equal(busboyInstance._finished, false) +}) diff --git a/fastify-busboy/test/decoder.test.js b/fastify-busboy/test/decoder.test.js new file mode 100644 index 0000000..fa4ce69 --- /dev/null +++ b/fastify-busboy/test/decoder.test.js @@ -0,0 +1,98 @@ +'use strict' + +const { test } = require('tap') +const Decoder = require('../lib/utils/Decoder') + +test('Decoder', t => { + const tests = + [ + { + source: ['Hello world'], + expected: 'Hello world', + what: 'No encoded bytes' + }, + { + source: ['Hello%20world'], + expected: 'Hello world', + what: 'One full encoded byte' + }, + { + source: ['Hello%20world%21'], + expected: 'Hello world!', + what: 'Two full encoded bytes' + }, + { + source: ['Hello%', '20world'], + expected: 'Hello world', + what: 'One full encoded byte split #1' + }, + { + source: ['Hello%2', '0world'], + expected: 'Hello world', + what: 'One full encoded byte split #2' + }, + { + source: ['Hello%20', 'world'], + expected: 'Hello world', + what: 'One full encoded byte (concat)' + }, + { + source: ['Hello%2Qworld'], + expected: 'Hello%2Qworld', + what: 'Malformed encoded byte #1' + }, + { + 
source: ['Hello%world'], + expected: 'Hello%world', + what: 'Malformed encoded byte #2' + }, + { + source: ['Hello+world'], + expected: 'Hello world', + what: 'Plus to space' + }, + { + source: ['Hello+world%21'], + expected: 'Hello world!', + what: 'Plus and encoded byte' + }, + { + source: ['5%2B5%3D10'], + expected: '5+5=10', + what: 'Encoded plus' + }, + { + source: ['5+%2B+5+%3D+10'], + expected: '5 + 5 = 10', + what: 'Spaces and encoded plus' + } + ] + t.plan(tests.length + 1) + + tests.forEach((v) => { + t.test(v.what, t => { + t.plan(1) + + const dec = new Decoder() + let result = '' + v.source.forEach(function (s) { + result += dec.write(s) + }) + const msg = 'Decoded string mismatch.\n' + + 'Saw: ' + result + '\n' + + 'Expected: ' + v.expected + t.strictSame(result, v.expected, msg) + }) + }) + + t.test('reset sets internal buffer to undefined', t => { + t.plan(2) + + const dec = new Decoder() + dec.write('Hello+world%2') + + t.notSame(dec.buffer, undefined) + dec.reset() + t.equal(dec.buffer, undefined) + }) +}) diff --git a/fastify-busboy/test/dicer-constructor.test.js b/fastify-busboy/test/dicer-constructor.test.js new file mode 100644 index 0000000..e0e6a6c --- /dev/null +++ b/fastify-busboy/test/dicer-constructor.test.js @@ -0,0 +1,22 @@ +'use strict' + +const { test } = require('tap') +const Dicer = require('../deps/dicer/lib/Dicer') + +test('dicer-constructor', t => { + t.plan(2) + + t.test('should throw an Error when no options parameter is supplied to Dicer', t => { + t.plan(1) + + t.throws(() => new Dicer(), new Error('Boundary required')) + }) + + t.test('without new operator a new dicer instance will be initialized', t => { + t.plan(1) + + t.type(Dicer({ + boundary: '----boundary' + }), Dicer) + }) +}) diff --git a/fastify-busboy/test/dicer-endfinish.test.js b/fastify-busboy/test/dicer-endfinish.test.js new file mode 100644 index 0000000..4718076 --- /dev/null +++ b/fastify-busboy/test/dicer-endfinish.test.js @@ -0,0 +1,96 @@ +'use strict' + +const Dicer = require('../deps/dicer/lib/Dicer') +const { test } = require('tap') + +test('dicer-endfinish', t => { + t.plan(1) + + t.test('should properly handle finish', t => { + t.plan(4) + + const CRLF = '\r\n' + const boundary = 'boundary' + + const writeSep = '--' + boundary + + const writePart = [ + writeSep, + 'Content-Type: text/plain', + 'Content-Length: 0' + ].join(CRLF) + + CRLF + CRLF + + 'some data' + CRLF + + const writeEnd = '--' + CRLF + + let firedEnd = false + let firedFinish = false + + const dicer = new Dicer({ boundary }) + dicer.on('part', partListener) + dicer.on('finish', finishListener) + dicer.write(writePart + writeSep) + + function partListener (partReadStream) { + partReadStream.on('data', function () { }) + partReadStream.on('end', partEndListener) + } + function partEndListener () { + firedEnd = true + setImmediate(afterEnd) + } + function afterEnd () { + dicer.end(writeEnd) + setImmediate(afterWrite) + } + function finishListener () { + t.ok(firedEnd, 'end before finishing') + firedFinish = true + test2() + } + function afterWrite () { + t.ok(firedFinish, 'Failed to finish') + } + + let isPausePush = true + + let firedPauseCallback = false + let firedPauseFinish = false + + let dicer2 = null + + function test2 () { + dicer2 = new Dicer({ boundary }) + dicer2.on('part', pausePartListener) + dicer2.on('finish', pauseFinish) + dicer2.write(writePart + writeSep, 'utf8', pausePartCallback) + setImmediate(pauseAfterWrite) + } + function pausePartListener (partReadStream) { + partReadStream.on('data', 
function () { }) + partReadStream.on('end', function () { }) + const realPush = partReadStream.push + partReadStream.push = function fakePush () { + realPush.apply(partReadStream, arguments) + if (!isPausePush) { return true } + isPausePush = false + return false + } + } + function pauseAfterWrite () { + dicer2.end(writeEnd) + setImmediate(pauseAfterEnd) + } + function pauseAfterEnd () { + t.ok(firedPauseCallback, 'Called callback after pause') + t.ok(firedPauseFinish, 'Finish after pause') + } + function pauseFinish () { + firedPauseFinish = true + } + function pausePartCallback () { + firedPauseCallback = true + } + }) +}) diff --git a/fastify-busboy/test/dicer-export.test.js b/fastify-busboy/test/dicer-export.test.js new file mode 100644 index 0000000..05df4e6 --- /dev/null +++ b/fastify-busboy/test/dicer-export.test.js @@ -0,0 +1,24 @@ +'use strict' + +const { test } = require('tap') +const { Dicer } = require('../lib/main') + +test('dicer-export', t => { + t.plan(2) + + t.test('without new operator a new dicer instance will be initialized', t => { + t.plan(1) + + t.type(Dicer({ + boundary: '----boundary' + }), Dicer) + }) + + t.test('with new operator a new dicer instance will be initialized', t => { + t.plan(1) + + t.type(new Dicer({ + boundary: '----boundary' + }), Dicer) + }) +}) diff --git a/fastify-busboy/test/dicer-headerparser.test.js b/fastify-busboy/test/dicer-headerparser.test.js new file mode 100644 index 0000000..73da283 --- /dev/null +++ b/fastify-busboy/test/dicer-headerparser.test.js @@ -0,0 +1,192 @@ +'use strict' + +const { test } = require('tap') +const HeaderParser = require('../deps/dicer/lib/HeaderParser') + +test('dicer-headerparser', t => { + const DCRLF = '\r\n\r\n' + const MAXED_BUFFER = Buffer.allocUnsafe(128 * 1024) + MAXED_BUFFER.fill(0x41) // 'A' + + const tests = [ + { + source: DCRLF, + expected: {}, + what: 'No header' + }, + { + source: ['Content-Type:\t text/plain', + 'Content-Length:0' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain'], 'content-length': ['0'] }, + what: 'Value spacing' + }, + { + source: ['Content-Type:\t text/plain', + 'Content-Length:0' + ].join('\r\n') + DCRLF, + cfg: { + maxHeaderPairs: 0 + }, + expected: {}, + what: 'should enforce maxHeaderPairs of 0' + }, + { + source: ['Content-Type:\t text/plain', + 'Content-Length:0' + ].join('\r\n') + DCRLF, + cfg: { + maxHeaderPairs: 1 + }, + expected: { 'content-type': [' text/plain'] }, + what: 'should enforce maxHeaderPairs of 1' + }, + { + source: ['Content-Type:\r\n text/plain', + 'Foo:\r\n bar\r\n baz' + ].join('\r\n') + DCRLF, + expected: {}, + cfg: { + maxHeaderSize: 0 + }, + what: 'should enforce maxHeaderSize of 0' + }, + { + source: ['Content-Type:\r\n text/plain', + 'Foo:\r\n bar\r\n baz' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plai'] }, + cfg: { + maxHeaderSize: 25 + }, + what: 'should enforce maxHeaderSize of 25' + }, + { + source: ['Content-Type:\r\n text/plain', + 'Foo:\r\n bar\r\n baz' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain'] }, + cfg: { + maxHeaderSize: 31 + }, + what: 'should enforce maxHeaderSize of 31 and ignore the second header' + }, + { + source: ['Content-Type:\r\n text/plain', + 'Foo:\r\n bar\r\n baz' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain'], foo: [''] }, + cfg: { + maxHeaderSize: 32 + }, + what: 'should enforce maxHeaderSize of 32 and only add key of second header' + }, + { + source: ['Content-Type:\r\n text/plain', + 'Foo:\r\n bar\r\n baz' + 
].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain'], foo: ['\r'] }, + cfg: { + maxHeaderSize: 33 + }, + what: 'should enforce maxHeaderSize of 33 and get only first character of second pair' + }, + { + source: ['Content-Type:\r\n text/plain', + ' : ' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain : '] }, + what: 'should not break if invalid header pair (colon exists but empty key and value) is provided' + }, + { + source: ['Content-Type:\r\n text/plain', + 'FoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobaz' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain'] }, + what: 'should not break if invalid header pair (no distinctive colon) is provided' + }, + { + source: ['Content-Type:\r\n text/plain', + ':FoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobazFoobaz' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain'] }, + what: 'should not break if invalid header pair (no key) is provided' + }, + { + source: ['Content-Type:\t text/plain', + 'Content-Length:0' + ].join('\r\n') + DCRLF, + cfg: { + maxHeaderPairs: 2 + }, + expected: { 'content-type': [' text/plain'], 'content-length': ['0'] }, + what: 'should enforce maxHeaderPairs of 2' + }, + { + source: ['Content-Type:\r\n text/plain', + 'Foo:\r\n bar\r\n baz' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [' text/plain'], foo: [' bar baz'] }, + what: 'Folded values' + }, + { + source: [ + 'Foo: bar', + 'Foo: baz' + ].join('\r\n') + DCRLF, + expected: { foo: ['bar', 'baz'] }, + what: 'Repeated header values' + }, + { + source: ['Content-Type:', + 'Foo: ' + ].join('\r\n') + DCRLF, + expected: { 'content-type': [''], foo: [''] }, + what: 'Empty values' + }, + { + source: MAXED_BUFFER.toString('ascii') + DCRLF, + expected: {}, + what: 'Max header size (single chunk)' + }, + { + source: ['ABCDEFGHIJ', MAXED_BUFFER.toString('ascii'), DCRLF], + expected: {}, + what: 'Max header size (multiple chunks #1)' + }, + { + source: [MAXED_BUFFER.toString('ascii'), MAXED_BUFFER.toString('ascii'), DCRLF], + expected: {}, + what: 'Max header size (multiple chunks #2)' + } + ] + + t.plan(tests.length) + + tests.forEach(function (v) { + t.test(v.what, t => { + t.plan(4) + + const cfg = { + ...v.cfg + } + + const parser = Object.keys(cfg).length ?
new HeaderParser(cfg) : new HeaderParser() + let fired = false + + parser.on('header', function (header) { + t.ok(!fired, `${v.what}: Header event fired more than once`) + fired = true + t.strictSame(header, + v.expected, + `${v.what}: Parsed result mismatch`) + }) + if (!Array.isArray(v.source)) { v.source = [v.source] } + v.source.forEach(function (s) { + parser.push(s) + }) + t.ok(fired, `${v.what}: Did not receive header from parser`) + t.pass() + }) + }) +}) diff --git a/fastify-busboy/test/dicer-malformed-header.test.js b/fastify-busboy/test/dicer-malformed-header.test.js new file mode 100644 index 0000000..c25ccdd --- /dev/null +++ b/fastify-busboy/test/dicer-malformed-header.test.js @@ -0,0 +1,29 @@ +'use strict' + +const { test } = require('tap') +const Dicer = require('../deps/dicer/lib/Dicer') + +test('dicer-malformed-header', t => { + t.plan(1) + + t.test('should gracefully handle headers with leading whitespace', t => { + t.plan(3) + const d = new Dicer({ boundary: '----WebKitFormBoundaryoo6vortfDzBsDiro' }) + + d.on('part', function (p) { + p.on('header', function (header) { + t.hasProp(header, ' content-disposition') + t.strictSame(header[' content-disposition'], ['form-data; name="bildbeschreibung"']) + }) + p.on('data', function (data) { + }) + p.on('end', function () { + }) + }) + d.on('finish', function () { + t.pass() + }) + + d.write(Buffer.from('------WebKitFormBoundaryoo6vortfDzBsDiro\r\n Content-Disposition: form-data; name="bildbeschreibung"\r\n\r\n\r\n------WebKitFormBoundaryoo6vortfDzBsDiro--')) + }) +}) diff --git a/fastify-busboy/test/dicer-multipart-extra-trailer.test.js b/fastify-busboy/test/dicer-multipart-extra-trailer.test.js new file mode 100644 index 0000000..335605a --- /dev/null +++ b/fastify-busboy/test/dicer-multipart-extra-trailer.test.js @@ -0,0 +1,82 @@ +'use strict' + +const { test } = require('tap') +const Dicer = require('../deps/dicer/lib/Dicer') +const fs = require('fs') +const path = require('path') + +const FIXTURES_ROOT = path.join(__dirname, 'fixtures/') + +test('dicer-multipart-extra-trailer', t => { + t.plan(1) + + t.test('Extra trailer data pushed after finished', t => { + t.plan(5) + const fixtureBase = FIXTURES_ROOT + 'many' + let n = 0 + const buffer = Buffer.allocUnsafe(16) + const state = { parts: [] } + + const fd = fs.openSync(fixtureBase + '/original', 'r') + + const dicer = new Dicer({ boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' }) + let error + let finishes = 0 + let trailerEmitted = false + + dicer.on('part', function (p) { + const part = { + body: undefined, + bodylen: 0, + error: undefined, + header: undefined + } + + p.on('header', function (h) { + part.header = h + }).on('data', function (data) { + // make a copy because we are using readSync which re-uses a buffer ... 
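+ // The 16-byte `buffer` passed to fs.readSync() in the read loop below is reused
+ // on every iteration, and the chunks Dicer emits can be views into the bytes it
+ // was written, so each chunk is copied before being collected; otherwise a later
+ // read could overwrite part bodies gathered earlier.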
+ const copy = Buffer.allocUnsafe(data.length) + data.copy(copy) + data = copy + if (!part.body) { part.body = [data] } else { part.body.push(data) } + part.bodylen += data.length + }).on('error', function (err) { + part.error = err + t.fail() + }).on('end', function () { + if (part.body) { part.body = Buffer.concat(part.body, part.bodylen) } + state.parts.push(part) + }) + }).on('error', function (err) { + error = err + }).on('trailer', function (data) { + trailerEmitted = true + t.equal(data.toString(), 'Extra', 'trailer should contain the extra data') + }).on('finish', function () { + t.ok(finishes++ === 0, makeMsg('Extra trailer data pushed after finished', 'finish emitted multiple times')) + t.ok(trailerEmitted, makeMsg('Extra trailer data pushed after finished', 'should have emitted trailer')) + + t.ok(error === undefined, makeMsg('Extra trailer data pushed after finished', 'Unexpected error')) + + t.pass() + }) + + while (true) { + n = fs.readSync(fd, buffer, 0, buffer.length, null) + if (n === 0) { + setTimeout(function () { + dicer.write('\r\n\r\n\r\n') + dicer.end() + }, 50) + break + } + dicer.write(n === buffer.length ? buffer : buffer.slice(0, n)) + } + fs.closeSync(fd) + }) +}) + +function makeMsg (what, msg) { + return what + ': ' + msg +} diff --git a/fastify-busboy/test/dicer-multipart-nolisteners.test.js b/fastify-busboy/test/dicer-multipart-nolisteners.test.js new file mode 100644 index 0000000..1e311ba --- /dev/null +++ b/fastify-busboy/test/dicer-multipart-nolisteners.test.js @@ -0,0 +1,44 @@ +'use strict' + +const Dicer = require('../deps/dicer/lib/Dicer') +const { test } = require('tap') +const fs = require('fs') +const path = require('path') + +const FIXTURES_ROOT = path.join(__dirname, 'fixtures/') + +test('dicer-multipart-nolisteners', t => { + t.plan(1) + + t.test('No preamble or part listeners', t => { + t.plan(3) + const fixtureBase = path.resolve(FIXTURES_ROOT, 'many') + let n = 0 + const buffer = Buffer.allocUnsafe(16) + + const fd = fs.openSync(fixtureBase + '/original', 'r') + + const dicer = new Dicer({ boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' }) + let error + let finishes = 0 + + dicer.on('error', function (err) { + error = err + }).on('finish', function () { + t.ok(finishes++ === 0, 'finish emitted multiple times') + + t.ok(error === undefined, `Unexpected error: ${error}`) + t.pass() + }) + + while (true) { + n = fs.readSync(fd, buffer, 0, buffer.length, null) + if (n === 0) { + dicer.end() + break + } + dicer.write(n === buffer.length ? 
buffer : buffer.slice(0, n)) + } + fs.closeSync(fd) + }) +}) diff --git a/fastify-busboy/test/dicer-multipart.test.js b/fastify-busboy/test/dicer-multipart.test.js new file mode 100644 index 0000000..c35c4d0 --- /dev/null +++ b/fastify-busboy/test/dicer-multipart.test.js @@ -0,0 +1,223 @@ +'use strict' + +const Dicer = require('../deps/dicer/lib/Dicer') +const assert = require('node:assert') +const fs = require('node:fs') +const path = require('node:path') +const inspect = require('node:util').inspect +const { test } = require('tap') + +const FIXTURES_ROOT = path.join(__dirname, 'fixtures/') + +test('dicer-multipart', t => { + const tests = + [ + { + source: 'nested', + opts: { boundary: 'AaB03x' }, + chsize: 32, + nparts: 2, + what: 'One nested multipart' + }, + { + source: 'many', + opts: { boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' }, + chsize: 16, + nparts: 7, + what: 'Many parts' + }, + { + source: 'many-wrongboundary', + opts: { boundary: 'LOLOLOL' }, + chsize: 8, + nparts: 0, + dicerError: true, + what: 'Many parts, wrong boundary' + }, + { + source: 'many-noend', + opts: { boundary: '----WebKitFormBoundaryWLHCs9qmcJJoyjKR' }, + chsize: 16, + nparts: 7, + npartErrors: 1, + dicerError: true, + what: 'Many parts, end boundary missing, 1 file open' + }, + { + source: 'nested-full', + opts: { boundary: 'AaB03x', headerFirst: true }, + chsize: 32, + nparts: 2, + what: 'One nested multipart with preceding header' + }, + { + source: 'nested-full', + opts: { headerFirst: true }, + chsize: 32, + nparts: 2, + setBoundary: 'AaB03x', + what: 'One nested multipart with preceding header, using setBoundary' + } + ] + + t.plan(tests.length) + + tests.forEach(function (v) { + t.test(v.what, t => { + t.plan(1) + const fixtureBase = FIXTURES_ROOT + v.source + const state = { parts: [], preamble: undefined } + + const dicer = new Dicer(v.opts) + let error + let partErrors = 0 + let finishes = 0 + + dicer.on('preamble', function (p) { + const preamble = { + body: undefined, + bodylen: 0, + error: undefined, + header: undefined + } + + p.on('header', function (h) { + preamble.header = h + if (v.setBoundary) { dicer.setBoundary(v.setBoundary) } + }).on('data', function (data) { + // make a copy because we are using readSync which re-uses a buffer ... 
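+ // Unlike the readSync-based tests above, this test pipes the fixture in with
+ // fs.createReadStream() (see the pipe at the end of the test), so the copy here
+ // is defensive: it keeps the accumulated preamble bytes independent of whatever
+ // buffer the emitted chunk happens to share memory with.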
+ const copy = Buffer.allocUnsafe(data.length) + data.copy(copy) + data = copy + if (!preamble.body) { preamble.body = [data] } else { preamble.body.push(data) } + preamble.bodylen += data.length + }).on('error', function (err) { + preamble.error = err + }).on('end', function () { + if (preamble.body) { preamble.body = Buffer.concat(preamble.body, preamble.bodylen) } + if (preamble.body || preamble.header) { state.preamble = preamble } + }) + }) + dicer.on('part', function (p) { + const part = { + body: undefined, + bodylen: 0, + error: undefined, + header: undefined + } + + p.on('header', function (h) { + part.header = h + }).on('data', function (data) { + if (!part.body) { part.body = [data] } else { part.body.push(data) } + part.bodylen += data.length + }).on('error', function (err) { + part.error = err + ++partErrors + }).on('end', function () { + if (part.body) { part.body = Buffer.concat(part.body, part.bodylen) } + state.parts.push(part) + }) + }).on('error', function (err) { + error = err + }).on('finish', function () { + assert(finishes++ === 0, makeMsg(v.what, 'finish emitted multiple times')) + + if (v.dicerError) { assert(error !== undefined, makeMsg(v.what, 'Expected error')) } else { assert(error === undefined, makeMsg(v.what, 'Unexpected error: ' + error)) } + + let preamble + if (fs.existsSync(fixtureBase + '/preamble')) { + const prebody = fs.readFileSync(fixtureBase + '/preamble') + if (prebody.length) { + preamble = { + body: prebody, + bodylen: prebody.length, + error: undefined, + header: undefined + } + } + } + if (fs.existsSync(fixtureBase + '/preamble.header')) { + const prehead = JSON.parse(fs.readFileSync(fixtureBase + + '/preamble.header', 'binary')) + if (!preamble) { + preamble = { + body: undefined, + bodylen: 0, + error: undefined, + header: prehead + } + } else { preamble.header = prehead } + } + if (fs.existsSync(fixtureBase + '/preamble.error')) { + const err = new Error(fs.readFileSync(fixtureBase + + '/preamble.error', 'binary')) + if (!preamble) { + preamble = { + body: undefined, + bodylen: 0, + error: err, + header: undefined + } + } else { preamble.error = err } + } + + assert.deepEqual(state.preamble, + preamble, + makeMsg(v.what, + 'Preamble mismatch:\nActual:' + + inspect(state.preamble) + + '\nExpected: ' + + inspect(preamble))) + + assert.equal(state.parts.length, + v.nparts, + makeMsg(v.what, + 'Part count mismatch:\nActual: ' + + state.parts.length + + '\nExpected: ' + + v.nparts)) + + if (!v.npartErrors) { v.npartErrors = 0 } + assert.equal(partErrors, + v.npartErrors, + makeMsg(v.what, + 'Part errors mismatch:\nActual: ' + + partErrors + + '\nExpected: ' + + v.npartErrors)) + + for (let i = 0, header, body; i < v.nparts; ++i) { + if (fs.existsSync(fixtureBase + '/part' + (i + 1))) { + body = fs.readFileSync(fixtureBase + '/part' + (i + 1)) + if (body.length === 0) { body = undefined } + } else { body = undefined } + assert.deepEqual(state.parts[i].body, + body, + makeMsg(v.what, + 'Part #' + (i + 1) + ' body mismatch')) + if (fs.existsSync(fixtureBase + '/part' + (i + 1) + '.header')) { + header = fs.readFileSync(fixtureBase + + '/part' + (i + 1) + '.header', 'binary') + header = JSON.parse(header) + } else { header = undefined } + assert.deepEqual(state.parts[i].header, + header, + makeMsg(v.what, + 'Part #' + (i + 1) + + ' parsed header mismatch:\nActual: ' + + inspect(state.parts[i].header) + + '\nExpected: ' + + inspect(header))) + } + t.pass() + }) + + fs.createReadStream(fixtureBase + '/original').pipe(dicer) + }) + }) +}) + +function 
makeMsg (what, msg) { + return what + ': ' + msg +} diff --git a/fastify-busboy/test/fixtures/many-noend/original b/fastify-busboy/test/fixtures/many-noend/original new file mode 100644 index 0000000..ad9f0cc --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/original @@ -0,0 +1,31 @@ +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="_method" + +put +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[blog]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[public_email]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[interests]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[bio]" + +hello + +"quote" +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="commit" + +Save +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="media"; filename="" +Content-Type: application/octet-stream + + diff --git a/fastify-busboy/test/fixtures/many-noend/part1 b/fastify-busboy/test/fixtures/many-noend/part1 new file mode 100644 index 0000000..a232311 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part1 @@ -0,0 +1 @@ +put \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part1.header b/fastify-busboy/test/fixtures/many-noend/part1.header new file mode 100644 index 0000000..5e6bbe5 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part1.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"_method\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part2 b/fastify-busboy/test/fixtures/many-noend/part2 new file mode 100644 index 0000000..e69de29 diff --git a/fastify-busboy/test/fixtures/many-noend/part2.header b/fastify-busboy/test/fixtures/many-noend/part2.header new file mode 100644 index 0000000..5b53966 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part2.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[blog]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part3 b/fastify-busboy/test/fixtures/many-noend/part3 new file mode 100644 index 0000000..e69de29 diff --git a/fastify-busboy/test/fixtures/many-noend/part3.header b/fastify-busboy/test/fixtures/many-noend/part3.header new file mode 100644 index 0000000..579e16e --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part3.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[public_email]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part4 b/fastify-busboy/test/fixtures/many-noend/part4 new file mode 100644 index 0000000..e69de29 diff --git a/fastify-busboy/test/fixtures/many-noend/part4.header b/fastify-busboy/test/fixtures/many-noend/part4.header new file mode 100644 index 0000000..b41be09 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part4.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[interests]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part5 b/fastify-busboy/test/fixtures/many-noend/part5 new file mode 100644 index 0000000..f2bb979 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part5 @@ -0,0 +1,3 @@ +hello + +"quote" \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part5.header 
b/fastify-busboy/test/fixtures/many-noend/part5.header new file mode 100644 index 0000000..92e417f --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part5.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[bio]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part6 b/fastify-busboy/test/fixtures/many-noend/part6 new file mode 100644 index 0000000..f0f5479 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part6 @@ -0,0 +1 @@ +Save \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part6.header b/fastify-busboy/test/fixtures/many-noend/part6.header new file mode 100644 index 0000000..65a68a9 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part6.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"commit\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-noend/part7.header b/fastify-busboy/test/fixtures/many-noend/part7.header new file mode 100644 index 0000000..25171e8 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-noend/part7.header @@ -0,0 +1,2 @@ +{"content-disposition": ["form-data; name=\"media\"; filename=\"\""], + "content-type": ["application/octet-stream"]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-wrongboundary/original b/fastify-busboy/test/fixtures/many-wrongboundary/original new file mode 100644 index 0000000..859770c --- /dev/null +++ b/fastify-busboy/test/fixtures/many-wrongboundary/original @@ -0,0 +1,32 @@ +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="_method" + +put +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[blog]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[public_email]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[interests]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[bio]" + +hello + +"quote" +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="media"; filename="" +Content-Type: application/octet-stream + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="commit" + +Save +------WebKitFormBoundaryWLHCs9qmcJJoyjKR-- \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many-wrongboundary/preamble b/fastify-busboy/test/fixtures/many-wrongboundary/preamble new file mode 100644 index 0000000..6e4bcc6 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-wrongboundary/preamble @@ -0,0 +1,33 @@ + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="_method" + +put +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[blog]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[public_email]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[interests]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[bio]" + +hello + +"quote" +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="media"; filename="" +Content-Type: application/octet-stream + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="commit" + +Save +------WebKitFormBoundaryWLHCs9qmcJJoyjKR-- \ No newline at end of file diff --git 
a/fastify-busboy/test/fixtures/many-wrongboundary/preamble.error b/fastify-busboy/test/fixtures/many-wrongboundary/preamble.error new file mode 100644 index 0000000..15f4c89 --- /dev/null +++ b/fastify-busboy/test/fixtures/many-wrongboundary/preamble.error @@ -0,0 +1 @@ +Preamble terminated early due to unexpected end of multipart data \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/original b/fastify-busboy/test/fixtures/many/original new file mode 100644 index 0000000..779c5cb --- /dev/null +++ b/fastify-busboy/test/fixtures/many/original @@ -0,0 +1,32 @@ +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="_method" + +put +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[blog]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[public_email]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[interests]" + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="profile[bio]" + +hello + +"quote" +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="media"; filename="" +Content-Type: application/octet-stream + + +------WebKitFormBoundaryWLHCs9qmcJJoyjKR +Content-Disposition: form-data; name="commit" + +Save +------WebKitFormBoundaryWLHCs9qmcJJoyjKR--Extra \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part1 b/fastify-busboy/test/fixtures/many/part1 new file mode 100644 index 0000000..a232311 --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part1 @@ -0,0 +1 @@ +put \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part1.header b/fastify-busboy/test/fixtures/many/part1.header new file mode 100644 index 0000000..5e6bbe5 --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part1.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"_method\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part2 b/fastify-busboy/test/fixtures/many/part2 new file mode 100644 index 0000000..e69de29 diff --git a/fastify-busboy/test/fixtures/many/part2.header b/fastify-busboy/test/fixtures/many/part2.header new file mode 100644 index 0000000..5b53966 --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part2.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[blog]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part3 b/fastify-busboy/test/fixtures/many/part3 new file mode 100644 index 0000000..e69de29 diff --git a/fastify-busboy/test/fixtures/many/part3.header b/fastify-busboy/test/fixtures/many/part3.header new file mode 100644 index 0000000..579e16e --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part3.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[public_email]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part4 b/fastify-busboy/test/fixtures/many/part4 new file mode 100644 index 0000000..e69de29 diff --git a/fastify-busboy/test/fixtures/many/part4.header b/fastify-busboy/test/fixtures/many/part4.header new file mode 100644 index 0000000..b41be09 --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part4.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[interests]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part5 b/fastify-busboy/test/fixtures/many/part5 new file mode 100644 index 0000000..f2bb979 --- 
/dev/null +++ b/fastify-busboy/test/fixtures/many/part5 @@ -0,0 +1,3 @@ +hello + +"quote" \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part5.header b/fastify-busboy/test/fixtures/many/part5.header new file mode 100644 index 0000000..92e417f --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part5.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"profile[bio]\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part6 b/fastify-busboy/test/fixtures/many/part6 new file mode 100644 index 0000000..e69de29 diff --git a/fastify-busboy/test/fixtures/many/part6.header b/fastify-busboy/test/fixtures/many/part6.header new file mode 100644 index 0000000..25171e8 --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part6.header @@ -0,0 +1,2 @@ +{"content-disposition": ["form-data; name=\"media\"; filename=\"\""], + "content-type": ["application/octet-stream"]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part7 b/fastify-busboy/test/fixtures/many/part7 new file mode 100644 index 0000000..f0f5479 --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part7 @@ -0,0 +1 @@ +Save \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/many/part7.header b/fastify-busboy/test/fixtures/many/part7.header new file mode 100644 index 0000000..65a68a9 --- /dev/null +++ b/fastify-busboy/test/fixtures/many/part7.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"commit\""]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested-full/original b/fastify-busboy/test/fixtures/nested-full/original new file mode 100644 index 0000000..3044550 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested-full/original @@ -0,0 +1,24 @@ +User-Agent: foo bar baz +Content-Type: multipart/form-data; boundary=AaB03x + +--AaB03x +Content-Disposition: form-data; name="foo" + +bar +--AaB03x +Content-Disposition: form-data; name="files" +Content-Type: multipart/mixed, boundary=BbC04y + +--BbC04y +Content-Disposition: attachment; filename="file.txt" +Content-Type: text/plain + +contents +--BbC04y +Content-Disposition: attachment; filename="flowers.jpg" +Content-Type: image/jpeg +Content-Transfer-Encoding: binary + +contents +--BbC04y-- +--AaB03x-- \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested-full/part1 b/fastify-busboy/test/fixtures/nested-full/part1 new file mode 100644 index 0000000..ba0e162 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested-full/part1 @@ -0,0 +1 @@ +bar \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested-full/part1.header b/fastify-busboy/test/fixtures/nested-full/part1.header new file mode 100644 index 0000000..03bd093 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested-full/part1.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"foo\""]} diff --git a/fastify-busboy/test/fixtures/nested-full/part2 b/fastify-busboy/test/fixtures/nested-full/part2 new file mode 100644 index 0000000..2d4deb5 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested-full/part2 @@ -0,0 +1,12 @@ +--BbC04y +Content-Disposition: attachment; filename="file.txt" +Content-Type: text/plain + +contents +--BbC04y +Content-Disposition: attachment; filename="flowers.jpg" +Content-Type: image/jpeg +Content-Transfer-Encoding: binary + +contents +--BbC04y-- \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested-full/part2.header b/fastify-busboy/test/fixtures/nested-full/part2.header new file mode 
100644 index 0000000..bbe4513 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested-full/part2.header @@ -0,0 +1,2 @@ +{"content-disposition": ["form-data; name=\"files\""], + "content-type": ["multipart/mixed, boundary=BbC04y"]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested-full/preamble.header b/fastify-busboy/test/fixtures/nested-full/preamble.header new file mode 100644 index 0000000..2815341 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested-full/preamble.header @@ -0,0 +1,2 @@ +{"user-agent": ["foo bar baz"], + "content-type": ["multipart/form-data; boundary=AaB03x"]} \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested/original b/fastify-busboy/test/fixtures/nested/original new file mode 100644 index 0000000..380f451 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested/original @@ -0,0 +1,21 @@ +--AaB03x +Content-Disposition: form-data; name="foo" + +bar +--AaB03x +Content-Disposition: form-data; name="files" +Content-Type: multipart/mixed, boundary=BbC04y + +--BbC04y +Content-Disposition: attachment; filename="file.txt" +Content-Type: text/plain + +contents +--BbC04y +Content-Disposition: attachment; filename="flowers.jpg" +Content-Type: image/jpeg +Content-Transfer-Encoding: binary + +contents +--BbC04y-- +--AaB03x-- \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested/part1 b/fastify-busboy/test/fixtures/nested/part1 new file mode 100644 index 0000000..ba0e162 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested/part1 @@ -0,0 +1 @@ +bar \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested/part1.header b/fastify-busboy/test/fixtures/nested/part1.header new file mode 100644 index 0000000..03bd093 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested/part1.header @@ -0,0 +1 @@ +{"content-disposition": ["form-data; name=\"foo\""]} diff --git a/fastify-busboy/test/fixtures/nested/part2 b/fastify-busboy/test/fixtures/nested/part2 new file mode 100644 index 0000000..2d4deb5 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested/part2 @@ -0,0 +1,12 @@ +--BbC04y +Content-Disposition: attachment; filename="file.txt" +Content-Type: text/plain + +contents +--BbC04y +Content-Disposition: attachment; filename="flowers.jpg" +Content-Type: image/jpeg +Content-Transfer-Encoding: binary + +contents +--BbC04y-- \ No newline at end of file diff --git a/fastify-busboy/test/fixtures/nested/part2.header b/fastify-busboy/test/fixtures/nested/part2.header new file mode 100644 index 0000000..bbe4513 --- /dev/null +++ b/fastify-busboy/test/fixtures/nested/part2.header @@ -0,0 +1,2 @@ +{"content-disposition": ["form-data; name=\"files\""], + "content-type": ["multipart/mixed, boundary=BbC04y"]} \ No newline at end of file diff --git a/fastify-busboy/test/get-limit.test.js b/fastify-busboy/test/get-limit.test.js new file mode 100644 index 0000000..76a2997 --- /dev/null +++ b/fastify-busboy/test/get-limit.test.js @@ -0,0 +1,34 @@ +'use strict' + +const getLimit = require('../lib/utils/getLimit') +const { test } = require('tap') + +test('Get limit', t => { + t.plan(2) + + t.test('Correctly resolves limits', t => { + t.plan(8) + t.strictSame(getLimit(undefined, 'fieldSize', 1), 1) + t.strictSame(getLimit(undefined, 'fileSize', Infinity), Infinity) + + t.strictSame(getLimit({}, 'fieldSize', 1), 1) + t.strictSame(getLimit({}, 'fileSize', Infinity), Infinity) + t.strictSame(getLimit({ fieldSize: null }, 'fieldSize', 1), 1) + t.strictSame(getLimit({ fileSize: null }, 'fileSize', Infinity), 
Infinity) + + t.strictSame(getLimit({ fieldSize: 0 }, 'fieldSize', 1), 0) + t.strictSame(getLimit({ fileSize: 2 }, 'fileSize', 1), 2) + }) + + t.test('Throws an error on incorrect limits', t => { + t.plan(2) + + t.throws(function () { + getLimit({ fieldSize: '1' }, 'fieldSize', 1) + }, new Error('Limit fieldSize is not a valid number')) + + t.throws(function () { + getLimit({ fieldSize: NaN }, 'fieldSize', 1) + }, new Error('Limit fieldSize is not a valid number')) + }) +}) diff --git a/fastify-busboy/test/multipart-stream-pause.test.js b/fastify-busboy/test/multipart-stream-pause.test.js new file mode 100644 index 0000000..856cf71 --- /dev/null +++ b/fastify-busboy/test/multipart-stream-pause.test.js @@ -0,0 +1,82 @@ +'use strict' + +const { inspect } = require('util') +const { test } = require('tap') + +const Busboy = require('..') + +const BOUNDARY = 'u2KxIV5yF1y+xUspOQCCZopaVgeV6Jxihv35XQJmuTx8X3sh' + +function formDataSection (key, value) { + return Buffer.from('\r\n--' + BOUNDARY + + '\r\nContent-Disposition: form-data; name="' + + key + '"\r\n\r\n' + value) +} +function formDataFile (key, filename, contentType) { + return Buffer.concat([ + Buffer.from('\r\n--' + BOUNDARY + '\r\n'), + Buffer.from('Content-Disposition: form-data; name="' + + key + '"; filename="' + filename + '"\r\n'), + Buffer.from('Content-Type: ' + contentType + '\r\n\r\n'), + Buffer.allocUnsafe(100000) + ]) +} + +test('multipart-stream-pause - processes stream correctly', t => { + t.plan(6) + const reqChunks = [ + Buffer.concat([ + formDataFile('file', 'file.bin', 'application/octet-stream'), + formDataSection('foo', 'foo value') + ]), + formDataSection('bar', 'bar value'), + Buffer.from('\r\n--' + BOUNDARY + '--\r\n') + ] + const busboy = new Busboy({ + headers: { + 'content-type': 'multipart/form-data; boundary=' + BOUNDARY + } + }) + let finishes = 0 + const results = [] + const expected = [ + ['file', 'file', 'file.bin', '7bit', 'application/octet-stream'], + ['field', 'foo', 'foo value', false, false, '7bit', 'text/plain'], + ['field', 'bar', 'bar value', false, false, '7bit', 'text/plain'] + ] + + busboy.on('field', function (key, val, keyTrunc, valTrunc, encoding, contype) { + results.push(['field', key, val, keyTrunc, valTrunc, encoding, contype]) + }) + busboy.on('file', function (fieldname, stream, filename, encoding, mimeType) { + results.push(['file', fieldname, filename, encoding, mimeType]) + // Simulate a pipe where the destination is pausing (perhaps due to waiting + // for file system write to finish) + setTimeout(function () { + stream.resume() + }, 10) + }) + busboy.on('finish', function () { + t.ok(finishes++ === 0, 'finish emitted multiple times') + t.strictSame(results.length, + expected.length, + 'Parsed result count mismatch. Saw ' + + results.length + + '. 
Expected: ' + expected.length) + + results.forEach(function (result, i) { + t.strictSame(result, + expected[i], + 'Result mismatch:\nParsed: ' + inspect(result) + + '\nExpected: ' + inspect(expected[i])) + }) + t.pass() + }).on('error', function (err) { + t.error(err) + }) + + reqChunks.forEach(function (buf) { + busboy.write(buf) + }) + busboy.end() +}) diff --git a/fastify-busboy/test/parse-params.test.js b/fastify-busboy/test/parse-params.test.js new file mode 100644 index 0000000..eea4768 --- /dev/null +++ b/fastify-busboy/test/parse-params.test.js @@ -0,0 +1,124 @@ +'use strict' + +const { inspect } = require('node:util') +const { test } = require('tap') +const parseParams = require('../lib/utils/parseParams') + +test('parse-params', t => { + const tests = [ + { + source: 'video/ogg', + expected: ['video/ogg'], + what: 'No parameters' + }, + { + source: 'video/ogg;', + expected: ['video/ogg'], + what: 'No parameters (with separator)' + }, + { + source: 'video/ogg; ', + expected: ['video/ogg'], + what: 'No parameters (with separator followed by whitespace)' + }, + { + source: ';video/ogg', + expected: ['', 'video/ogg'], + what: 'Empty parameter' + }, + { + source: 'video/*', + expected: ['video/*'], + what: 'Subtype with asterisk' + }, + { + source: 'text/plain; encoding=utf8', + expected: ['text/plain', ['encoding', 'utf8']], + what: 'Unquoted' + }, + { + source: 'text/plain; encoding=', + expected: ['text/plain', ['encoding', '']], + what: 'Unquoted empty string' + }, + { + source: 'text/plain; encoding="utf8"', + expected: ['text/plain', ['encoding', 'utf8']], + what: 'Quoted' + }, + { + source: 'text/plain; greeting="hello \\"world\\""', + expected: ['text/plain', ['greeting', 'hello "world"']], + what: 'Quotes within quoted' + }, + { + source: 'text/plain; encoding=""', + expected: ['text/plain', ['encoding', '']], + what: 'Quoted empty string' + }, + { + source: 'text/plain; encoding="utf8";\t foo=bar;test', + expected: ['text/plain', ['encoding', 'utf8'], ['foo', 'bar'], 'test'], + what: 'Multiple params with various spacing' + }, + { + source: "text/plain; filename*=iso-8859-1'en'%A3%20rates", + expected: ['text/plain', ['filename', '£ rates']], + what: 'Extended parameter (RFC 5987) with language' + }, + { + source: "text/plain; filename*=utf-8''%c2%a3%20and%20%e2%82%ac%20rates", + expected: ['text/plain', ['filename', '£ and € rates']], + what: 'Extended parameter (RFC 5987) without language' + }, + { + source: "text/plain; filename*=utf-8''%E6%B5%8B%E8%AF%95%E6%96%87%E6%A1%A3", + expected: ['text/plain', ['filename', '测试文档']], + what: 'Extended parameter (RFC 5987) without language #2' + }, + { + source: "text/plain; filename*=iso-8859-1'en'%A3%20rates; altfilename*=utf-8''%c2%a3%20and%20%e2%82%ac%20rates", + expected: ['text/plain', ['filename', '£ rates'], ['altfilename', '£ and € rates']], + what: 'Multiple extended parameters (RFC 5987) with mixed charsets' + }, + { + source: "text/plain; filename*=iso-8859-1'en'%A3%20rates; altfilename=\"foobarbaz\"", + expected: ['text/plain', ['filename', '£ rates'], ['altfilename', 'foobarbaz']], + what: 'Mixed regular and extended parameters (RFC 5987)' + }, + { + source: "text/plain; filename=\"foobarbaz\"; altfilename*=iso-8859-1'en'%A3%20rates", + expected: ['text/plain', ['filename', 'foobarbaz'], ['altfilename', '£ rates']], + what: 'Mixed regular and extended parameters (RFC 5987) #2' + }, + { + source: 'text/plain; filename="C:\\folder\\test.png"', + expected: ['text/plain', ['filename', 'C:\\folder\\test.png']], + what: 
'Unescaped backslashes should be considered backslashes' + }, + { + source: 'text/plain; filename="John \\"Magic\\" Smith.png"', + expected: ['text/plain', ['filename', 'John "Magic" Smith.png']], + what: 'Escaped double-quotes should be considered double-quotes' + }, + { + source: 'multipart/form-data; charset=utf-8; boundary=0xKhTmLbOuNdArY', + expected: ['multipart/form-data', ['charset', 'utf-8'], ['boundary', '0xKhTmLbOuNdArY']], + what: 'Multiple non-quoted parameters' + } + ] + + t.plan(tests.length) + + tests.forEach((v) => { + t.test(v.what, t => { + t.plan(1) + + const result = parseParams(v.source) + t.strictSame( + result, + v.expected, + `parsed parameters match.\nSaw: ${inspect(result)}\nExpected: ${inspect(v.expected)}`) + }) + }) +}) diff --git a/fastify-busboy/test/streamsearch.test.js b/fastify-busboy/test/streamsearch.test.js new file mode 100644 index 0000000..968c7de --- /dev/null +++ b/fastify-busboy/test/streamsearch.test.js @@ -0,0 +1,396 @@ +'use strict' + +const { test } = require('tap') +const Streamsearch = require('../deps/streamsearch/sbmh') + +test('streamsearch', t => { + t.plan(17) + + t.test('should throw an error if the needle is not a String or Buffer', t => { + t.plan(1) + + t.throws(() => new Streamsearch(2), new Error('The needle has to be a String or a Buffer.')) + }) + t.test('should throw an error if the needle is an empty String', t => { + t.plan(1) + + t.throws(() => new Streamsearch(''), new Error('The needle cannot be an empty String/Buffer.')) + }) + t.test('should throw an error if the needle is an empty Buffer', t => { + t.plan(1) + + t.throws(() => new Streamsearch(Buffer.from('')), new Error('The needle cannot be an empty String/Buffer.')) + }) + t.test('should throw an error if the needle is bigger than 256 characters', t => { + t.plan(1) + + t.throws(() => new Streamsearch(Buffer.from(Array(257).fill('a').join(''))), new Error('The needle cannot have a length bigger than 256.')) + }) + + t.test('should process a Buffer without a needle', t => { + t.plan(5) + const expected = [ + [false, Buffer.from('bar hello'), 0, 9] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar hello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 1) { + t.pass() + } + }) + + s.push(chunks[0]) + }) + + t.test('should cast a string without a needle', t => { + t.plan(5) + + const expected = [ + [false, Buffer.from('bar hello'), 0, 9] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + 'bar hello' + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 1) { + t.pass() + } + }) + + s.push(chunks[0]) + }) + + t.test('should process a chunk with a needle at the beginning', t => { + t.plan(9) + + const expected = [ + [true, undefined, undefined, undefined], + [false, Buffer.from('\r\nbar hello'), 2, 11] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('\r\nbar hello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, 
expected[i][3]) + i++ + if (i >= 2) { + t.pass() + } + }) + + s.push(chunks[0]) + }) + + t.test('should process a chunk with a needle in the middle', t => { + t.plan(9) + const expected = [ + [true, Buffer.from('bar\r\n hello'), 0, 3], + [false, Buffer.from('bar\r\n hello'), 5, 11] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar\r\n hello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 2) { + t.pass() + } + }) + + s.push(chunks[0]) + }) + + t.test('should process a chunk with a needle at the end', t => { + t.plan(5) + const expected = [ + [true, Buffer.from('bar hello\r\n'), 0, 9] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar hello\r\n') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 1) { + t.pass() + } + }) + + s.push(chunks[0]) + }) + + t.test('should process a chunk with multiple needle at the end', t => { + t.plan(9) + const expected = [ + [true, Buffer.from('bar hello\r\n\r\n'), 0, 9], + [true, Buffer.from('bar hello\r\n\r\n'), 11, 11] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar hello\r\n\r\n') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 2) { + t.pass() + } + }) + + s.push(chunks[0]) + }) + + t.test('should process two chunks without a needle', t => { + t.plan(9) + const expected = [ + [false, Buffer.from('bar'), 0, 3], + [false, Buffer.from('hello'), 0, 5] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar'), + Buffer.from('hello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 2) { + t.pass() + } + }) + + s.push(chunks[0]) + s.push(chunks[1]) + }) + + t.test('should process two chunks with an overflowing needle', t => { + t.plan(13) + const expected = [ + [false, Buffer.from('bar\r'), 0, 3], + [true, undefined, undefined, undefined], + [false, Buffer.from('\nhello'), 1, 6] + ] + const needle = '\r\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar\r'), + Buffer.from('\nhello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 3) { + t.pass() + } + }) + + s.push(chunks[0]) + s.push(chunks[1]) + }) + + t.test('should process two chunks with a potentially overflowing needle', t => { + t.plan(13) + + const expected = [ + [false, Buffer.from('bar\r'), 0, 3], + [false, Buffer.from('\r\0\0'), 0, 1], + [false, Buffer.from('\n\r\nhello'), 0, 8] + ] + const needle = '\r\n\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar\r'), + Buffer.from('\n\r\nhello') + ] + let i = 0 + s.on('info', 
(isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 3) { + t.pass() + } + }) + + s.push(chunks[0]) + s.push(chunks[1]) + }) + + t.test('should process three chunks with a overflowing needle', t => { + t.plan(13) + + const expected = [ + [false, Buffer.from('bar\r'), 0, 3], + [true, undefined, undefined, undefined], + [false, Buffer.from('\nhello'), 1, 6] + ] + const needle = '\r\n\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar\r'), + Buffer.from('\n'), + Buffer.from('\nhello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 3) { + t.pass() + } + }) + + s.push(chunks[0]) + s.push(chunks[1]) + s.push(chunks[2]) + }) + + t.test('should process four chunks with a overflowing needle', t => { + t.plan(13) + + const expected = [ + [false, Buffer.from('bar\r'), 0, 3], + [true, undefined, undefined, undefined], + [false, Buffer.from('hello'), 0, 5] + ] + const needle = '\r\n\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar\r'), + Buffer.from('\n'), + Buffer.from('\n'), + Buffer.from('hello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 3) { + t.pass() + } + }) + + s.push(chunks[0]) + s.push(chunks[1]) + s.push(chunks[2]) + s.push(chunks[3]) + }) + + t.test('should process four chunks with a potentially overflowing needle', t => { + t.plan(17) + + const expected = [ + [false, Buffer.from('bar\r'), 0, 3], + [false, Buffer.from('\r\n\0'), 0, 2], + [false, Buffer.from('\r\n\0'), 0, 1], + [false, Buffer.from('hello'), 0, 5] + ] + const needle = '\r\n\n' + const s = new Streamsearch(needle) + const chunks = [ + Buffer.from('bar\r'), + Buffer.from('\n'), + Buffer.from('\r'), + Buffer.from('hello') + ] + let i = 0 + s.on('info', (isMatched, data, start, end) => { + t.strictSame(isMatched, expected[i][0]) + t.strictSame(data, expected[i][1]) + t.strictSame(start, expected[i][2]) + t.strictSame(end, expected[i][3]) + i++ + if (i >= 4) { + t.pass() + } + }) + + s.push(chunks[0]) + s.push(chunks[1]) + s.push(chunks[2]) + s.push(chunks[3]) + }) + + t.test('should reset the internal values if .reset() is called', t => { + t.plan(9) + + const s = new Streamsearch('test') + + t.strictSame(s._lookbehind_size, 0) + t.strictSame(s.matches, 0) + t.strictSame(s._bufpos, 0) + + s._lookbehind_size = 1 + s._bufpos = 1 + s.matches = 1 + + t.strictSame(s._lookbehind_size, 1) + t.strictSame(s.matches, 1) + t.strictSame(s._bufpos, 1) + + s.reset() + + t.strictSame(s._lookbehind_size, 0) + t.strictSame(s.matches, 0) + t.strictSame(s._bufpos, 0) + }) +}) diff --git a/fastify-busboy/test/types-multipart.test.js b/fastify-busboy/test/types-multipart.test.js new file mode 100644 index 0000000..dc7ae88 --- /dev/null +++ b/fastify-busboy/test/types-multipart.test.js @@ -0,0 +1,678 @@ +'use strict' + +const Busboy = require('..') + +const { test } = require('tap') +const { inspect } = require('util') + +const EMPTY_FN = function () { +} + +const tests = [ + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 
'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'super beta file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_1"; filename="1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + ['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'], + ['field', 'file_name_1', 'super beta file', false, false, '7bit', 'text/plain'], + ['file', 'upload_file_0', 1023, 0, '1k_a.dat', '7bit', 'application/octet-stream'], + ['file', 'upload_file_1', 1023, 0, '1k_b.dat', '7bit', 'application/octet-stream'] + ], + what: 'Fields and files', + plan: 11 + }, + { + source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + '', + 'some random content', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="pass"', + '', + 'some random pass', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 
'Content-Disposition: form-data; name="bit"', + '', + '2', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--' + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + ['field', 'cont', 'some random content', false, false, '7bit', 'text/plain'], + ['field', 'pass', 'some random pass', false, false, '7bit', 'text/plain'], + ['field', 'bit', '2', false, false, '7bit', 'text/plain'] + ], + what: 'Fields only', + plan: 6 + }, + { + source: [ + '' + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [], + shouldError: 'Unexpected end of multipart data', + what: 'No fields and no files', + plan: 3 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fileSize: 13, + fieldSize: 5 + }, + expected: [ + ['field', 'file_name_0', 'super', false, true, '7bit', 'text/plain'], + ['file', 'upload_file_0', 13, 2, '1k_a.dat', '7bit', 'application/octet-stream'] + ], + what: 'Fields and files (limits)', + plan: 7 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fields: 0 + }, + events: ['file'], + expected: [ + ['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream'] + ], + what: 'should not emit fieldsLimit if no field was sent', + plan: 6 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fields: 0 + }, + events: ['file', 'fieldsLimit'], + expected: [ + ['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream'] + ], + what: 'should respect fields limit of 0', + plan: 6 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'super beta file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: 
'---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fields: 1 + }, + events: ['field', 'file', 'fieldsLimit'], + expected: [ + ['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'], + ['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream'] + ], + what: 'should respect fields limit of 7', + plan: 7 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + files: 0 + }, + events: ['field'], + expected: [ + ['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'] + ], + what: 'should not emit filesLimit if no file was sent', + plan: 4 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + files: 0 + }, + events: ['field', 'filesLimit'], + expected: [ + ['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'] + ], + what: 'should respect fields limit of 0', + plan: 4 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_b"; filename="1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + files: 1 + }, + events: ['field', 'file', 'filesLimit'], + expected: [ + ['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'], + ['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream'] + ], + what: 'should respect fields limit of 1', + plan: 7 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'super beta file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_1"; filename="1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + ['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'], + ['field', 'file_name_1', 'super beta file', false, false, '7bit', 'text/plain'] + ], + events: ['field'], + what: 'Fields and (ignored) files', + plan: 5 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="/tmp/1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_1"; filename="C:\\files\\1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_2"; filename="relative/1k_c.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + 
['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream'], + ['file', 'upload_file_1', 26, 0, '1k_b.dat', '7bit', 'application/octet-stream'], + ['file', 'upload_file_2', 26, 0, '1k_c.dat', '7bit', 'application/octet-stream'] + ], + what: 'Files with filenames containing paths', + plan: 12 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="/absolute/1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_1"; filename="C:\\absolute\\1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_2"; filename="relative/1k_c.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + preservePath: true, + expected: [ + ['file', 'upload_file_0', 26, 0, '/absolute/1k_a.dat', '7bit', 'application/octet-stream'], + ['file', 'upload_file_1', 26, 0, 'C:\\absolute\\1k_b.dat', '7bit', 'application/octet-stream'], + ['file', 'upload_file_2', 26, 0, 'relative/1k_c.dat', '7bit', 'application/octet-stream'] + ], + what: 'Paths to be preserved through the preservePath option', + plan: 12 + }, + { + source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + 'Content-Type: ', + '', + 'some random content', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: ', + '', + 'some random pass', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--' + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + ['field', 'cont', 'some random content', false, false, '7bit', 'text/plain'] + ], + what: 'Empty content-type and empty content-disposition', + plan: 4 + }, + { + config: { + isPartAFile: (fieldName) => (fieldName !== 'upload_file_0') + }, + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="blob"', + 'Content-Type: application/json', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + ['field', 'upload_file_0', 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', false, false, '7bit', 'application/json'] + ], + what: 'Blob uploads should be handled as fields if isPartAFile is provided.', + plan: 4 + }, + { + config: { + isPartAFile: (fieldName) => (fieldName !== 'upload_file_0') + }, + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="blob"', + 'Content-Type: application/json', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file"; filename*=utf-8\'\'n%C3%A4me.txt', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + 
['field', 'upload_file_0', 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', false, false, '7bit', 'application/json'], + ['file', 'file', 26, 0, 'näme.txt', '7bit', 'application/octet-stream'] + ], + what: 'Blob uploads should be handled as fields if isPartAFile is provided. Other parts should be files.', + plan: 7 + }, + { + config: { + isPartAFile: (fieldName) => (fieldName === 'upload_file_0') + }, + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="upload_file_0"; filename="blob"', + 'Content-Type: application/json', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file"; filename*=utf-8\'\'n%C3%A4me.txt', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + ['file', 'upload_file_0', 26, 0, 'blob', '7bit', 'application/json'], + ['field', 'file', 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', false, false, '7bit', 'application/octet-stream'] + ], + what: 'Blob uploads should be handled as files if corresponding isPartAFile is provided. Other parts should be fields.', + plan: 7 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file"; filename*=utf-8\'\'n%C3%A4me.txt', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + ['file', 'file', 26, 0, 'näme.txt', '7bit', 'application/octet-stream'] + ], + what: 'Unicode filenames', + plan: 6 + }, + { + source: [ + ['--asdasdasdasd\r\n', + 'Content-Type: text/plain\r\n', + 'Content-Disposition: form-data; name="foo"\r\n', + '\r\n', + 'asd\r\n', + '--asdasdasdasd--' + ].join(':)') + ], + boundary: 'asdasdasdasd', + expected: [], + shouldError: 'Unexpected end of multipart data', + what: 'Stopped mid-header', + plan: 3 + }, + { + source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + 'Content-Type: application/json', + '', + '{}', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--' + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + ['field', 'cont', '{}', false, false, '7bit', 'application/json'] + ], + what: 'content-type for fields', + plan: 4 + }, + { + source: [ + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--\r\n' + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [], + what: 'empty form', + plan: 3 + }, + { + source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="field1"', + 'content-type: text/plain; charset=utf-8', + '', + 'Aufklärung ist der Ausgang des Menschen aus seiner selbstverschuldeten Unmündigkeit.', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="field2"', + 'content-type: text/plain; charset=iso-8859-1', + '', + 'sapere aude!', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + ['field', 'field1', 'Aufklärung ist der Ausgang des Menschen aus seiner selbstverschuldeten Unmündigkeit.', false, false, 
'7bit', 'text/plain'], + ['field', 'field2', 'sapere aude!', false, false, '7bit', 'text/plain'] + ], + what: 'Fields and files', + plan: 5 + }, + { + source: [[ + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="regsubmit"', + '', + 'yes', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="referer"', + '', + 'http://domainExample/./', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="activationauth"', + '', + '', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="seccodemodid"', + '', + 'member::register', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7--'].join('\r\n') + ], + boundary: '----WebKitFormBoundaryzca7IDMnT6QwqBp7', + expected: [ + ['field', 'regsubmit', 'yes', false, false, '7bit', 'text/plain'], + ['field', 'referer', 'http://domainExample/./', false, false, '7bit', 'text/plain'], + ['field', 'activationauth', '', false, false, '7bit', 'text/plain'], + ['field', 'seccodemodid', 'member::register', false, false, '7bit', 'text/plain'] + ], + what: 'one empty part should get ignored', + plan: 7 + }, + { + source: [ + ' ------WebKitFormBoundaryTB2MiQ36fnSJlrhY--\r\n' + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [], + shouldError: 'Unexpected end of multipart data', + what: 'empty form with preceding whitespace', + plan: 3 + }, + { + source: [ + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--\r\n' + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhYY', + expected: [], + shouldError: 'Unexpected end of multipart data', + what: 'empty form with wrong boundary (extra Y)', + plan: 3 + }, + { + source: [[ + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="regsubmit"', + '', + 'yes', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="referer"', + '', + 'http://domainExample/./', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="activationauth"', + '', + '', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7', + 'Content-Disposition: form-data; name="seccodemodid"', + '', + 'member::register', + '------WebKitFormBoundaryzca7IDMnT6QwqBp7--'].join('\r\n') + ], + boundary: '----WebKitFormBoundaryzca7IDMnT6QwqBp7', + expected: [ + ['field', 'regsubmit', 'yes', false, false, '7bit', 'text/plain'], + ['field', 'referer', 'http://domainExample/./', false, false, '7bit', 'text/plain'], + ['field', 'activationauth', '', false, false, '7bit', 'text/plain'], + ['field', 'seccodemodid', 'member::register', false, false, '7bit', 'text/plain'] + ], + what: 'multiple empty parts should get ignored', + plan: 7 + } +] + +tests.forEach((v) => { + test(v.what, t => { + t.plan(v.plan) + const busboy = new Busboy({ + ...v.config, + limits: v.limits, + preservePath: v.preservePath, + headers: { + 'content-type': 'multipart/form-data; boundary=' + v.boundary + } + }) + let finishes = 0 + const results = [] + + if (v.events === undefined || v.events.indexOf('field') > -1) { + busboy.on('field', function (key, val, keyTrunc, valTrunc, encoding, contype) { + results.push(['field', key, val, keyTrunc, valTrunc, encoding, contype]) + }) + } + if (v.events === undefined || v.events.indexOf('file') > -1) { + busboy.on('file', function (fieldname, stream, filename, 
encoding, mimeType) { + let nb = 0 + const info = ['file', + fieldname, + nb, + 0, + filename, + encoding, + mimeType] + results.push(info) + stream.on('data', function (d) { + nb += d.length + }).on('limit', function () { + ++info[3] + }).on('end', function () { + info[2] = nb + t.ok(typeof (stream.bytesRead) === 'number', 'file.bytesRead is missing') + t.ok(stream.bytesRead === nb, 'file.bytesRead is not equal to filesize') + if (stream.truncated) { ++info[3] } + }) + }) + } + busboy.on('finish', function () { + t.ok(finishes++ === 0, 'finish emitted multiple times') + t.equal(results.length, + v.expected.length, + 'Parsed result count mismatch. Saw ' + + results.length + + '. Expected: ' + v.expected.length) + + results.forEach(function (result, i) { + t.strictSame(result, + v.expected[i], + 'Result mismatch:\nParsed: ' + inspect(result) + + '\nExpected: ' + inspect(v.expected[i]) + ) + }) + t.pass() + }).on('error', function (err) { + if (!v.shouldError || v.shouldError !== err.message) { t.error(err) } + }) + + v.source.forEach(function (s) { + busboy.write(Buffer.from(s, 'utf8'), EMPTY_FN) + }) + busboy.end() + }) +}) diff --git a/fastify-busboy/test/types-urlencoded.test.js b/fastify-busboy/test/types-urlencoded.test.js new file mode 100644 index 0000000..73cc286 --- /dev/null +++ b/fastify-busboy/test/types-urlencoded.test.js @@ -0,0 +1,210 @@ +'use strict' + +const { inspect } = require('util') +const Busboy = require('..') +const { test } = require('tap') + +const EMPTY_FN = function () { +} + +const tests = [ + { + source: ['foo'], + expected: [['foo', '', false, false]], + what: 'Unassigned value', + plan: 4 + }, + { + source: ['foo=bar'], + expected: [['foo', 'bar', false, false]], + what: 'Assigned value', + plan: 4 + }, + { + source: ['foo&bar=baz'], + expected: [['foo', '', false, false], + ['bar', 'baz', false, false]], + what: 'Unassigned and assigned value', + plan: 5 + }, + { + source: ['foo=bar&baz'], + expected: [['foo', 'bar', false, false], + ['baz', '', false, false]], + what: 'Assigned and unassigned value', + plan: 5 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['foo', 'bar', false, false], + ['baz', 'bla', false, false]], + what: 'Two assigned values', + plan: 5 + }, + { + source: ['foo&bar'], + expected: [['foo', '', false, false], + ['bar', '', false, false]], + what: 'Two unassigned values', + plan: 5 + }, + { + source: ['foo&bar&'], + expected: [['foo', '', false, false], + ['bar', '', false, false]], + what: 'Two unassigned values and ampersand', + plan: 5 + }, + { + source: ['foo=bar+baz%2Bquux'], + expected: [['foo', 'bar baz+quux', false, false]], + what: 'Assigned value with (plus) space', + plan: 4 + }, + { + source: ['foo=bar%20baz%21'], + expected: [['foo', 'bar baz!', false, false]], + what: 'Assigned value with encoded bytes', + plan: 4 + }, + { + source: ['foo%20bar=baz%20bla%21'], + expected: [['foo bar', 'baz bla!', false, false]], + what: 'Assigned value with encoded bytes #2', + plan: 4 + }, + { + source: ['foo=bar%20baz%21&num=1000'], + expected: [['foo', 'bar baz!', false, false], + ['num', '1000', false, false]], + what: 'Two assigned values, one with encoded bytes', + plan: 5 + }, + { + source: ['foo=bar&baz=bla'], + expected: [], + what: 'Limits: zero fields', + limits: { fields: 0 }, + plan: 3 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['foo', 'bar', false, false]], + what: 'Limits: one field', + limits: { fields: 1 }, + plan: 4 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['foo', 'bar', false, false], + ['baz', 
'bla', false, false]], + what: 'Limits: field part lengths match limits', + limits: { fieldNameSize: 3, fieldSize: 3 }, + plan: 5 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['fo', 'bar', true, false], + ['ba', 'bla', true, false]], + what: 'Limits: truncated field name', + limits: { fieldNameSize: 2 }, + plan: 5 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['foo', 'ba', false, true], + ['baz', 'bl', false, true]], + what: 'Limits: truncated field value', + limits: { fieldSize: 2 }, + plan: 5 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['fo', 'ba', true, true], + ['ba', 'bl', true, true]], + what: 'Limits: truncated field name and value', + limits: { fieldNameSize: 2, fieldSize: 2 }, + plan: 5 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['fo', '', true, true], + ['ba', '', true, true]], + what: 'Limits: truncated field name and zero value limit', + limits: { fieldNameSize: 2, fieldSize: 0 }, + plan: 5 + }, + { + source: ['foo=bar&baz=bla'], + expected: [['', '', true, true], + ['', '', true, true]], + what: 'Limits: truncated zero field name and zero value limit', + limits: { fieldNameSize: 0, fieldSize: 0 }, + plan: 5 + }, + { + source: ['&'], + expected: [], + what: 'Ampersand', + plan: 3 + }, + { + source: ['&&&&&'], + expected: [], + what: 'Many ampersands', + plan: 3 + }, + { + source: ['='], + expected: [['', '', false, false]], + what: 'Assigned value, empty name and value', + plan: 4 + }, + { + source: [''], + expected: [], + what: 'Nothing', + plan: 3 + } +] + +tests.forEach((v) => { + test(v.what, t => { + t.plan(v.plan || 20) + const busboy = new Busboy({ + limits: v.limits, + headers: { + 'content-type': 'application/x-www-form-urlencoded; charset=utf-8' + } + }) + let finishes = 0 + const results = [] + + busboy.on('field', function (key, val, keyTrunc, valTrunc) { + results.push([key, val, keyTrunc, valTrunc]) + }) + busboy.on('file', function () { + throw new Error('Unexpected file') + }) + busboy.on('finish', function () { + t.ok(finishes++ === 0, 'finish emitted multiple times') + t.equal(results.length, v.expected.length) + + let i = 0 + results.forEach(function (result) { + t.strictSame(result, + v.expected[i], + 'Result mismatch:\nParsed: ' + inspect(result) + + '\nExpected: ' + inspect(v.expected[i]) + ) + ++i + }) + t.pass() + }) + + v.source.forEach(function (s) { + busboy.write(Buffer.from(s, 'utf8'), EMPTY_FN) + }) + busboy.end() + }) +}) diff --git a/fastify-busboy/test/types/dicer.test-d.ts b/fastify-busboy/test/types/dicer.test-d.ts new file mode 100644 index 0000000..466c1e1 --- /dev/null +++ b/fastify-busboy/test/types/dicer.test-d.ts @@ -0,0 +1,81 @@ +import { Dicer } from "../../lib/main"; +import * as fs from "fs"; +import * as stream from "stream"; + +function testDicerSyntax() { + const opts: Dicer.Config = { + boundary: "testing", + }; + const dicer = new Dicer(opts); + const opts2: Dicer.Config = { + headerFirst: true, + maxHeaderPairs: 1, + }; + const opts3: Dicer.Config = { + boundary: "more-testing", + headerFirst: false, + maxHeaderPairs: 8, + }; + dicer.setBoundary("new-testing-boundary"); + dicer.on("part", handleDicerPartStream); + dicer.on("finish", () => { + console.log("dicer parsing finished"); + }); + dicer.on("preamble", part => { + console.log("dicer preamble to new part"); + }); + dicer.on("trailer", data => { + console.log(`dicer trailing data found: ${data.length} bytes`); + }); + dicer.on("close", () => { + console.log("dicer close"); + }); + dicer.on("drain", () => { + console.log("dicer drain"); + 
}); + dicer.on("error", err => { + console.error(`dicer error: ${err.message || JSON.stringify(err)}`); + }); + dicer.on("finish", () => { + console.log("dicer finish"); + }); + dicer.on("pipe", (src: stream.Readable) => { + console.log("dicer pipe"); + }); + dicer.on("unpipe", (src: stream.Readable) => { + console.log("dicer unpipe"); + }); + const inputFileStream = fs.createReadStream("in-test-file.txt"); + inputFileStream.pipe(dicer); +} +/** + * Handle a part found by a Dicer parser + * + * @param part Part found + */ +function handleDicerPartStream(part: Dicer.PartStream) { + console.log("dicer part found"); + const outputFileStream = fs.createWriteStream("out-test-file.txt"); + part.on("readable", () => { + console.log("part readable"); + }); + part.on("header", header => { + console.log(`part header found:\n${JSON.stringify(header)}`); + }); + part.on("data", () => { + console.log("part data"); + }); + part.on("finish", () => { + console.log("part finished"); + }); + part.on("error", err => { + console.error(`part error: ${err.message || JSON.stringify(err)}`); + }); + part.on("end", () => { + console.log("part ended"); + }); + part.on("close", () => { + console.log("part closed"); + }); + part.pipe(outputFileStream); +} \ No newline at end of file diff --git a/fastify-busboy/test/types/main.test-d.ts b/fastify-busboy/test/types/main.test-d.ts new file mode 100644 index 0000000..fb58b3f --- /dev/null +++ b/fastify-busboy/test/types/main.test-d.ts @@ -0,0 +1,241 @@ +import BusboyDefault, { BusboyConstructor, BusboyConfig, BusboyHeaders, Busboy, BusboyEvents, BusboyFileStream } from '../..'; +import {expectError, expectType} from "tsd"; +import BusboyESM from "../.."; + +// test type exports +type Constructor = BusboyConstructor; +type Config = BusboyConfig; +type Headers = BusboyHeaders; +type Events = BusboyEvents; +type BB = Busboy; + +expectType(new BusboyESM({ headers: { 'content-type': 'foo' } })); +expectType(new Busboy({ headers: { 'content-type': 'foo' } })); + +expectError(new BusboyDefault({})); +const busboy = BusboyDefault({ headers: { 'content-type': 'foo' } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, highWaterMark: 1000 }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, fileHwm: 1000 }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, defCharset: 'utf8' }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, preservePath: true }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { fieldNameSize: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { fieldSize: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { fields: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { fileSize: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { files: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { parts: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { headerPairs: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, limits: { headerSize: 200 } }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' 
}, isPartAFile: (fieldName, contentType, fileName) => fieldName === 'my-special-field' || fileName !== 'not-so-special.txt' }); // $ExpectType Busboy +new BusboyDefault({ headers: { 'content-type': 'foo' }, isPartAFile: (fieldName, contentType, fileName) => fileName !== undefined }); // $ExpectType Busboy + +busboy.addListener('file', (fieldname, file, filename, encoding, mimetype) => { + expectType (fieldname) + expectType(file); + expectType(filename); + expectType(encoding); + expectType(mimetype); +}); +busboy.addListener('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + expectType (fieldname); + expectType (val); + expectType (fieldnameTruncated); + expectType (valTruncated); + expectType (encoding); + expectType (mimetype); +}); +busboy.addListener('partsLimit', () => {}); +busboy.addListener('filesLimit', () => {}); +busboy.addListener('fieldsLimit', () => {}); +busboy.addListener('error', e => { + expectType (e); +}); +busboy.addListener('finish', () => {}); +// test fallback +busboy.on('foo', foo => { + expectType (foo); +}); +busboy.on(Symbol('foo'), foo => { + expectType(foo); +}); + +busboy.on('file', (fieldname, file, filename, encoding, mimetype) => { + expectType (fieldname); + expectType (file); + expectType (filename); + expectType (encoding); + expectType (mimetype); +}); +busboy.on('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + expectType (fieldname); + expectType (val); + expectType (fieldnameTruncated); + expectType (valTruncated); + expectType (encoding); + expectType (mimetype); +}); +busboy.on('partsLimit', () => {}); +busboy.on('filesLimit', () => {}); +busboy.on('fieldsLimit', () => {}); +busboy.on('error', e => { + expectType (e); +}); +busboy.on('finish', () => {}); +// test fallback +busboy.on('foo', foo => { + expectType (foo); +}); +busboy.on(Symbol('foo'), foo => { + expectType (foo); +}); + +busboy.once('file', (fieldname, file, filename, encoding, mimetype) => { + expectType (fieldname); + expectType (file); + expectType (filename); + expectType (encoding); + expectType (mimetype); +}); +busboy.once('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + expectType (fieldname); + expectType (val); + expectType (fieldnameTruncated); + expectType (valTruncated); + expectType (encoding); + expectType (mimetype); +}); +busboy.once('partsLimit', () => {}); +busboy.once('filesLimit', () => {}); +busboy.once('fieldsLimit', () => {}); +busboy.once('error', e => { + expectType (e); +}); +busboy.once('finish', () => {}); +// test fallback +busboy.once('foo', foo => { + expectType (foo); +}); +busboy.once(Symbol('foo'), foo => { + expectType (foo); +}); + +busboy.removeListener('file', (fieldname, file, filename, encoding, mimetype) => { + expectType (fieldname); + expectType (file); + expectType (filename); + expectType (encoding); + expectType (mimetype); +}); +busboy.removeListener('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + expectType (fieldname); + expectType (val); + expectType (fieldnameTruncated); + expectType (valTruncated); + expectType (encoding); + expectType (mimetype); +}); +busboy.removeListener('partsLimit', () => {}); +busboy.removeListener('filesLimit', () => {}); +busboy.removeListener('fieldsLimit', () => {}); +busboy.removeListener('error', e => { + expectType (e); +}); +busboy.removeListener('finish', () => {}); +// test fallback +busboy.removeListener('foo', foo => { + expectType (foo); +}); 
+busboy.removeListener(Symbol('foo'), foo => { + expectType (foo); +}); + +busboy.off('file', (fieldname, file, filename, encoding, mimetype) => { + expectType (fieldname); + expectType (file); + expectType (filename); + expectType (encoding); + expectType (mimetype); +}); +busboy.off('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + expectType (fieldname); + expectType (val); + expectType (fieldnameTruncated); + expectType (valTruncated); + expectType (encoding); + expectType (mimetype); +}); +busboy.off('partsLimit', () => {}); +busboy.off('filesLimit', () => {}); +busboy.off('fieldsLimit', () => {}); +busboy.off('error', e => { + expectType (e); +}); +busboy.off('finish', () => {}); +// test fallback +busboy.off('foo', foo => { + expectType (foo); +}); +busboy.off(Symbol('foo'), foo => { + expectType (foo); +}); + +busboy.prependListener('file', (fieldname, file, filename, encoding, mimetype) => { + expectType (fieldname); + expectType (file); + expectType (filename); + expectType (encoding); + expectType (mimetype); +}); +busboy.prependListener('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + expectType (fieldname); + expectType (val); + expectType (fieldnameTruncated); + expectType (valTruncated); + expectType (encoding); + expectType (mimetype); +}); +busboy.prependListener('partsLimit', () => {}); +busboy.prependListener('filesLimit', () => {}); +busboy.prependListener('fieldsLimit', () => {}); +busboy.prependListener('error', e => { + expectType (e); +}); +busboy.prependListener('finish', () => {}); +// test fallback +busboy.prependListener('foo', foo => { + expectType (foo); +}); +busboy.prependListener(Symbol('foo'), foo => { + expectType (foo); +}); + +busboy.prependOnceListener('file', (fieldname, file, filename, encoding, mimetype) => { + expectType (fieldname); + expectType (file); + expectType (filename); + expectType (encoding); + expectType (mimetype); +}); +busboy.prependOnceListener('field', (fieldname, val, fieldnameTruncated, valTruncated, encoding, mimetype) => { + expectType (fieldname); + expectType (val); + expectType (fieldnameTruncated); + expectType (valTruncated); + expectType (encoding); + expectType (mimetype); +}); +busboy.prependOnceListener('partsLimit', () => {}); +busboy.prependOnceListener('filesLimit', () => {}); +busboy.prependOnceListener('fieldsLimit', () => {}); +busboy.prependOnceListener('error', e => { + expectType (e); +}); +busboy.prependOnceListener('finish', () => {}); +// test fallback +busboy.prependOnceListener('foo', foo => { + expectType (foo); +}); +busboy.prependOnceListener(Symbol('foo'), foo => { + expectType (foo); +}); diff --git a/fastify-busboy/tsconfig.json b/fastify-busboy/tsconfig.json new file mode 100644 index 0000000..eec9314 --- /dev/null +++ b/fastify-busboy/tsconfig.json @@ -0,0 +1,30 @@ +{ + "compilerOptions": { + "outDir": "dist", + "module": "commonjs", + "target": "es2015", + "sourceMap": false, + "declaration": true, + "declarationMap": false, + "types": ["node"], + "strict": true, + "moduleResolution": "node", + "noUnusedLocals": false, + "noUnusedParameters": false, + "noFallthroughCasesInSwitch": true, + "noImplicitReturns": true, + "noImplicitAny": true, + "noImplicitThis": true, + "strictNullChecks": true, + "importHelpers": true, + "baseUrl": ".", + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true + }, + "exclude": [ + "node_modules", + "test", + "dist" + ] +} diff --git 
a/index-fetch.js b/index-fetch.js new file mode 100644 index 0000000..ba31a65 --- /dev/null +++ b/index-fetch.js @@ -0,0 +1,15 @@ +'use strict' + +const fetchImpl = require('./lib/fetch').fetch + +module.exports.fetch = function fetch (resource, init = undefined) { + return fetchImpl(resource, init).catch((err) => { + Error.captureStackTrace(err, this) + throw err + }) +} +module.exports.FormData = require('./lib/fetch/formdata').FormData +module.exports.Headers = require('./lib/fetch/headers').Headers +module.exports.Response = require('./lib/fetch/response').Response +module.exports.Request = require('./lib/fetch/request').Request +module.exports.WebSocket = require('./lib/websocket/websocket').WebSocket diff --git a/index.d.ts b/index.d.ts new file mode 100644 index 0000000..83a786d --- /dev/null +++ b/index.d.ts @@ -0,0 +1,3 @@ +export * from './types/index' +import Undici from './types/index' +export default Undici diff --git a/index.html b/index.html new file mode 100644 index 0000000..672a592 --- /dev/null +++ b/index.html @@ -0,0 +1,35 @@ + + + + + Node.js Undici + + + + + + + + + +
+ + + + + diff --git a/index.js b/index.js new file mode 100644 index 0000000..26302cc --- /dev/null +++ b/index.js @@ -0,0 +1,167 @@ +'use strict' + +const Client = require('./lib/client') +const Dispatcher = require('./lib/dispatcher') +const errors = require('./lib/core/errors') +const Pool = require('./lib/pool') +const BalancedPool = require('./lib/balanced-pool') +const Agent = require('./lib/agent') +const util = require('./lib/core/util') +const { InvalidArgumentError } = errors +const api = require('./lib/api') +const buildConnector = require('./lib/core/connect') +const MockClient = require('./lib/mock/mock-client') +const MockAgent = require('./lib/mock/mock-agent') +const MockPool = require('./lib/mock/mock-pool') +const mockErrors = require('./lib/mock/mock-errors') +const ProxyAgent = require('./lib/proxy-agent') +const RetryHandler = require('./lib/handler/RetryHandler') +const { getGlobalDispatcher, setGlobalDispatcher } = require('./lib/global') +const DecoratorHandler = require('./lib/handler/DecoratorHandler') +const RedirectHandler = require('./lib/handler/RedirectHandler') +const createRedirectInterceptor = require('./lib/interceptor/redirectInterceptor') + +let hasCrypto +try { + require('crypto') + hasCrypto = true +} catch { + hasCrypto = false +} + +Object.assign(Dispatcher.prototype, api) + +module.exports.Dispatcher = Dispatcher +module.exports.Client = Client +module.exports.Pool = Pool +module.exports.BalancedPool = BalancedPool +module.exports.Agent = Agent +module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler + +module.exports.DecoratorHandler = DecoratorHandler +module.exports.RedirectHandler = RedirectHandler +module.exports.createRedirectInterceptor = createRedirectInterceptor + +module.exports.buildConnector = buildConnector +module.exports.errors = errors + +function makeDispatcher (fn) { + return (url, opts, handler) => { + if (typeof opts === 'function') { + handler = opts + opts = null + } + + if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { + throw new InvalidArgumentError('invalid url') + } + + if (opts != null && typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (opts && opts.path != null) { + if (typeof opts.path !== 'string') { + throw new InvalidArgumentError('invalid opts.path') + } + + let path = opts.path + if (!opts.path.startsWith('/')) { + path = `/${path}` + } + + url = new URL(util.parseOrigin(url).origin + path) + } else { + if (!opts) { + opts = typeof url === 'object' ? url : {} + } + + url = util.parseURL(url) + } + + const { agent, dispatcher = getGlobalDispatcher() } = opts + + if (agent) { + throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') + } + + return fn.call(dispatcher, { + ...opts, + origin: url.origin, + path: url.search ? `${url.pathname}${url.search}` : url.pathname, + method: opts.method || (opts.body ? 
'PUT' : 'GET') + }, handler) + } +} + +module.exports.setGlobalDispatcher = setGlobalDispatcher +module.exports.getGlobalDispatcher = getGlobalDispatcher + +if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { + let fetchImpl = null + module.exports.fetch = async function fetch (resource) { + if (!fetchImpl) { + fetchImpl = require('./lib/fetch').fetch + } + + try { + return await fetchImpl(...arguments) + } catch (err) { + if (typeof err === 'object') { + Error.captureStackTrace(err, this) + } + + throw err + } + } + module.exports.Headers = require('./lib/fetch/headers').Headers + module.exports.Response = require('./lib/fetch/response').Response + module.exports.Request = require('./lib/fetch/request').Request + module.exports.FormData = require('./lib/fetch/formdata').FormData + module.exports.File = require('./lib/fetch/file').File + module.exports.FileReader = require('./lib/fileapi/filereader').FileReader + + const { setGlobalOrigin, getGlobalOrigin } = require('./lib/fetch/global') + + module.exports.setGlobalOrigin = setGlobalOrigin + module.exports.getGlobalOrigin = getGlobalOrigin + + const { CacheStorage } = require('./lib/cache/cachestorage') + const { kConstruct } = require('./lib/cache/symbols') + + // Cache & CacheStorage are tightly coupled with fetch. Even if it may run + // in an older version of Node, it doesn't have any use without fetch. + module.exports.caches = new CacheStorage(kConstruct) +} + +if (util.nodeMajor >= 16) { + const { deleteCookie, getCookies, getSetCookies, setCookie } = require('./lib/cookies') + + module.exports.deleteCookie = deleteCookie + module.exports.getCookies = getCookies + module.exports.getSetCookies = getSetCookies + module.exports.setCookie = setCookie + + const { parseMIMEType, serializeAMimeType } = require('./lib/fetch/dataURL') + + module.exports.parseMIMEType = parseMIMEType + module.exports.serializeAMimeType = serializeAMimeType +} + +if (util.nodeMajor >= 18 && hasCrypto) { + const { WebSocket } = require('./lib/websocket/websocket') + + module.exports.WebSocket = WebSocket +} + +module.exports.request = makeDispatcher(api.request) +module.exports.stream = makeDispatcher(api.stream) +module.exports.pipeline = makeDispatcher(api.pipeline) +module.exports.connect = makeDispatcher(api.connect) +module.exports.upgrade = makeDispatcher(api.upgrade) + +module.exports.MockClient = MockClient +module.exports.MockPool = MockPool +module.exports.MockAgent = MockAgent +module.exports.mockErrors = mockErrors diff --git a/lib/agent.js b/lib/agent.js new file mode 100644 index 0000000..0b18f2a --- /dev/null +++ b/lib/agent.js @@ -0,0 +1,148 @@ +'use strict' + +const { InvalidArgumentError } = require('./core/errors') +const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = require('./core/symbols') +const DispatcherBase = require('./dispatcher-base') +const Pool = require('./pool') +const Client = require('./client') +const util = require('./core/util') +const createRedirectInterceptor = require('./interceptor/redirectInterceptor') +const { WeakRef, FinalizationRegistry } = require('./compat/dispatcher-weakref')() + +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kMaxRedirections = Symbol('maxRedirections') +const kOnDrain = Symbol('onDrain') +const kFactory = Symbol('factory') +const kFinalizer = Symbol('finalizer') +const kOptions = Symbol('options') + +function defaultFactory (origin, opts) { + 
return opts && opts.connections === 1 + ? new Client(origin, opts) + : new Pool(origin, opts) +} + +class Agent extends DispatcherBase { + constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { + super() + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + if (connect && typeof connect !== 'function') { + connect = { ...connect } + } + + this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) + ? options.interceptors.Agent + : [createRedirectInterceptor({ maxRedirections })] + + this[kOptions] = { ...util.deepClone(options), connect } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kMaxRedirections] = maxRedirections + this[kFactory] = factory + this[kClients] = new Map() + this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { + const ref = this[kClients].get(key) + if (ref !== undefined && ref.deref() === undefined) { + this[kClients].delete(key) + } + }) + + const agent = this + + this[kOnDrain] = (origin, targets) => { + agent.emit('drain', origin, [agent, ...targets]) + } + + this[kOnConnect] = (origin, targets) => { + agent.emit('connect', origin, [agent, ...targets]) + } + + this[kOnDisconnect] = (origin, targets, err) => { + agent.emit('disconnect', origin, [agent, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + agent.emit('connectionError', origin, [agent, ...targets], err) + } + } + + get [kRunning] () { + let ret = 0 + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore next: gc is undeterministic */ + if (client) { + ret += client[kRunning] + } + } + return ret + } + + [kDispatch] (opts, handler) { + let key + if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { + key = String(opts.origin) + } else { + throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') + } + + const ref = this[kClients].get(key) + + let dispatcher = ref ? 
ref.deref() : null + if (!dispatcher) { + dispatcher = this[kFactory](opts.origin, this[kOptions]) + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].set(key, new WeakRef(dispatcher)) + this[kFinalizer].register(dispatcher, key) + } + + return dispatcher.dispatch(opts, handler) + } + + async [kClose] () { + const closePromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + closePromises.push(client.close()) + } + } + + await Promise.all(closePromises) + } + + async [kDestroy] (err) { + const destroyPromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + destroyPromises.push(client.destroy(err)) + } + } + + await Promise.all(destroyPromises) + } +} + +module.exports = Agent diff --git a/lib/api/abort-signal.js b/lib/api/abort-signal.js new file mode 100644 index 0000000..2985c1e --- /dev/null +++ b/lib/api/abort-signal.js @@ -0,0 +1,54 @@ +const { addAbortListener } = require('../core/util') +const { RequestAbortedError } = require('../core/errors') + +const kListener = Symbol('kListener') +const kSignal = Symbol('kSignal') + +function abort (self) { + if (self.abort) { + self.abort() + } else { + self.onError(new RequestAbortedError()) + } +} + +function addSignal (self, signal) { + self[kSignal] = null + self[kListener] = null + + if (!signal) { + return + } + + if (signal.aborted) { + abort(self) + return + } + + self[kSignal] = signal + self[kListener] = () => { + abort(self) + } + + addAbortListener(self[kSignal], self[kListener]) +} + +function removeSignal (self) { + if (!self[kSignal]) { + return + } + + if ('removeEventListener' in self[kSignal]) { + self[kSignal].removeEventListener('abort', self[kListener]) + } else { + self[kSignal].removeListener('abort', self[kListener]) + } + + self[kSignal] = null + self[kListener] = null +} + +module.exports = { + addSignal, + removeSignal +} diff --git a/lib/api/api-connect.js b/lib/api/api-connect.js new file mode 100644 index 0000000..fd2b6ad --- /dev/null +++ b/lib/api/api-connect.js @@ -0,0 +1,104 @@ +'use strict' + +const { AsyncResource } = require('async_hooks') +const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors') +const util = require('../core/util') +const { addSignal, removeSignal } = require('./abort-signal') + +class ConnectHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + const { signal, opaque, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_CONNECT') + + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.callback = callback + this.abort = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders () { + throw new SocketError('bad connect', null) + } + + onUpgrade (statusCode, rawHeaders, socket) { 
+ const { callback, opaque, context } = this + + removeSignal(this) + + this.callback = null + + let headers = rawHeaders + // Indicates is an HTTP2Session + if (headers != null) { + headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + } + + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + socket, + opaque, + context + }) + } + + onError (err) { + const { callback, opaque } = this + + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + } +} + +function connect (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + connect.call(this, opts, (err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } + + try { + const connectHandler = new ConnectHandler(opts, callback) + this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = connect diff --git a/lib/api/api-pipeline.js b/lib/api/api-pipeline.js new file mode 100644 index 0000000..af4a180 --- /dev/null +++ b/lib/api/api-pipeline.js @@ -0,0 +1,249 @@ +'use strict' + +const { + Readable, + Duplex, + PassThrough +} = require('stream') +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = require('../core/errors') +const util = require('../core/util') +const { AsyncResource } = require('async_hooks') +const { addSignal, removeSignal } = require('./abort-signal') +const assert = require('assert') + +const kResume = Symbol('resume') + +class PipelineRequest extends Readable { + constructor () { + super({ autoDestroy: true }) + + this[kResume] = null + } + + _read () { + const { [kResume]: resume } = this + + if (resume) { + this[kResume] = null + resume() + } + } + + _destroy (err, callback) { + this._read() + + callback(err) + } +} + +class PipelineResponse extends Readable { + constructor (resume) { + super({ autoDestroy: true }) + this[kResume] = resume + } + + _read () { + this[kResume]() + } + + _destroy (err, callback) { + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } + + callback(err) + } +} + +class PipelineHandler extends AsyncResource { + constructor (opts, handler) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof handler !== 'function') { + throw new InvalidArgumentError('invalid handler') + } + + const { signal, method, opaque, onInfo, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_PIPELINE') + + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.handler = handler + this.abort = null + this.context = null + this.onInfo = onInfo || null + + this.req = new PipelineRequest().on('error', util.nop) + + this.ret = new Duplex({ + readableObjectMode: opts.objectMode, + autoDestroy: true, + read: () => { + const { body } = this + + if (body && body.resume) { + body.resume() + } 
+ }, + write: (chunk, encoding, callback) => { + const { req } = this + + if (req.push(chunk, encoding) || req._readableState.destroyed) { + callback() + } else { + req[kResume] = callback + } + }, + destroy: (err, callback) => { + const { body, req, res, ret, abort } = this + + if (!err && !ret._readableState.endEmitted) { + err = new RequestAbortedError() + } + + if (abort && err) { + abort() + } + + util.destroy(body, err) + util.destroy(req, err) + util.destroy(res, err) + + removeSignal(this) + + callback(err) + } + }).on('prefinish', () => { + const { req } = this + + // Node < 15 does not call _final in same tick. + req.push(null) + }) + + this.res = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + const { ret, res } = this + + assert(!res, 'pipeline cannot be retried') + + if (ret.destroyed) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume) { + const { opaque, handler, context } = this + + if (statusCode < 200) { + if (this.onInfo) { + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.onInfo({ statusCode, headers }) + } + return + } + + this.res = new PipelineResponse(resume) + + let body + try { + this.handler = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + body = this.runInAsyncScope(handler, null, { + statusCode, + headers, + opaque, + body: this.res, + context + }) + } catch (err) { + this.res.on('error', util.nop) + throw err + } + + if (!body || typeof body.on !== 'function') { + throw new InvalidReturnValueError('expected Readable') + } + + body + .on('data', (chunk) => { + const { ret, body } = this + + if (!ret.push(chunk) && body.pause) { + body.pause() + } + }) + .on('error', (err) => { + const { ret } = this + + util.destroy(ret, err) + }) + .on('end', () => { + const { ret } = this + + ret.push(null) + }) + .on('close', () => { + const { ret } = this + + if (!ret._readableState.ended) { + util.destroy(ret, new RequestAbortedError()) + } + }) + + this.body = body + } + + onData (chunk) { + const { res } = this + return res.push(chunk) + } + + onComplete (trailers) { + const { res } = this + res.push(null) + } + + onError (err) { + const { ret } = this + this.handler = null + util.destroy(ret, err) + } +} + +function pipeline (opts, handler) { + try { + const pipelineHandler = new PipelineHandler(opts, handler) + this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) + return pipelineHandler.ret + } catch (err) { + return new PassThrough().destroy(err) + } +} + +module.exports = pipeline diff --git a/lib/api/api-request.js b/lib/api/api-request.js new file mode 100644 index 0000000..d4281ce --- /dev/null +++ b/lib/api/api-request.js @@ -0,0 +1,180 @@ +'use strict' + +const Readable = require('./readable') +const { + InvalidArgumentError, + RequestAbortedError +} = require('../core/errors') +const util = require('../core/util') +const { getResolveErrorBodyCallback } = require('./util') +const { AsyncResource } = require('async_hooks') +const { addSignal, removeSignal } = require('./abort-signal') + +class RequestHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts + + try { + if (typeof 
callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { + throw new InvalidArgumentError('invalid highWaterMark') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_REQUEST') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.res = null + this.abort = null + this.body = body + this.trailers = {} + this.context = null + this.onInfo = onInfo || null + this.throwOnError = throwOnError + this.highWaterMark = highWaterMark + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this + + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } + + const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + const body = new Readable({ resume, abort, contentType, highWaterMark }) + + this.callback = null + this.res = body + if (callback !== null) { + if (this.throwOnError && statusCode >= 400) { + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body, contentType, statusCode, statusMessage, headers } + ) + } else { + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + trailers: this.trailers, + opaque, + body, + context + }) + } + } + } + + onData (chunk) { + const { res } = this + return res.push(chunk) + } + + onComplete (trailers) { + const { res } = this + + removeSignal(this) + + util.parseHeaders(trailers, this.trailers) + + res.push(null) + } + + onError (err) { + const { res, callback, body, opaque } = this + + removeSignal(this) + + if (callback) { + // TODO: Does this need queueMicrotask? + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + + if (res) { + this.res = null + // Ensure all queued handlers are invoked before destroying res. + queueMicrotask(() => { + util.destroy(res, err) + }) + } + + if (body) { + this.body = null + util.destroy(body, err) + } + } +} + +function request (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + request.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new RequestHandler(opts, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = request +module.exports.RequestHandler = RequestHandler diff --git a/lib/api/api-stream.js b/lib/api/api-stream.js new file mode 100644 index 0000000..c571a6f --- /dev/null +++ b/lib/api/api-stream.js @@ -0,0 +1,220 @@ +'use strict' + +const { finished, PassThrough } = require('stream') +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = require('../core/errors') +const util = require('../core/util') +const { getResolveErrorBodyCallback } = require('./util') +const { AsyncResource } = require('async_hooks') +const { addSignal, removeSignal } = require('./abort-signal') + +class StreamHandler extends AsyncResource { + constructor (opts, factory, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts + + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('invalid factory') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_STREAM') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.factory = factory + this.callback = callback + this.res = null + this.abort = null + this.context = null + this.trailers = null + this.body = body + this.onInfo = onInfo || null + this.throwOnError = throwOnError || false + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { factory, opaque, context, callback, responseHeaders } = this + + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } + + this.factory = null + + let res + + if (this.throwOnError && statusCode >= 400) { + const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + res = new PassThrough() + + this.callback = null + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body: res, contentType, statusCode, statusMessage, headers } + ) + } else { + if (factory === null) { + return + } + + res = this.runInAsyncScope(factory, null, { + statusCode, + headers, + opaque, + context + }) + + if ( + !res || + typeof res.write !== 'function' || + typeof res.end !== 'function' || + typeof res.on !== 'function' + ) { + throw new InvalidReturnValueError('expected Writable') + } + + // TODO: Avoid finished. It registers an unnecessary amount of listeners. + finished(res, { readable: false }, (err) => { + const { callback, res, opaque, trailers, abort } = this + + this.res = null + if (err || !res.readable) { + util.destroy(res, err) + } + + this.callback = null + this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) + + if (err) { + abort() + } + }) + } + + res.on('drain', resume) + + this.res = res + + const needDrain = res.writableNeedDrain !== undefined + ? res.writableNeedDrain + : res._writableState && res._writableState.needDrain + + return needDrain !== true + } + + onData (chunk) { + const { res } = this + + return res ? res.write(chunk) : true + } + + onComplete (trailers) { + const { res } = this + + removeSignal(this) + + if (!res) { + return + } + + this.trailers = util.parseHeaders(trailers) + + res.end() + } + + onError (err) { + const { res, callback, opaque, body } = this + + removeSignal(this) + + this.factory = null + + if (res) { + this.res = null + util.destroy(res, err) + } else if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + + if (body) { + this.body = null + util.destroy(body, err) + } + } +} + +function stream (opts, factory, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new StreamHandler(opts, factory, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = stream diff --git a/lib/api/api-upgrade.js b/lib/api/api-upgrade.js new file mode 100644 index 0000000..ef783e8 --- /dev/null +++ b/lib/api/api-upgrade.js @@ -0,0 +1,105 @@ +'use strict' + +const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors') +const { AsyncResource } = require('async_hooks') +const util = require('../core/util') +const { addSignal, removeSignal } = require('./abort-signal') +const assert = require('assert') + +class UpgradeHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + const { signal, opaque, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_UPGRADE') + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.abort = null + this.context = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = null + } + + onHeaders () { + throw new SocketError('bad upgrade', null) + } + + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this + + assert.strictEqual(statusCode, 101) + + removeSignal(this) + + this.callback = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.runInAsyncScope(callback, null, null, { + headers, + socket, + opaque, + context + }) + } + + onError (err) { + const { callback, opaque } = this + + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + } +} + +function upgrade (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + upgrade.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + const upgradeHandler = new UpgradeHandler(opts, callback) + this.dispatch({ + ...opts, + method: opts.method || 'GET', + upgrade: opts.protocol || 'Websocket' + }, upgradeHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = upgrade diff --git a/lib/api/index.js b/lib/api/index.js new file mode 100644 index 0000000..8983a5e --- /dev/null +++ b/lib/api/index.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports.request = require('./api-request') +module.exports.stream = require('./api-stream') +module.exports.pipeline = require('./api-pipeline') +module.exports.upgrade = require('./api-upgrade') +module.exports.connect = require('./api-connect') diff --git a/lib/api/readable.js b/lib/api/readable.js new file mode 100644 index 0000000..5269dfa --- /dev/null +++ b/lib/api/readable.js @@ -0,0 +1,322 @@ +// Ported from https://github.com/nodejs/undici/pull/907 + +'use strict' + +const assert = require('assert') +const { Readable } = require('stream') +const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = require('../core/errors') +const util = require('../core/util') +const { ReadableStreamFrom, toUSVString } = require('../core/util') + +let Blob + +const kConsume = Symbol('kConsume') +const kReading = Symbol('kReading') +const kBody = Symbol('kBody') +const kAbort = Symbol('abort') +const kContentType = Symbol('kContentType') + +const noop = () => {} + +module.exports = class BodyReadable extends Readable { + constructor ({ + resume, + abort, + contentType = '', + highWaterMark = 64 * 1024 // Same as nodejs fs streams. + }) { + super({ + autoDestroy: true, + read: resume, + highWaterMark + }) + + this._readableState.dataEmitted = false + + this[kAbort] = abort + this[kConsume] = null + this[kBody] = null + this[kContentType] = contentType + + // Is stream being consumed through Readable API? + // This is an optimization so that we avoid checking + // for 'data' and 'readable' listeners in the hot path + // inside push(). + this[kReading] = false + } + + destroy (err) { + if (this.destroyed) { + // Node < 16 + return this + } + + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } + + if (err) { + this[kAbort]() + } + + return super.destroy(err) + } + + emit (ev, ...args) { + if (ev === 'data') { + // Node < 16.7 + this._readableState.dataEmitted = true + } else if (ev === 'error') { + // Node < 16 + this._readableState.errorEmitted = true + } + return super.emit(ev, ...args) + } + + on (ev, ...args) { + if (ev === 'data' || ev === 'readable') { + this[kReading] = true + } + return super.on(ev, ...args) + } + + addListener (ev, ...args) { + return this.on(ev, ...args) + } + + off (ev, ...args) { + const ret = super.off(ev, ...args) + if (ev === 'data' || ev === 'readable') { + this[kReading] = ( + this.listenerCount('data') > 0 || + this.listenerCount('readable') > 0 + ) + } + return ret + } + + removeListener (ev, ...args) { + return this.off(ev, ...args) + } + + push (chunk) { + if (this[kConsume] && chunk !== null && this.readableLength === 0) { + consumePush(this[kConsume], chunk) + return this[kReading] ? 
super.push(chunk) : true + } + return super.push(chunk) + } + + // https://fetch.spec.whatwg.org/#dom-body-text + async text () { + return consume(this, 'text') + } + + // https://fetch.spec.whatwg.org/#dom-body-json + async json () { + return consume(this, 'json') + } + + // https://fetch.spec.whatwg.org/#dom-body-blob + async blob () { + return consume(this, 'blob') + } + + // https://fetch.spec.whatwg.org/#dom-body-arraybuffer + async arrayBuffer () { + return consume(this, 'arrayBuffer') + } + + // https://fetch.spec.whatwg.org/#dom-body-formdata + async formData () { + // TODO: Implement. + throw new NotSupportedError() + } + + // https://fetch.spec.whatwg.org/#dom-body-bodyused + get bodyUsed () { + return util.isDisturbed(this) + } + + // https://fetch.spec.whatwg.org/#dom-body-body + get body () { + if (!this[kBody]) { + this[kBody] = ReadableStreamFrom(this) + if (this[kConsume]) { + // TODO: Is this the best way to force a lock? + this[kBody].getReader() // Ensure stream is locked. + assert(this[kBody].locked) + } + } + return this[kBody] + } + + dump (opts) { + let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 + const signal = opts && opts.signal + + if (signal) { + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') + } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) + } + } + + if (this.closed) { + return Promise.resolve(null) + } + + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) + } +} + +// https://streams.spec.whatwg.org/#readablestream-locked +function isLocked (self) { + // Consume is an implicit lock. 
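// A minimal usage sketch of the body mixin implemented above, assuming a
// hypothetical local server at http://localhost:3000; the routes are made up.
// The body may be consumed exactly once, either through a mixin method, the
// web-stream `body` getter, or the plain Readable interface.
const { request } = require('undici')

async function main () {
  const { statusCode, headers, body } = await request('http://localhost:3000/users')
  console.log(statusCode, headers['content-type'])
  const users = await body.json() // likewise .text(), .blob(), .arrayBuffer()
  console.log(users)

  // A response we do not care about should still be drained so the
  // connection can be reused; dump() reads and discards up to the limit.
  const res = await request('http://localhost:3000/ignored')
  await res.body.dump({ limit: 1024 })
}

main().catch(console.error)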
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume] +} + +// https://fetch.spec.whatwg.org/#body-unusable +function isUnusable (self) { + return util.isDisturbed(self) || isLocked(self) +} + +async function consume (stream, type) { + if (isUnusable(stream)) { + throw new TypeError('unusable') + } + + assert(!stream[kConsume]) + + return new Promise((resolve, reject) => { + stream[kConsume] = { + type, + stream, + resolve, + reject, + length: 0, + body: [] + } + + stream + .on('error', function (err) { + consumeFinish(this[kConsume], err) + }) + .on('close', function () { + if (this[kConsume].body !== null) { + consumeFinish(this[kConsume], new RequestAbortedError()) + } + }) + + process.nextTick(consumeStart, stream[kConsume]) + }) +} + +function consumeStart (consume) { + if (consume.body === null) { + return + } + + const { _readableState: state } = consume.stream + + for (const chunk of state.buffer) { + consumePush(consume, chunk) + } + + if (state.endEmitted) { + consumeEnd(this[kConsume]) + } else { + consume.stream.on('end', function () { + consumeEnd(this[kConsume]) + }) + } + + consume.stream.resume() + + while (consume.stream.read() != null) { + // Loop + } +} + +function consumeEnd (consume) { + const { type, body, resolve, stream, length } = consume + + try { + if (type === 'text') { + resolve(toUSVString(Buffer.concat(body))) + } else if (type === 'json') { + resolve(JSON.parse(Buffer.concat(body))) + } else if (type === 'arrayBuffer') { + const dst = new Uint8Array(length) + + let pos = 0 + for (const buf of body) { + dst.set(buf, pos) + pos += buf.byteLength + } + + resolve(dst.buffer) + } else if (type === 'blob') { + if (!Blob) { + Blob = require('buffer').Blob + } + resolve(new Blob(body, { type: stream[kContentType] })) + } + + consumeFinish(consume) + } catch (err) { + stream.destroy(err) + } +} + +function consumePush (consume, chunk) { + consume.length += chunk.length + consume.body.push(chunk) +} + +function consumeFinish (consume, err) { + if (consume.body === null) { + return + } + + if (err) { + consume.reject(err) + } else { + consume.resolve() + } + + consume.type = null + consume.stream = null + consume.resolve = null + consume.reject = null + consume.length = 0 + consume.body = null +} diff --git a/lib/api/util.js b/lib/api/util.js new file mode 100644 index 0000000..bffd702 --- /dev/null +++ b/lib/api/util.js @@ -0,0 +1,46 @@ +const assert = require('assert') +const { + ResponseStatusCodeError +} = require('../core/errors') +const { toUSVString } = require('../core/util') + +async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { + assert(body) + + let chunks = [] + let limit = 0 + + for await (const chunk of body) { + chunks.push(chunk) + limit += chunk.length + if (limit > 128 * 1024) { + chunks = null + break + } + } + + if (statusCode === 204 || !contentType || !chunks) { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) + return + } + + try { + if (contentType.startsWith('application/json')) { + const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + + if (contentType.startsWith('text/')) { + const payload = toUSVString(Buffer.concat(chunks)) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + } catch (err) { + // Process in a fallback if error + } + + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) +} + +module.exports = { getResolveErrorBodyCallback } diff --git a/lib/balanced-pool.js b/lib/balanced-pool.js new file mode 100644 index 0000000..10bc6a4 --- /dev/null +++ b/lib/balanced-pool.js @@ -0,0 +1,190 @@ +'use strict' + +const { + BalancedPoolMissingUpstreamError, + InvalidArgumentError +} = require('./core/errors') +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} = require('./pool-base') +const Pool = require('./pool') +const { kUrl, kInterceptors } = require('./core/symbols') +const { parseOrigin } = require('./core/util') +const kFactory = Symbol('factory') + +const kOptions = Symbol('options') +const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') +const kCurrentWeight = Symbol('kCurrentWeight') +const kIndex = Symbol('kIndex') +const kWeight = Symbol('kWeight') +const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') +const kErrorPenalty = Symbol('kErrorPenalty') + +function getGreatestCommonDivisor (a, b) { + if (b === 0) return a + return getGreatestCommonDivisor(b, a % b) +} + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class BalancedPool extends PoolBase { + constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { + super() + + this[kOptions] = opts + this[kIndex] = -1 + this[kCurrentWeight] = 0 + + this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 + this[kErrorPenalty] = this[kOptions].errorPenalty || 15 + + if (!Array.isArray(upstreams)) { + upstreams = [upstreams] + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) + ? opts.interceptors.BalancedPool + : [] + this[kFactory] = factory + + for (const upstream of upstreams) { + this.addUpstream(upstream) + } + this._updateBalancedPoolStats() + } + + addUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + if (this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + ))) { + return this + } + const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) + + this[kAddClient](pool) + pool.on('connect', () => { + pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) + }) + + pool.on('connectionError', () => { + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + }) + + pool.on('disconnect', (...args) => { + const err = args[2] + if (err && err.code === 'UND_ERR_SOCKET') { + // decrease the weight of the pool. 
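// A minimal sketch of using the BalancedPool defined above; the upstream
// origins are hypothetical. Each upstream starts at the maximum weight and is
// penalised on connection errors, as the event handlers here show.
const { BalancedPool } = require('undici')

async function main () {
  const pool = new BalancedPool([
    'http://10.0.0.1:3000',
    'http://10.0.0.2:3000'
  ], { maxWeightPerServer: 100, errorPenalty: 15 })

  pool.addUpstream('http://10.0.0.3:3000') // upstreams can also be added later

  const { statusCode, body } = await pool.request({ path: '/health', method: 'GET' })
  console.log(statusCode, await body.text())

  await pool.close()
}

main().catch(console.error)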
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + } + }) + + for (const client of this[kClients]) { + client[kWeight] = this[kMaxWeightPerServer] + } + + this._updateBalancedPoolStats() + + return this + } + + _updateBalancedPoolStats () { + this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + } + + removeUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + const pool = this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + )) + + if (pool) { + this[kRemoveClient](pool) + } + + return this + } + + get upstreams () { + return this[kClients] + .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) + .map((p) => p[kUrl].origin) + } + + [kGetDispatcher] () { + // We validate that pools is greater than 0, + // otherwise we would have to wait until an upstream + // is added, which might never happen. + if (this[kClients].length === 0) { + throw new BalancedPoolMissingUpstreamError() + } + + const dispatcher = this[kClients].find(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + + if (!dispatcher) { + return + } + + const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) + + if (allClientsBusy) { + return + } + + let counter = 0 + + let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) + + while (counter++ < this[kClients].length) { + this[kIndex] = (this[kIndex] + 1) % this[kClients].length + const pool = this[kClients][this[kIndex]] + + // find pool index with the largest weight + if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { + maxWeightIndex = this[kIndex] + } + + // decrease the current weight every `this[kClients].length`. + if (this[kIndex] === 0) { + // Set the current weight to the next lower weight. 
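// A standalone sketch of the interleaved weighted round-robin selection used
// by kGetDispatcher below, stripped of the drain/closed checks; the server
// names and weights are hypothetical. The current weight steps down by the
// GCD of all weights, so a server with weight 100 is picked about twice as
// often as one with weight 50.
function gcd (a, b) { return b === 0 ? a : gcd(b, a % b) }

function makePicker (servers) {
  const step = servers.map(s => s.weight).reduce(gcd, 0)
  let index = -1
  let currentWeight = 0
  return function pick () {
    while (true) {
      index = (index + 1) % servers.length
      if (index === 0) {
        currentWeight -= step
        if (currentWeight <= 0) currentWeight = Math.max(...servers.map(s => s.weight))
      }
      if (servers[index].weight >= currentWeight) return servers[index]
    }
  }
}

const pick = makePicker([{ name: 'a', weight: 100 }, { name: 'b', weight: 50 }])
console.log([pick().name, pick().name, pick().name].join(' ')) // "a a b"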
+ this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] + + if (this[kCurrentWeight] <= 0) { + this[kCurrentWeight] = this[kMaxWeightPerServer] + } + } + if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { + return pool + } + } + + this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] + this[kIndex] = maxWeightIndex + return this[kClients][maxWeightIndex] + } +} + +module.exports = BalancedPool diff --git a/lib/cache/cache.js b/lib/cache/cache.js new file mode 100644 index 0000000..9b31108 --- /dev/null +++ b/lib/cache/cache.js @@ -0,0 +1,838 @@ +'use strict' + +const { kConstruct } = require('./symbols') +const { urlEquals, fieldValues: getFieldValues } = require('./util') +const { kEnumerableProperty, isDisturbed } = require('../core/util') +const { kHeadersList } = require('../core/symbols') +const { webidl } = require('../fetch/webidl') +const { Response, cloneResponse } = require('../fetch/response') +const { Request } = require('../fetch/request') +const { kState, kHeaders, kGuard, kRealm } = require('../fetch/symbols') +const { fetching } = require('../fetch/index') +const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = require('../fetch/util') +const assert = require('assert') +const { getGlobalDispatcher } = require('../global') + +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation + * @typedef {Object} CacheBatchOperation + * @property {'delete' | 'put'} type + * @property {any} request + * @property {any} response + * @property {import('../../types/cache').CacheQueryOptions} options + */ + +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list + * @typedef {[any, any][]} requestResponseList + */ + +class Cache { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list + * @type {requestResponseList} + */ + #relevantRequestResponseList + + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } + + this.#relevantRequestResponseList = arguments[1] + } + + async match (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) + + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + const p = await this.matchAll(request, options) + + if (p.length === 0) { + return + } + + return p[0] + } + + async matchAll (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) + + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + // 1. + let r = null + + // 2. + if (request !== undefined) { + if (request instanceof Request) { + // 2.1.1 + r = request[kState] + + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { + // 2.2.1 + r = new Request(request)[kState] + } + } + + // 5. + // 5.1 + const responses = [] + + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + responses.push(requestResponse[1]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + responses.push(requestResponse[1]) + } + } + + // 5.4 + // We don't implement CORs so we don't need to loop over the responses, yay! 
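// A small sketch of the read path implemented by match()/matchAll() above,
// assuming the package's top-level `caches` (CacheStorage) and `Response`
// exports; the URL and payload are hypothetical.
const { caches, Response } = require('undici')

async function readFromCache () {
  const cache = await caches.open('v1')
  await cache.put('https://example.com/data.json', new Response('{"ok":true}'))

  const hit = await cache.match('https://example.com/data.json')
  if (hit) console.log(await hit.text()) // '{"ok":true}'

  const all = await cache.matchAll(undefined, { ignoreSearch: true })
  console.log(all.length) // every cached response
}

readFromCache().catch(console.error)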
+ + // 5.5.1 + const responseList = [] + + // 5.5.2 + for (const response of responses) { + // 5.5.2.1 + const responseObject = new Response(response.body?.source ?? null) + const body = responseObject[kState].body + responseObject[kState] = response + responseObject[kState].body = body + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + + responseList.push(responseObject) + } + + // 6. + return Object.freeze(responseList) + } + + async add (request) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) + + request = webidl.converters.RequestInfo(request) + + // 1. + const requests = [request] + + // 2. + const responseArrayPromise = this.addAll(requests) + + // 3. + return await responseArrayPromise + } + + async addAll (requests) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) + + requests = webidl.converters['sequence'](requests) + + // 1. + const responsePromises = [] + + // 2. + const requestList = [] + + // 3. + for (const request of requests) { + if (typeof request === 'string') { + continue + } + + // 3.1 + const r = request[kState] + + // 3.2 + if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme when method is not GET.' + }) + } + } + + // 4. + /** @type {ReturnType[]} */ + const fetchControllers = [] + + // 5. + for (const request of requests) { + // 5.1 + const r = new Request(request)[kState] + + // 5.2 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme.' + }) + } + + // 5.4 + r.initiator = 'fetch' + r.destination = 'subresource' + + // 5.5 + requestList.push(r) + + // 5.6 + const responsePromise = createDeferredPromise() + + // 5.7 + fetchControllers.push(fetching({ + request: r, + dispatcher: getGlobalDispatcher(), + processResponse (response) { + // 1. + if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Received an invalid status code or the request failed.' + })) + } else if (response.headersList.contains('vary')) { // 2. + // 2.1 + const fieldValues = getFieldValues(response.headersList.get('vary')) + + // 2.2 + for (const fieldValue of fieldValues) { + // 2.2.1 + if (fieldValue === '*') { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'invalid vary field value' + })) + + for (const controller of fetchControllers) { + controller.abort() + } + + return + } + } + } + }, + processResponseEndOfBody (response) { + // 1. + if (response.aborted) { + responsePromise.reject(new DOMException('aborted', 'AbortError')) + return + } + + // 2. + responsePromise.resolve(response) + } + })) + + // 5.8 + responsePromises.push(responsePromise.promise) + } + + // 6. + const p = Promise.all(responsePromises) + + // 7. 
+ const responses = await p + + // 7.1 + const operations = [] + + // 7.2 + let index = 0 + + // 7.3 + for (const response of responses) { + // 7.3.1 + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 7.3.2 + request: requestList[index], // 7.3.3 + response // 7.3.4 + } + + operations.push(operation) // 7.3.5 + + index++ // 7.3.6 + } + + // 7.5 + const cacheJobPromise = createDeferredPromise() + + // 7.6.1 + let errorData = null + + // 7.6.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + // 7.6.3 + queueMicrotask(() => { + // 7.6.3.1 + if (errorData === null) { + cacheJobPromise.resolve(undefined) + } else { + // 7.6.3.2 + cacheJobPromise.reject(errorData) + } + }) + + // 7.7 + return cacheJobPromise.promise + } + + async put (request, response) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) + + request = webidl.converters.RequestInfo(request) + response = webidl.converters.Response(response) + + // 1. + let innerRequest = null + + // 2. + if (request instanceof Request) { + innerRequest = request[kState] + } else { // 3. + innerRequest = new Request(request)[kState] + } + + // 4. + if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Expected an http/s scheme when method is not GET' + }) + } + + // 5. + const innerResponse = response[kState] + + // 6. + if (innerResponse.status === 206) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got 206 status' + }) + } + + // 7. + if (innerResponse.headersList.contains('vary')) { + // 7.1. + const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) + + // 7.2. + for (const fieldValue of fieldValues) { + // 7.2.1 + if (fieldValue === '*') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got * vary field value' + }) + } + } + } + + // 8. + if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Response body is locked or disturbed' + }) + } + + // 9. + const clonedResponse = cloneResponse(innerResponse) + + // 10. + const bodyReadPromise = createDeferredPromise() + + // 11. + if (innerResponse.body != null) { + // 11.1 + const stream = innerResponse.body.stream + + // 11.2 + const reader = stream.getReader() + + // 11.3 + readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) + } else { + bodyReadPromise.resolve(undefined) + } + + // 12. + /** @type {CacheBatchOperation[]} */ + const operations = [] + + // 13. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 14. + request: innerRequest, // 15. + response: clonedResponse // 16. + } + + // 17. + operations.push(operation) + + // 19. 
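// The validation branches of put() above are observable from the outside:
// only GET requests are accepted and partial (206) responses are refused.
// A small sketch, assuming the top-level `caches`, `Request` and `Response`
// exports; the URLs are hypothetical.
const { caches, Request, Response } = require('undici')

async function demoPutValidation () {
  const cache = await caches.open('v1')

  await cache.put('https://example.com/ok', new Response('fine')) // stored

  await cache.put(
    new Request('https://example.com/post', { method: 'POST' }),
    new Response('nope')
  ).catch(err => console.error(err.message)) // rejected: method is not GET

  await cache.put('https://example.com/partial', new Response('part', { status: 206 }))
    .catch(err => console.error(err.message)) // rejected: 206 status
}

demoPutValidation().catch(console.error)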
+ const bytes = await bodyReadPromise.promise + + if (clonedResponse.body != null) { + clonedResponse.body.source = bytes + } + + // 19.1 + const cacheJobPromise = createDeferredPromise() + + // 19.2.1 + let errorData = null + + // 19.2.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + // 19.2.3 + queueMicrotask(() => { + // 19.2.3.1 + if (errorData === null) { + cacheJobPromise.resolve() + } else { // 19.2.3.2 + cacheJobPromise.reject(errorData) + } + }) + + return cacheJobPromise.promise + } + + async delete (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) + + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + /** + * @type {Request} + */ + let r = null + + if (request instanceof Request) { + r = request[kState] + + if (r.method !== 'GET' && !options.ignoreMethod) { + return false + } + } else { + assert(typeof request === 'string') + + r = new Request(request)[kState] + } + + /** @type {CacheBatchOperation[]} */ + const operations = [] + + /** @type {CacheBatchOperation} */ + const operation = { + type: 'delete', + request: r, + options + } + + operations.push(operation) + + const cacheJobPromise = createDeferredPromise() + + let errorData = null + let requestResponses + + try { + requestResponses = this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + queueMicrotask(() => { + if (errorData === null) { + cacheJobPromise.resolve(!!requestResponses?.length) + } else { + cacheJobPromise.reject(errorData) + } + }) + + return cacheJobPromise.promise + } + + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys + * @param {any} request + * @param {import('../../types/cache').CacheQueryOptions} options + * @returns {readonly Request[]} + */ + async keys (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) + + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + // 1. + let r = null + + // 2. + if (request !== undefined) { + // 2.1 + if (request instanceof Request) { + // 2.1.1 + r = request[kState] + + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { // 2.2 + r = new Request(request)[kState] + } + } + + // 4. + const promise = createDeferredPromise() + + // 5. 
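// A short sketch of keys() and delete() as implemented here, again assuming
// the top-level `caches` export; the URLs are hypothetical.
const { caches, Response } = require('undici')

async function listAndEvict () {
  const cache = await caches.open('v1')
  await cache.put('https://example.com/a', new Response('a'))
  await cache.put('https://example.com/b', new Response('b'))

  for (const req of await cache.keys()) {
    console.log(req.url) // the cached Request objects, returned as a frozen list
  }

  console.log(await cache.delete('https://example.com/a')) // true
  console.log(await cache.delete('https://example.com/missing')) // false
}

listAndEvict().catch(console.error)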
+ // 5.1 + const requests = [] + + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + // 5.2.1.1 + requests.push(requestResponse[0]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + // 5.3.2.1 + requests.push(requestResponse[0]) + } + } + + // 5.4 + queueMicrotask(() => { + // 5.4.1 + const requestList = [] + + // 5.4.2 + for (const request of requests) { + const requestObject = new Request('https://a') + requestObject[kState] = request + requestObject[kHeaders][kHeadersList] = request.headersList + requestObject[kHeaders][kGuard] = 'immutable' + requestObject[kRealm] = request.client + + // 5.4.2.1 + requestList.push(requestObject) + } + + // 5.4.3 + promise.resolve(Object.freeze(requestList)) + }) + + return promise.promise + } + + /** + * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm + * @param {CacheBatchOperation[]} operations + * @returns {requestResponseList} + */ + #batchCacheOperations (operations) { + // 1. + const cache = this.#relevantRequestResponseList + + // 2. + const backupCache = [...cache] + + // 3. + const addedItems = [] + + // 4.1 + const resultList = [] + + try { + // 4.2 + for (const operation of operations) { + // 4.2.1 + if (operation.type !== 'delete' && operation.type !== 'put') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'operation type does not match "delete" or "put"' + }) + } + + // 4.2.2 + if (operation.type === 'delete' && operation.response != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'delete operation should not have an associated response' + }) + } + + // 4.2.3 + if (this.#queryCache(operation.request, operation.options, addedItems).length) { + throw new DOMException('???', 'InvalidStateError') + } + + // 4.2.4 + let requestResponses + + // 4.2.5 + if (operation.type === 'delete') { + // 4.2.5.1 + requestResponses = this.#queryCache(operation.request, operation.options) + + // TODO: the spec is wrong, this is needed to pass WPTs + if (requestResponses.length === 0) { + return [] + } + + // 4.2.5.2 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) + + // 4.2.5.2.1 + cache.splice(idx, 1) + } + } else if (operation.type === 'put') { // 4.2.6 + // 4.2.6.1 + if (operation.response == null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'put operation should have an associated response' + }) + } + + // 4.2.6.2 + const r = operation.request + + // 4.2.6.3 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'expected http or https scheme' + }) + } + + // 4.2.6.4 + if (r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'not get method' + }) + } + + // 4.2.6.5 + if (operation.options != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'options must not be defined' + }) + } + + // 4.2.6.6 + requestResponses = this.#queryCache(operation.request) + + // 4.2.6.7 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) + + // 4.2.6.7.1 + cache.splice(idx, 1) + } + + // 4.2.6.8 + cache.push([operation.request, operation.response]) + + 
// 4.2.6.10 + addedItems.push([operation.request, operation.response]) + } + + // 4.2.7 + resultList.push([operation.request, operation.response]) + } + + // 4.3 + return resultList + } catch (e) { // 5. + // 5.1 + this.#relevantRequestResponseList.length = 0 + + // 5.2 + this.#relevantRequestResponseList = backupCache + + // 5.3 + throw e + } + } + + /** + * @see https://w3c.github.io/ServiceWorker/#query-cache + * @param {any} requestQuery + * @param {import('../../types/cache').CacheQueryOptions} options + * @param {requestResponseList} targetStorage + * @returns {requestResponseList} + */ + #queryCache (requestQuery, options, targetStorage) { + /** @type {requestResponseList} */ + const resultList = [] + + const storage = targetStorage ?? this.#relevantRequestResponseList + + for (const requestResponse of storage) { + const [cachedRequest, cachedResponse] = requestResponse + if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { + resultList.push(requestResponse) + } + } + + return resultList + } + + /** + * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm + * @param {any} requestQuery + * @param {any} request + * @param {any | null} response + * @param {import('../../types/cache').CacheQueryOptions | undefined} options + * @returns {boolean} + */ + #requestMatchesCachedItem (requestQuery, request, response = null, options) { + // if (options?.ignoreMethod === false && request.method === 'GET') { + // return false + // } + + const queryURL = new URL(requestQuery.url) + + const cachedURL = new URL(request.url) + + if (options?.ignoreSearch) { + cachedURL.search = '' + + queryURL.search = '' + } + + if (!urlEquals(queryURL, cachedURL, true)) { + return false + } + + if ( + response == null || + options?.ignoreVary || + !response.headersList.contains('vary') + ) { + return true + } + + const fieldValues = getFieldValues(response.headersList.get('vary')) + + for (const fieldValue of fieldValues) { + if (fieldValue === '*') { + return false + } + + const requestValue = request.headersList.get(fieldValue) + const queryValue = requestQuery.headersList.get(fieldValue) + + // If one has the header and the other doesn't, or one has + // a different value than the other, return false + if (requestValue !== queryValue) { + return false + } + } + + return true + } +} + +Object.defineProperties(Cache.prototype, { + [Symbol.toStringTag]: { + value: 'Cache', + configurable: true + }, + match: kEnumerableProperty, + matchAll: kEnumerableProperty, + add: kEnumerableProperty, + addAll: kEnumerableProperty, + put: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +const cacheQueryOptionConverters = [ + { + key: 'ignoreSearch', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreMethod', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreVary', + converter: webidl.converters.boolean, + defaultValue: false + } +] + +webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) + +webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ + ...cacheQueryOptionConverters, + { + key: 'cacheName', + converter: webidl.converters.DOMString + } +]) + +webidl.converters.Response = webidl.interfaceConverter(Response) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.RequestInfo +) + +module.exports = { + Cache +} diff --git a/lib/cache/cachestorage.js 
b/lib/cache/cachestorage.js new file mode 100644 index 0000000..7e7f0cf --- /dev/null +++ b/lib/cache/cachestorage.js @@ -0,0 +1,144 @@ +'use strict' + +const { kConstruct } = require('./symbols') +const { Cache } = require('./cache') +const { webidl } = require('../fetch/webidl') +const { kEnumerableProperty } = require('../core/util') + +class CacheStorage { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map + * @type {Map} + */ + async has (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1.1 + // 2.2 + return this.#caches.has(cacheName) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open + * @param {string} cacheName + * @returns {Promise} + */ + async open (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1 + if (this.#caches.has(cacheName)) { + // await caches.open('v1') !== await caches.open('v1') + + // 2.1.1 + const cache = this.#caches.get(cacheName) + + // 2.1.1.1 + return new Cache(kConstruct, cache) + } + + // 2.2 + const cache = [] + + // 2.3 + this.#caches.set(cacheName, cache) + + // 2.4 + return new Cache(kConstruct, cache) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete + * @param {string} cacheName + * @returns {Promise} + */ + async delete (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) + + cacheName = webidl.converters.DOMString(cacheName) + + return this.#caches.delete(cacheName) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys + * @returns {string[]} + */ + async keys () { + webidl.brandCheck(this, CacheStorage) + + // 2.1 + const keys = this.#caches.keys() + + // 2.2 + return [...keys] + } +} + +Object.defineProperties(CacheStorage.prototype, { + [Symbol.toStringTag]: { + value: 'CacheStorage', + configurable: true + }, + match: kEnumerableProperty, + has: kEnumerableProperty, + open: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +module.exports = { + CacheStorage +} diff --git a/lib/cache/symbols.js b/lib/cache/symbols.js new file mode 100644 index 0000000..40448d6 --- /dev/null +++ b/lib/cache/symbols.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = { + kConstruct: require('../core/symbols').kConstruct +} diff --git a/lib/cache/util.js b/lib/cache/util.js new file mode 100644 index 0000000..44d52b7 --- /dev/null +++ b/lib/cache/util.js @@ -0,0 +1,49 @@ +'use strict' + +const assert = require('assert') +const { URLSerializer } = require('../fetch/dataURL') +const { isValidHeaderName } = require('../fetch/util') + +/** + * @see https://url.spec.whatwg.org/#concept-url-equals + * @param {URL} A + * @param {URL} B + * @param {boolean | undefined} excludeFragment + * @returns {boolean} + */ +function urlEquals (A, B, excludeFragment = false) { + const serializedA = URLSerializer(A, excludeFragment) + + const serializedB = URLSerializer(B, excludeFragment) + + return serializedA === serializedB +} + +/** + * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 + * @param {string} header + */ +function fieldValues (header) { + assert(header !== null) 
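// A standalone sketch of the Vary-header splitting that fieldValues() above
// performs, for illustration only: the real helper additionally skips tokens
// that are not valid header names via isValidHeaderName. The sample header
// value is hypothetical.
function splitVary (header) {
  return header
    .split(',')
    .map(v => v.trim())
    .filter(v => v.length > 0)
}

console.log(splitVary('Accept-Encoding, User-Agent,,')) // [ 'Accept-Encoding', 'User-Agent' ]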
+ + const values = [] + + for (let value of header.split(',')) { + value = value.trim() + + if (!value.length) { + continue + } else if (!isValidHeaderName(value)) { + continue + } + + values.push(value) + } + + return values +} + +module.exports = { + urlEquals, + fieldValues +} diff --git a/lib/client.js b/lib/client.js new file mode 100644 index 0000000..22cb390 --- /dev/null +++ b/lib/client.js @@ -0,0 +1,2283 @@ +// @ts-check + +'use strict' + +/* global WebAssembly */ + +const assert = require('assert') +const net = require('net') +const http = require('http') +const { pipeline } = require('stream') +const util = require('./core/util') +const timers = require('./timers') +const Request = require('./core/request') +const DispatcherBase = require('./dispatcher-base') +const { + RequestContentLengthMismatchError, + ResponseContentLengthMismatchError, + InvalidArgumentError, + RequestAbortedError, + HeadersTimeoutError, + HeadersOverflowError, + SocketError, + InformationalError, + BodyTimeoutError, + HTTPParserError, + ResponseExceededMaxSizeError, + ClientDestroyedError +} = require('./core/errors') +const buildConnector = require('./core/connect') +const { + kUrl, + kReset, + kServerName, + kClient, + kBusy, + kParser, + kConnect, + kBlocking, + kResuming, + kRunning, + kPending, + kSize, + kWriting, + kQueue, + kConnected, + kConnecting, + kNeedDrain, + kNoRef, + kKeepAliveDefaultTimeout, + kHostHeader, + kPendingIdx, + kRunningIdx, + kError, + kPipelining, + kSocket, + kKeepAliveTimeoutValue, + kMaxHeadersSize, + kKeepAliveMaxTimeout, + kKeepAliveTimeoutThreshold, + kHeadersTimeout, + kBodyTimeout, + kStrictContentLength, + kConnector, + kMaxRedirections, + kMaxRequests, + kCounter, + kClose, + kDestroy, + kDispatch, + kInterceptors, + kLocalAddress, + kMaxResponseSize, + kHTTPConnVersion, + // HTTP2 + kHost, + kHTTP2Session, + kHTTP2SessionState, + kHTTP2BuildRequest, + kHTTP2CopyHeaders, + kHTTP1BuildRequest +} = require('./core/symbols') + +/** @type {import('http2')} */ +let http2 +try { + http2 = require('http2') +} catch { + // @ts-ignore + http2 = { constants: {} } +} + +const { + constants: { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_SCHEME, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_EXPECT, + HTTP2_HEADER_STATUS + } +} = http2 + +// Experimental +let h2ExperimentalWarned = false + +const FastBuffer = Buffer[Symbol.species] + +const kClosedResolve = Symbol('kClosedResolve') + +const channels = {} + +try { + const diagnosticsChannel = require('diagnostics_channel') + channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') + channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') + channels.connectError = diagnosticsChannel.channel('undici:client:connectError') + channels.connected = diagnosticsChannel.channel('undici:client:connected') +} catch { + channels.sendHeaders = { hasSubscribers: false } + channels.beforeConnect = { hasSubscribers: false } + channels.connectError = { hasSubscribers: false } + channels.connected = { hasSubscribers: false } +} + +/** + * @type {import('../types/client').default} + */ +class Client extends DispatcherBase { + /** + * + * @param {string|URL} url + * @param {import('../types/client').Client.Options} options + */ + constructor (url, { + interceptors, + maxHeaderSize, + headersTimeout, + socketTimeout, + requestTimeout, + connectTimeout, + bodyTimeout, + idleTimeout, + keepAlive, + keepAliveTimeout, + maxKeepAliveTimeout, + keepAliveMaxTimeout, + 
keepAliveTimeoutThreshold, + socketPath, + pipelining, + tls, + strictContentLength, + maxCachedSessions, + maxRedirections, + connect, + maxRequestsPerClient, + localAddress, + maxResponseSize, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + // h2 + allowH2, + maxConcurrentStreams + } = {}) { + super() + + if (keepAlive !== undefined) { + throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') + } + + if (socketTimeout !== undefined) { + throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') + } + + if (requestTimeout !== undefined) { + throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + } + + if (idleTimeout !== undefined) { + throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') + } + + if (maxKeepAliveTimeout !== undefined) { + throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') + } + + if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { + throw new InvalidArgumentError('invalid maxHeaderSize') + } + + if (socketPath != null && typeof socketPath !== 'string') { + throw new InvalidArgumentError('invalid socketPath') + } + + if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { + throw new InvalidArgumentError('invalid connectTimeout') + } + + if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveTimeout') + } + + if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveMaxTimeout') + } + + if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { + throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') + } + + if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') + } + + if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { + throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') + } + + if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { + throw new InvalidArgumentError('localAddress must be valid string IP address') + } + + if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { + throw new InvalidArgumentError('maxResponseSize must be a positive number') + } + + if ( + autoSelectFamilyAttemptTimeout != null && + (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) + ) { + throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') + } + + // h2 + if (allowH2 != null && typeof allowH2 !== 'boolean') { + throw new 
InvalidArgumentError('allowH2 must be a valid boolean value') + } + + if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { + throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) + ? interceptors.Client + : [createRedirectInterceptor({ maxRedirections })] + this[kUrl] = util.parseOrigin(url) + this[kConnector] = connect + this[kSocket] = null + this[kPipelining] = pipelining != null ? pipelining : 1 + this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize + this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout + this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout + this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold + this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] + this[kServerName] = null + this[kLocalAddress] = localAddress != null ? localAddress : null + this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` + this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 + this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 + this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength + this[kMaxRedirections] = maxRedirections + this[kMaxRequests] = maxRequestsPerClient + this[kClosedResolve] = null + this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 + this[kHTTPConnVersion] = 'h1' + + // HTTP/2 + this[kHTTP2Session] = null + this[kHTTP2SessionState] = !allowH2 + ? null + : { + // streams: null, // Fixed queue of streams - For future support of `push` + openStreams: 0, // Keep track of them to decide wether or not unref the session + maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server + } + this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` + + // kQueue is built up of 3 sections separated by + // the kRunningIdx and kPendingIdx indices. + // | complete | running | pending | + // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length + // kRunningIdx points to the first running element. + // kPendingIdx points to the first pending element. + // This implements a fast queue with an amortized + // time of O(1). 
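// A standalone sketch of the "fast queue" layout described in the comment
// above: instead of shifting the array on every completed request, two
// indices advance through it and the consumed prefix is truncated only
// occasionally, giving amortized O(1) dequeue. The names here are
// illustrative, not the client's internals.
class FastQueue {
  constructor () {
    this.items = []
    this.runningIdx = 0 // first running item
    this.pendingIdx = 0 // first pending item
  }

  push (item) { this.items.push(item) }

  startNext () { // promote the oldest pending item to running
    return this.pendingIdx < this.items.length ? this.items[this.pendingIdx++] : null
  }

  completeOldest () { // retire the oldest running item
    if (this.runningIdx < this.pendingIdx) this.runningIdx++
    // Reclaim memory once the consumed prefix grows large.
    if (this.runningIdx > 256) {
      this.items.splice(0, this.runningIdx)
      this.pendingIdx -= this.runningIdx
      this.runningIdx = 0
    }
  }

  get pending () { return this.items.length - this.pendingIdx }
  get running () { return this.pendingIdx - this.runningIdx }
}

const q = new FastQueue()
q.push('req1'); q.push('req2')
q.startNext() // 'req1' is now running
q.completeOldest()
console.log(q.pending, q.running) // 1 0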
+ + this[kQueue] = [] + this[kRunningIdx] = 0 + this[kPendingIdx] = 0 + } + + get pipelining () { + return this[kPipelining] + } + + set pipelining (value) { + this[kPipelining] = value + resume(this, true) + } + + get [kPending] () { + return this[kQueue].length - this[kPendingIdx] + } + + get [kRunning] () { + return this[kPendingIdx] - this[kRunningIdx] + } + + get [kSize] () { + return this[kQueue].length - this[kRunningIdx] + } + + get [kConnected] () { + return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed + } + + get [kBusy] () { + const socket = this[kSocket] + return ( + (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || + (this[kSize] >= (this[kPipelining] || 1)) || + this[kPending] > 0 + ) + } + + /* istanbul ignore: only used for test */ + [kConnect] (cb) { + connect(this) + this.once('connect', cb) + } + + [kDispatch] (opts, handler) { + const origin = opts.origin || this[kUrl].origin + + const request = this[kHTTPConnVersion] === 'h2' + ? Request[kHTTP2BuildRequest](origin, opts, handler) + : Request[kHTTP1BuildRequest](origin, opts, handler) + + this[kQueue].push(request) + if (this[kResuming]) { + // Do nothing. + } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { + // Wait a tick in case stream/iterator is ended in the same tick. + this[kResuming] = 1 + process.nextTick(resume, this) + } else { + resume(this, true) + } + + if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { + this[kNeedDrain] = 2 + } + + return this[kNeedDrain] < 2 + } + + async [kClose] () { + // TODO: for H2 we need to gracefully flush the remaining enqueued + // request and close each stream. + return new Promise((resolve) => { + if (!this[kSize]) { + resolve(null) + } else { + this[kClosedResolve] = resolve + } + }) + } + + async [kDestroy] (err) { + return new Promise((resolve) => { + const requests = this[kQueue].splice(this[kPendingIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } + + const callback = () => { + if (this[kClosedResolve]) { + // TODO (fix): Should we error here with ClientDestroyedError? + this[kClosedResolve]() + this[kClosedResolve] = null + } + resolve() + } + + if (this[kHTTP2Session] != null) { + util.destroy(this[kHTTP2Session], err) + this[kHTTP2Session] = null + this[kHTTP2SessionState] = null + } + + if (!this[kSocket]) { + queueMicrotask(callback) + } else { + util.destroy(this[kSocket].on('close', callback), err) + } + + resume(this) + }) + } +} + +function onHttp2SessionError (err) { + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + + this[kSocket][kError] = err + + onError(this[kClient], err) +} + +function onHttp2FrameError (type, code, id) { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + + if (id === 0) { + this[kSocket][kError] = err + onError(this[kClient], err) + } +} + +function onHttp2SessionEnd () { + util.destroy(this, new SocketError('other side closed')) + util.destroy(this[kSocket], new SocketError('other side closed')) +} + +function onHTTP2GoAway (code) { + const client = this[kClient] + const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) + client[kSocket] = null + client[kHTTP2Session] = null + + if (client.destroyed) { + assert(this[kPending] === 0) + + // Fail entire queue. 
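// A minimal sketch of the dispatch handler interface that the queue above
// feeds: the same onConnect/onHeaders/onData/onComplete/onError hooks that
// the RequestHandler and StreamHandler classes earlier in this patch
// implement. The origin and path are hypothetical.
const { Client } = require('undici')

const client = new Client('http://localhost:3000')

client.dispatch({ path: '/', method: 'GET' }, {
  onConnect (abort) { /* keep `abort` to cancel the request later */ },
  onHeaders (statusCode, rawHeaders, resume) {
    console.log('status', statusCode)
    return true // false pauses the body until resume() is called
  },
  onData (chunk) {
    console.log('got', chunk.length, 'bytes')
    return true
  },
  onComplete (trailers) {
    console.log('done')
    client.close().catch(() => {})
  },
  onError (err) {
    console.error(err)
  }
})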
+ const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } + } else if (client[kRunning] > 0) { + // Fail head of pipeline. + const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) + } + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', + client[kUrl], + [client], + err + ) + + resume(client) +} + +const constants = require('./llhttp/constants') +const createRedirectInterceptor = require('./interceptor/redirectInterceptor') +const EMPTY_BUF = Buffer.alloc(0) + +async function lazyllhttp () { + const llhttpWasmData = process.env.JEST_WORKER_ID ? require('./llhttp/llhttp-wasm.js') : undefined + + let mod + try { + mod = await WebAssembly.compile(Buffer.from(require('./llhttp/llhttp_simd-wasm.js'), 'base64')) + } catch (e) { + /* istanbul ignore next */ + + // We could check if the error was caused by the simd option not + // being enabled, but the occurring of this other error + // * https://github.com/emscripten-core/emscripten/issues/11495 + // got me to remove that check to avoid breaking Node 12. + mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || require('./llhttp/llhttp-wasm.js'), 'base64')) + } + + return await WebAssembly.instantiate(mod, { + env: { + /* eslint-disable camelcase */ + + wasm_on_url: (p, at, len) => { + /* istanbul ignore next */ + return 0 + }, + wasm_on_status: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_begin: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageBegin() || 0 + }, + wasm_on_header_field: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_header_value: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 + }, + wasm_on_body: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_complete: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageComplete() || 0 + } + + /* eslint-enable camelcase */ + } + }) +} + +let llhttpInstance = null +let llhttpPromise = lazyllhttp() +llhttpPromise.catch() + +let currentParser = null +let currentBufferRef = null +let currentBufferSize = 0 +let currentBufferPtr = null + +const TIMEOUT_HEADERS = 1 +const TIMEOUT_BODY = 2 +const TIMEOUT_IDLE = 3 + +class Parser { + constructor (client, socket, { exports }) { + assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) + + this.llhttp = exports + this.ptr = 
this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) + this.client = client + this.socket = socket + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + this.statusCode = null + this.statusText = '' + this.upgrade = false + this.headers = [] + this.headersSize = 0 + this.headersMaxSize = client[kMaxHeadersSize] + this.shouldKeepAlive = false + this.paused = false + this.resume = this.resume.bind(this) + + this.bytesRead = 0 + + this.keepAlive = '' + this.contentLength = '' + this.connection = '' + this.maxResponseSize = client[kMaxResponseSize] + } + + setTimeout (value, type) { + this.timeoutType = type + if (value !== this.timeoutValue) { + timers.clearTimeout(this.timeout) + if (value) { + this.timeout = timers.setTimeout(onParserTimeout, value, this) + // istanbul ignore else: only for jest + if (this.timeout.unref) { + this.timeout.unref() + } + } else { + this.timeout = null + } + this.timeoutValue = value + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + } + + resume () { + if (this.socket.destroyed || !this.paused) { + return + } + + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_resume(this.ptr) + + assert(this.timeoutType === TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + this.paused = false + this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. + this.readMore() + } + + readMore () { + while (!this.paused && this.ptr) { + const chunk = this.socket.read() + if (chunk === null) { + break + } + this.execute(chunk) + } + } + + execute (data) { + assert(this.ptr != null) + assert(currentParser == null) + assert(!this.paused) + + const { socket, llhttp } = this + + if (data.length > currentBufferSize) { + if (currentBufferPtr) { + llhttp.free(currentBufferPtr) + } + currentBufferSize = Math.ceil(data.length / 4096) * 4096 + currentBufferPtr = llhttp.malloc(currentBufferSize) + } + + new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) + + // Call `execute` on the wasm parser. + // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, + // and finally the length of bytes to parse. + // The return value is an error code or `constants.ERROR.OK`. 
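+ // A `PAUSED` result pauses the parser and unshifts the unparsed remainder back + // onto the socket, `PAUSED_UPGRADE` hands the remainder to the upgrade path, and + // any other non-OK code is turned into an `HTTPParserError` (built from llhttp's + // error reason) that destroys the socket.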
+ try { + let ret + + try { + currentBufferRef = data + currentParser = this + ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) + /* eslint-disable-next-line no-useless-catch */ + } catch (err) { + /* istanbul ignore next: difficult to make a test case for */ + throw err + } finally { + currentParser = null + currentBufferRef = null + } + + const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr + + if (ret === constants.ERROR.PAUSED_UPGRADE) { + this.onUpgrade(data.slice(offset)) + } else if (ret === constants.ERROR.PAUSED) { + this.paused = true + socket.unshift(data.slice(offset)) + } else if (ret !== constants.ERROR.OK) { + const ptr = llhttp.llhttp_get_error_reason(this.ptr) + let message = '' + /* istanbul ignore else: difficult to make a test case for */ + if (ptr) { + const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) + message = + 'Response does not match the HTTP/1.1 protocol (' + + Buffer.from(llhttp.memory.buffer, ptr, len).toString() + + ')' + } + throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) + } + } catch (err) { + util.destroy(socket, err) + } + } + + destroy () { + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_free(this.ptr) + this.ptr = null + + timers.clearTimeout(this.timeout) + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + + this.paused = false + } + + onStatus (buf) { + this.statusText = buf.toString() + } + + onMessageBegin () { + const { socket, client } = this + + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + if (!request) { + return -1 + } + } + + onHeaderField (buf) { + const len = this.headers.length + + if ((len & 1) === 0) { + this.headers.push(buf) + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } + + this.trackHeader(buf.length) + } + + onHeaderValue (buf) { + let len = this.headers.length + + if ((len & 1) === 1) { + this.headers.push(buf) + len += 1 + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } + + const key = this.headers[len - 2] + if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { + this.keepAlive += buf.toString() + } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { + this.connection += buf.toString() + } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { + this.contentLength += buf.toString() + } + + this.trackHeader(buf.length) + } + + trackHeader (len) { + this.headersSize += len + if (this.headersSize >= this.headersMaxSize) { + util.destroy(this.socket, new HeadersOverflowError()) + } + } + + onUpgrade (head) { + const { upgrade, client, socket, headers, statusCode } = this + + assert(upgrade) + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(!socket.destroyed) + assert(socket === client[kSocket]) + assert(!this.paused) + assert(request.upgrade || request.method === 'CONNECT') + + this.statusCode = null + this.statusText = '' + this.shouldKeepAlive = null + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + socket.unshift(head) + + socket[kParser].destroy() + socket[kParser] = null + + socket[kClient] = null + socket[kError] = null + socket + .removeListener('error', onSocketError) + .removeListener('readable', onSocketReadable) + .removeListener('end', onSocketEnd) + 
.removeListener('close', onSocketClose) + + client[kSocket] = null + client[kQueue][client[kRunningIdx]++] = null + client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) + + try { + request.onUpgrade(statusCode, headers, socket) + } catch (err) { + util.destroy(socket, err) + } + + resume(client) + } + + onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { + const { client, socket, headers, statusText } = this + + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + + /* istanbul ignore next: difficult to make a test case for */ + if (!request) { + return -1 + } + + assert(!this.upgrade) + assert(this.statusCode < 200) + + if (statusCode === 100) { + util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) + return -1 + } + + /* this can only happen if server is misbehaving */ + if (upgrade && !request.upgrade) { + util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) + return -1 + } + + assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) + + this.statusCode = statusCode + this.shouldKeepAlive = ( + shouldKeepAlive || + // Override llhttp value which does not allow keepAlive for HEAD. + (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') + ) + + if (this.statusCode >= 200) { + const bodyTimeout = request.bodyTimeout != null + ? request.bodyTimeout + : client[kBodyTimeout] + this.setTimeout(bodyTimeout, TIMEOUT_BODY) + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + if (request.method === 'CONNECT') { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + if (upgrade) { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (this.shouldKeepAlive && client[kPipelining]) { + const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null + + if (keepAliveTimeout != null) { + const timeout = Math.min( + keepAliveTimeout - client[kKeepAliveTimeoutThreshold], + client[kKeepAliveMaxTimeout] + ) + if (timeout <= 0) { + socket[kReset] = true + } else { + client[kKeepAliveTimeoutValue] = timeout + } + } else { + client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] + } + } else { + // Stop more requests from being dispatched. + socket[kReset] = true + } + + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + + if (request.aborted) { + return -1 + } + + if (request.method === 'HEAD') { + return 1 + } + + if (statusCode < 200) { + return 1 + } + + if (socket[kBlocking]) { + socket[kBlocking] = false + resume(client) + } + + return pause ? 
constants.ERROR.PAUSED : 0 + } + + onBody (buf) { + const { client, socket, statusCode, maxResponseSize } = this + + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert.strictEqual(this.timeoutType, TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + assert(statusCode >= 200) + + if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { + util.destroy(socket, new ResponseExceededMaxSizeError()) + return -1 + } + + this.bytesRead += buf.length + + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED + } + } + + onMessageComplete () { + const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this + + if (socket.destroyed && (!statusCode || shouldKeepAlive)) { + return -1 + } + + if (upgrade) { + return + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(statusCode >= 100) + + this.statusCode = null + this.statusText = '' + this.bytesRead = 0 + this.contentLength = '' + this.keepAlive = '' + this.connection = '' + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (statusCode < 200) { + return + } + + /* istanbul ignore next: should be handled by llhttp? */ + if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { + util.destroy(socket, new ResponseContentLengthMismatchError()) + return -1 + } + + request.onComplete(headers) + + client[kQueue][client[kRunningIdx]++] = null + + if (socket[kWriting]) { + assert.strictEqual(client[kRunning], 0) + // Response completed before request. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (!shouldKeepAlive) { + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (socket[kReset] && client[kRunning] === 0) { + // Destroy socket once all requests have completed. + // The request at the tail of the pipeline is the one + // that requested reset and no further requests should + // have been queued since then. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (client[kPipelining] === 1) { + // We must wait a full event loop cycle to reuse this socket to make sure + // that non-spec compliant servers are not closing the connection even if they + // said they won't. 
+ setImmediate(resume, client) + } else { + resume(client) + } + } +} + +function onParserTimeout (parser) { + const { socket, timeoutType, client } = parser + + /* istanbul ignore else */ + if (timeoutType === TIMEOUT_HEADERS) { + if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { + assert(!parser.paused, 'cannot be paused while waiting for headers') + util.destroy(socket, new HeadersTimeoutError()) + } + } else if (timeoutType === TIMEOUT_BODY) { + if (!parser.paused) { + util.destroy(socket, new BodyTimeoutError()) + } + } else if (timeoutType === TIMEOUT_IDLE) { + assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) + util.destroy(socket, new InformationalError('socket idle timeout')) + } +} + +function onSocketReadable () { + const { [kParser]: parser } = this + if (parser) { + parser.readMore() + } +} + +function onSocketError (err) { + const { [kClient]: client, [kParser]: parser } = this + + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + + if (client[kHTTPConnVersion] !== 'h2') { + // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded + // to the user. + if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + return + } + } + + this[kError] = err + + onError(this[kClient], err) +} + +function onError (client, err) { + if ( + client[kRunning] === 0 && + err.code !== 'UND_ERR_INFO' && + err.code !== 'UND_ERR_SOCKET' + ) { + // Error is not caused by a running request and is not a recoverable + // socket error. + + assert(client[kPendingIdx] === client[kRunningIdx]) + + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + assert(client[kSize] === 0) + } +} + +function onSocketEnd () { + const { [kParser]: parser, [kClient]: client } = this + + if (client[kHTTPConnVersion] !== 'h2') { + if (parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + return + } + } + + util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) +} + +function onSocketClose () { + const { [kClient]: client, [kParser]: parser } = this + + if (client[kHTTPConnVersion] === 'h1' && parser) { + if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + } + + this[kParser].destroy() + this[kParser] = null + } + + const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) + + client[kSocket] = null + + if (client.destroyed) { + assert(client[kPending] === 0) + + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { + // Fail head of pipeline.
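+ // Only the request at the head of the pipeline is errored here; the remaining + // queued (and previously in-flight) requests are moved back to pending below via + // `client[kPendingIdx] = client[kRunningIdx]` so they can be re-dispatched once + // `resume(client)` has established a new connection.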
+ const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) + } + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', client[kUrl], [client], err) + + resume(client) +} + +async function connect (client) { + assert(!client[kConnecting]) + assert(!client[kSocket]) + + let { host, hostname, protocol, port } = client[kUrl] + + // Resolve ipv6 + if (hostname[0] === '[') { + const idx = hostname.indexOf(']') + + assert(idx !== -1) + const ip = hostname.substring(1, idx) + + assert(net.isIP(ip)) + hostname = ip + } + + client[kConnecting] = true + + if (channels.beforeConnect.hasSubscribers) { + channels.beforeConnect.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector] + }) + } + + try { + const socket = await new Promise((resolve, reject) => { + client[kConnector]({ + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, (err, socket) => { + if (err) { + reject(err) + } else { + resolve(socket) + } + }) + }) + + if (client.destroyed) { + util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) + return + } + + client[kConnecting] = false + + assert(socket) + + const isH2 = socket.alpnProtocol === 'h2' + if (isH2) { + if (!h2ExperimentalWarned) { + h2ExperimentalWarned = true + process.emitWarning('H2 support is experimental, expect them to change at any time.', { + code: 'UNDICI-H2' + }) + } + + const session = http2.connect(client[kUrl], { + createConnection: () => socket, + peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams + }) + + client[kHTTPConnVersion] = 'h2' + session[kClient] = client + session[kSocket] = socket + session.on('error', onHttp2SessionError) + session.on('frameError', onHttp2FrameError) + session.on('end', onHttp2SessionEnd) + session.on('goaway', onHTTP2GoAway) + session.on('close', onSocketClose) + session.unref() + + client[kHTTP2Session] = session + socket[kHTTP2Session] = session + } else { + if (!llhttpInstance) { + llhttpInstance = await llhttpPromise + llhttpPromise = null + } + + socket[kNoRef] = false + socket[kWriting] = false + socket[kReset] = false + socket[kBlocking] = false + socket[kParser] = new Parser(client, socket, llhttpInstance) + } + + socket[kCounter] = 0 + socket[kMaxRequests] = client[kMaxRequests] + socket[kClient] = client + socket[kError] = null + + socket + .on('error', onSocketError) + .on('readable', onSocketReadable) + .on('end', onSocketEnd) + .on('close', onSocketClose) + + client[kSocket] = socket + + if (channels.connected.hasSubscribers) { + channels.connected.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + socket + }) + } + client.emit('connect', client[kUrl], [client]) + } catch (err) { + if (client.destroyed) { + return + } + + client[kConnecting] = false + + if (channels.connectError.hasSubscribers) { + channels.connectError.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + error: err + }) + } + + if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + assert(client[kRunning] === 0) + while (client[kPending] > 0 && 
client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { + const request = client[kQueue][client[kPendingIdx]++] + errorRequest(client, request, err) + } + } else { + onError(client, err) + } + + client.emit('connectionError', client[kUrl], [client], err) + } + + resume(client) +} + +function emitDrain (client) { + client[kNeedDrain] = 0 + client.emit('drain', client[kUrl], [client]) +} + +function resume (client, sync) { + if (client[kResuming] === 2) { + return + } + + client[kResuming] = 2 + + _resume(client, sync) + client[kResuming] = 0 + + if (client[kRunningIdx] > 256) { + client[kQueue].splice(0, client[kRunningIdx]) + client[kPendingIdx] -= client[kRunningIdx] + client[kRunningIdx] = 0 + } +} + +function _resume (client, sync) { + while (true) { + if (client.destroyed) { + assert(client[kPending] === 0) + return + } + + if (client[kClosedResolve] && !client[kSize]) { + client[kClosedResolve]() + client[kClosedResolve] = null + return + } + + const socket = client[kSocket] + + if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { + if (client[kSize] === 0) { + if (!socket[kNoRef] && socket.unref) { + socket.unref() + socket[kNoRef] = true + } + } else if (socket[kNoRef] && socket.ref) { + socket.ref() + socket[kNoRef] = false + } + + if (client[kSize] === 0) { + if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) + } + } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { + if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { + const request = client[kQueue][client[kRunningIdx]] + const headersTimeout = request.headersTimeout != null + ? request.headersTimeout + : client[kHeadersTimeout] + socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) + } + } + } + + if (client[kBusy]) { + client[kNeedDrain] = 2 + } else if (client[kNeedDrain] === 2) { + if (sync) { + client[kNeedDrain] = 1 + process.nextTick(emitDrain, client) + } else { + emitDrain(client) + } + continue + } + + if (client[kPending] === 0) { + return + } + + if (client[kRunning] >= (client[kPipelining] || 1)) { + return + } + + const request = client[kQueue][client[kPendingIdx]] + + if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { + if (client[kRunning] > 0) { + return + } + + client[kServerName] = request.servername + + if (socket && socket.servername !== request.servername) { + util.destroy(socket, new InformationalError('servername changed')) + return + } + } + + if (client[kConnecting]) { + return + } + + if (!socket && !client[kHTTP2Session]) { + connect(client) + return + } + + if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { + return + } + + if (client[kRunning] > 0 && !request.idempotent) { + // Non-idempotent request cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { + // Don't dispatch an upgrade until all preceding requests have completed. + // A misbehaving server might upgrade the connection before all pipelined + // request has completed. + return + } + + if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && + (util.isStream(request.body) || util.isAsyncIterable(request.body))) { + // Request with stream or iterator body can error while other requests + // are inflight and indirectly error those as well. 
+ // Ensure this doesn't happen by waiting for inflight + // to complete before dispatching. + + // Request with stream or iterator body cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (!request.aborted && write(client, request)) { + client[kPendingIdx]++ + } else { + client[kQueue].splice(client[kPendingIdx], 1) + } + } +} + +// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 +function shouldSendContentLength (method) { + return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' +} + +function write (client, request) { + if (client[kHTTPConnVersion] === 'h2') { + writeH2(client, client[kHTTP2Session], request) + return + } + + const { body, method, path, host, upgrade, headers, blocking, reset } = request + + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 + + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. + + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) + } + + const bodyLength = util.bodyLength(body) + + let contentLength = bodyLength + + if (contentLength === null) { + contentLength = request.contentLength + } + + if (contentLength === 0 && !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } + + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + const socket = client[kSocket] + + try { + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + + util.destroy(socket, new InformationalError('aborted')) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false + } + + if (method === 'HEAD') { + // https://github.com/mcollina/undici/issues/258 + // Close after a HEAD request to interop with misbehaving servers + // that may send a body in the response. + + socket[kReset] = true + } + + if (upgrade || method === 'CONNECT') { + // On CONNECT or upgrade, block pipeline from dispatching further + // requests on this connection. 
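+ // `socket[kReset]` stops `_resume` from dispatching further requests on this + // socket and causes it to be destroyed once the in-flight requests have + // completed (see `onMessageComplete`).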
+ + socket[kReset] = true + } + + if (reset != null) { + socket[kReset] = reset + } + + if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { + socket[kReset] = true + } + + if (blocking) { + socket[kBlocking] = true + } + + let header = `${method} ${path} HTTP/1.1\r\n` + + if (typeof host === 'string') { + header += `host: ${host}\r\n` + } else { + header += client[kHostHeader] + } + + if (upgrade) { + header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` + } else if (client[kPipelining] && !socket[kReset]) { + header += 'connection: keep-alive\r\n' + } else { + header += 'connection: close\r\n' + } + + if (headers) { + header += headers + } + + if (channels.sendHeaders.hasSubscribers) { + channels.sendHeaders.publish({ request, headers: header, socket }) + } + + /* istanbul ignore else: assertion */ + if (!body || bodyLength === 0) { + if (contentLength === 0) { + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + assert(contentLength === null, 'no body must not have content length') + socket.write(`${header}\r\n`, 'latin1') + } + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(body) + socket.uncork() + request.onBodySent(body) + request.onRequestSent() + if (!expectsPayload) { + socket[kReset] = true + } + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) + } else { + writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) + } + } else if (util.isStream(body)) { + writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) + } else if (util.isIterable(body)) { + writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) + } else { + assert(false) + } + + return true +} + +function writeH2 (client, session, request) { + const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request + + let headers + if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) + else headers = reqHeaders + + if (upgrade) { + errorRequest(client, request, new Error('Upgrade not supported for H2')) + return false + } + + try { + // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false + } + + /** @type {import('node:http2').ClientHttp2Stream} */ + let stream + const h2State = client[kHTTP2SessionState] + + headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] + headers[HTTP2_HEADER_METHOD] = method + + if (method === 'CONNECT') { + session.ref() + // we are already connected, streams are pending, first request + // will create a new stream. 
We trigger a request to create the stream and wait until the + // `ready` event is triggered + // We disable endStream to allow the user to write to the stream + stream = session.request(headers, { endStream: false, signal }) + + if (stream.id && !stream.pending) { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + } else { + stream.once('ready', () => { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + }) + } + + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) session.unref() + }) + + return true + } + + // https://tools.ietf.org/html/rfc7540#section-8.3 + // :path and :scheme headers must be omitted when sending CONNECT + + headers[HTTP2_HEADER_PATH] = path + headers[HTTP2_HEADER_SCHEME] = 'https' + + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 + + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. + + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) + } + + let contentLength = util.bodyLength(body) + + if (contentLength == null) { + contentLength = request.contentLength + } + + if (contentLength === 0 || !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } + + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with a value of 0; this should be allowed.
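+ // As on the HTTP/1.1 path, a mismatch between the declared `request.contentLength` + // and the length derived from the body is fatal when `client[kStrictContentLength]` + // is set and only emits a process warning otherwise.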
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + if (contentLength != null) { + assert(body, 'no body must not have content length') + headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` + } + + session.ref() + + const shouldEndStream = method === 'GET' || method === 'HEAD' + if (expectContinue) { + headers[HTTP2_HEADER_EXPECT] = '100-continue' + stream = session.request(headers, { endStream: shouldEndStream, signal }) + + stream.once('continue', writeBodyH2) + } else { + stream = session.request(headers, { + endStream: shouldEndStream, + signal + }) + writeBodyH2() + } + + // Increment counter as we have new several streams open + ++h2State.openStreams + + stream.once('response', headers => { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { + stream.pause() + } + }) + + stream.once('end', () => { + request.onComplete([]) + }) + + stream.on('data', (chunk) => { + if (request.onData(chunk) === false) { + stream.pause() + } + }) + + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) { + session.unref() + } + }) + + stream.once('error', function (err) { + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + + stream.once('frameError', (type, code) => { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + errorRequest(client, request, err) + + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + + // stream.on('aborted', () => { + // // TODO(HTTP/2): Support aborted + // }) + + // stream.on('timeout', () => { + // // TODO(HTTP/2): Support timeout + // }) + + // stream.on('push', headers => { + // // TODO(HTTP/2): Suppor push + // }) + + // stream.on('trailers', headers => { + // // TODO(HTTP/2): Support trailers + // }) + + return true + + function writeBodyH2 () { + /* istanbul ignore else: assertion */ + if (!body) { + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + stream.cork() + stream.write(body) + stream.uncork() + stream.end() + request.onBodySent(body) + request.onRequestSent() + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ + client, + request, + contentLength, + h2stream: stream, + expectsPayload, + body: body.stream(), + socket: client[kSocket], + header: '' + }) + } else { + writeBlob({ + body, + client, + request, + contentLength, + expectsPayload, + h2stream: stream, + header: '', + socket: client[kSocket] + }) + } + } else if (util.isStream(body)) { + writeStream({ + body, + client, + request, + contentLength, + expectsPayload, + socket: client[kSocket], + h2stream: stream, + header: '' + }) + } else if (util.isIterable(body)) { + writeIterable({ + body, + client, + request, + contentLength, + expectsPayload, + header: '', + h2stream: stream, + socket: 
client[kSocket] + }) + } else { + assert(false) + } + } +} + +function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') + + if (client[kHTTPConnVersion] === 'h2') { + // For HTTP/2, is enough to pipe the stream + const pipe = pipeline( + body, + h2stream, + (err) => { + if (err) { + util.destroy(body, err) + util.destroy(h2stream, err) + } else { + request.onRequestSent() + } + } + ) + + pipe.on('data', onPipeData) + pipe.once('end', () => { + pipe.removeListener('data', onPipeData) + util.destroy(pipe) + }) + + function onPipeData (chunk) { + request.onBodySent(chunk) + } + + return + } + + let finished = false + + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + + const onData = function (chunk) { + if (finished) { + return + } + + try { + if (!writer.write(chunk) && this.pause) { + this.pause() + } + } catch (err) { + util.destroy(this, err) + } + } + const onDrain = function () { + if (finished) { + return + } + + if (body.resume) { + body.resume() + } + } + const onAbort = function () { + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) + } + const onFinished = function (err) { + if (finished) { + return + } + + finished = true + + assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) + + socket + .off('drain', onDrain) + .off('error', onFinished) + + body + .removeListener('data', onData) + .removeListener('end', onFinished) + .removeListener('error', onFinished) + .removeListener('close', onAbort) + + if (!err) { + try { + writer.end() + } catch (er) { + err = er + } + } + + writer.destroy(err) + + if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { + util.destroy(body, err) + } else { + util.destroy(body) + } + } + + body + .on('data', onData) + .on('end', onFinished) + .on('error', onFinished) + .on('close', onAbort) + + if (body.resume) { + body.resume() + } + + socket + .on('drain', onDrain) + .on('error', onFinished) +} + +async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength === body.size, 'blob body must have content length') + + const isH2 = client[kHTTPConnVersion] === 'h2' + try { + if (contentLength != null && contentLength !== body.size) { + throw new RequestContentLengthMismatchError() + } + + const buffer = Buffer.from(await body.arrayBuffer()) + + if (isH2) { + h2stream.cork() + h2stream.write(buffer) + h2stream.uncork() + } else { + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(buffer) + socket.uncork() + } + + request.onBodySent(buffer) + request.onRequestSent() + + if (!expectsPayload) { + socket[kReset] = true + } + + resume(client) + } catch (err) { + util.destroy(isH2 ? 
h2stream : socket, err) + } +} + +async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') + + let callback = null + function onDrain () { + if (callback) { + const cb = callback + callback = null + cb() + } + } + + const waitForDrain = () => new Promise((resolve, reject) => { + assert(callback === null) + + if (socket[kError]) { + reject(socket[kError]) + } else { + callback = resolve + } + }) + + if (client[kHTTPConnVersion] === 'h2') { + h2stream + .on('close', onDrain) + .on('drain', onDrain) + + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } + + const res = h2stream.write(chunk) + request.onBodySent(chunk) + if (!res) { + await waitForDrain() + } + } + } catch (err) { + h2stream.destroy(err) + } finally { + request.onRequestSent() + h2stream.end() + h2stream + .off('close', onDrain) + .off('drain', onDrain) + } + + return + } + + socket + .on('close', onDrain) + .on('drain', onDrain) + + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } + + if (!writer.write(chunk)) { + await waitForDrain() + } + } + + writer.end() + } catch (err) { + writer.destroy(err) + } finally { + socket + .off('close', onDrain) + .off('drain', onDrain) + } +} + +class AsyncWriter { + constructor ({ socket, request, contentLength, client, expectsPayload, header }) { + this.socket = socket + this.request = request + this.contentLength = contentLength + this.client = client + this.bytesWritten = 0 + this.expectsPayload = expectsPayload + this.header = header + + socket[kWriting] = true + } + + write (chunk) { + const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return false + } + + const len = Buffer.byteLength(chunk) + if (!len) { + return true + } + + // We should defer writing chunks. 
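+ // When `contentLength` is unknown the writer falls back to + // `transfer-encoding: chunked` framing below, prefixing each chunk with + // `\r\n<size-in-hex>\r\n`; otherwise the declared content-length is written with + // the header block and an over-long body is rejected (or warned about) by the + // check that follows.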
+ if (contentLength !== null && bytesWritten + len > contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + socket.cork() + + if (bytesWritten === 0) { + if (!expectsPayload) { + socket[kReset] = true + } + + if (contentLength === null) { + socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') + } else { + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + } + } + + if (contentLength === null) { + socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') + } + + this.bytesWritten += len + + const ret = socket.write(chunk) + + socket.uncork() + + request.onBodySent(chunk) + + if (!ret) { + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() + } + } + } + + return ret + } + + end () { + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this + request.onRequestSent() + + socket[kWriting] = false + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return + } + + if (bytesWritten === 0) { + if (expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD send a Content-Length in a request message when + // no Transfer-Encoding is sent and the request method defines a meaning + // for an enclosed payload body. + + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + socket.write(`${header}\r\n`, 'latin1') + } + } else if (contentLength === null) { + socket.write('\r\n0\r\n\r\n', 'latin1') + } + + if (contentLength !== null && bytesWritten !== contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } else { + process.emitWarning(new RequestContentLengthMismatchError()) + } + } + + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() + } + } + + resume(client) + } + + destroy (err) { + const { socket, client } = this + + socket[kWriting] = false + + if (err) { + assert(client[kRunning] <= 1, 'pipeline should only contain this request') + util.destroy(socket, err) + } + } +} + +function errorRequest (client, request, err) { + try { + request.onError(err) + assert(request.aborted) + } catch (err) { + client.emit('error', err) + } +} + +module.exports = Client diff --git a/lib/compat/dispatcher-weakref.js b/lib/compat/dispatcher-weakref.js new file mode 100644 index 0000000..8cb99e2 --- /dev/null +++ b/lib/compat/dispatcher-weakref.js @@ -0,0 +1,48 @@ +'use strict' + +/* istanbul ignore file: only for Node 12 */ + +const { kConnected, kSize } = require('../core/symbols') + +class CompatWeakRef { + constructor (value) { + this.value = value + } + + deref () { + return this.value[kConnected] === 0 && this.value[kSize] === 0 + ? 
undefined + : this.value + } +} + +class CompatFinalizer { + constructor (finalizer) { + this.finalizer = finalizer + } + + register (dispatcher, key) { + if (dispatcher.on) { + dispatcher.on('disconnect', () => { + if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { + this.finalizer(key) + } + }) + } + } +} + +module.exports = function () { + // FIXME: remove workaround when the Node bug is fixed + // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 + if (process.env.NODE_V8_COVERAGE) { + return { + WeakRef: CompatWeakRef, + FinalizationRegistry: CompatFinalizer + } + } + return { + WeakRef: global.WeakRef || CompatWeakRef, + FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer + } +} diff --git a/lib/cookies/constants.js b/lib/cookies/constants.js new file mode 100644 index 0000000..85f1fec --- /dev/null +++ b/lib/cookies/constants.js @@ -0,0 +1,12 @@ +'use strict' + +// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size +const maxAttributeValueSize = 1024 + +// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size +const maxNameValuePairSize = 4096 + +module.exports = { + maxAttributeValueSize, + maxNameValuePairSize +} diff --git a/lib/cookies/index.js b/lib/cookies/index.js new file mode 100644 index 0000000..c9c1f28 --- /dev/null +++ b/lib/cookies/index.js @@ -0,0 +1,184 @@ +'use strict' + +const { parseSetCookie } = require('./parse') +const { stringify, getHeadersList } = require('./util') +const { webidl } = require('../fetch/webidl') +const { Headers } = require('../fetch/headers') + +/** + * @typedef {Object} Cookie + * @property {string} name + * @property {string} value + * @property {Date|number|undefined} expires + * @property {number|undefined} maxAge + * @property {string|undefined} domain + * @property {string|undefined} path + * @property {boolean|undefined} secure + * @property {boolean|undefined} httpOnly + * @property {'Strict'|'Lax'|'None'} sameSite + * @property {string[]} unparsed + */ + +/** + * @param {Headers} headers + * @returns {Record} + */ +function getCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookie = headers.get('cookie') + const out = {} + + if (!cookie) { + return out + } + + for (const piece of cookie.split(';')) { + const [name, ...value] = piece.split('=') + + out[name.trim()] = value.join('=') + } + + return out +} + +/** + * @param {Headers} headers + * @param {string} name + * @param {{ path?: string, domain?: string }|undefined} attributes + * @returns {void} + */ +function deleteCookie (headers, name, attributes) { + webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + name = webidl.converters.DOMString(name) + attributes = webidl.converters.DeleteCookieAttributes(attributes) + + // Matches behavior of + // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 + setCookie(headers, { + name, + value: '', + expires: new Date(0), + ...attributes + }) +} + +/** + * @param {Headers} headers + * @returns {Cookie[]} + */ +function getSetCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookies = getHeadersList(headers).cookies + + if (!cookies) { + return [] + } + + // In older versions of undici, cookies is a list of 
name:value. + return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? pair[1] : pair)) +} + +/** + * @param {Headers} headers + * @param {Cookie} cookie + * @returns {void} + */ +function setCookie (headers, cookie) { + webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + cookie = webidl.converters.Cookie(cookie) + + const str = stringify(cookie) + + if (str) { + headers.append('Set-Cookie', stringify(cookie)) + } +} + +webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + } +]) + +webidl.converters.Cookie = webidl.dictionaryConverter([ + { + converter: webidl.converters.DOMString, + key: 'name' + }, + { + converter: webidl.converters.DOMString, + key: 'value' + }, + { + converter: webidl.nullableConverter((value) => { + if (typeof value === 'number') { + return webidl.converters['unsigned long long'](value) + } + + return new Date(value) + }), + key: 'expires', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters['long long']), + key: 'maxAge', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'secure', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'httpOnly', + defaultValue: null + }, + { + converter: webidl.converters.USVString, + key: 'sameSite', + allowedValues: ['Strict', 'Lax', 'None'] + }, + { + converter: webidl.sequenceConverter(webidl.converters.DOMString), + key: 'unparsed', + defaultValue: [] + } +]) + +module.exports = { + getCookies, + deleteCookie, + getSetCookies, + setCookie +} diff --git a/lib/cookies/parse.js b/lib/cookies/parse.js new file mode 100644 index 0000000..aae2750 --- /dev/null +++ b/lib/cookies/parse.js @@ -0,0 +1,317 @@ +'use strict' + +const { maxNameValuePairSize, maxAttributeValueSize } = require('./constants') +const { isCTLExcludingHtab } = require('./util') +const { collectASequenceOfCodePointsFast } = require('../fetch/dataURL') +const assert = require('assert') + +/** + * @description Parses the field-value attributes of a set-cookie header string. + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} header + * @returns if the header is invalid, null will be returned + */ +function parseSetCookie (header) { + // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F + // character (CTL characters excluding HTAB): Abort these steps and + // ignore the set-cookie-string entirely. + if (isCTLExcludingHtab(header)) { + return null + } + + let nameValuePair = '' + let unparsedAttributes = '' + let name = '' + let value = '' + + // 2. If the set-cookie-string contains a %x3B (";") character: + if (header.includes(';')) { + // 1. The name-value-pair string consists of the characters up to, + // but not including, the first %x3B (";"), and the unparsed- + // attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question). 
+ const position = { position: 0 } + + nameValuePair = collectASequenceOfCodePointsFast(';', header, position) + unparsedAttributes = header.slice(position.position) + } else { + // Otherwise: + + // 1. The name-value-pair string consists of all the characters + // contained in the set-cookie-string, and the unparsed- + // attributes is the empty string. + nameValuePair = header + } + + // 3. If the name-value-pair string lacks a %x3D ("=") character, then + // the name string is empty, and the value string is the value of + // name-value-pair. + if (!nameValuePair.includes('=')) { + value = nameValuePair + } else { + // Otherwise, the name string consists of the characters up to, but + // not including, the first %x3D ("=") character, and the (possibly + // empty) value string consists of the characters after the first + // %x3D ("=") character. + const position = { position: 0 } + name = collectASequenceOfCodePointsFast( + '=', + nameValuePair, + position + ) + value = nameValuePair.slice(position.position + 1) + } + + // 4. Remove any leading or trailing WSP characters from the name + // string and the value string. + name = name.trim() + value = value.trim() + + // 5. If the sum of the lengths of the name string and the value string + // is more than 4096 octets, abort these steps and ignore the set- + // cookie-string entirely. + if (name.length + value.length > maxNameValuePairSize) { + return null + } + + // 6. The cookie-name is the name string, and the cookie-value is the + // value string. + return { + name, value, ...parseUnparsedAttributes(unparsedAttributes) + } +} + +/** + * Parses the remaining attributes of a set-cookie header + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} unparsedAttributes + * @param {[Object.]={}} cookieAttributeList + */ +function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { + // 1. If the unparsed-attributes string is empty, skip the rest of + // these steps. + if (unparsedAttributes.length === 0) { + return cookieAttributeList + } + + // 2. Discard the first character of the unparsed-attributes (which + // will be a %x3B (";") character). + assert(unparsedAttributes[0] === ';') + unparsedAttributes = unparsedAttributes.slice(1) + + let cookieAv = '' + + // 3. If the remaining unparsed-attributes contains a %x3B (";") + // character: + if (unparsedAttributes.includes(';')) { + // 1. Consume the characters of the unparsed-attributes up to, but + // not including, the first %x3B (";") character. + cookieAv = collectASequenceOfCodePointsFast( + ';', + unparsedAttributes, + { position: 0 } + ) + unparsedAttributes = unparsedAttributes.slice(cookieAv.length) + } else { + // Otherwise: + + // 1. Consume the remainder of the unparsed-attributes. + cookieAv = unparsedAttributes + unparsedAttributes = '' + } + + // Let the cookie-av string be the characters consumed in this step. + + let attributeName = '' + let attributeValue = '' + + // 4. If the cookie-av string contains a %x3D ("=") character: + if (cookieAv.includes('=')) { + // 1. The (possibly empty) attribute-name string consists of the + // characters up to, but not including, the first %x3D ("=") + // character, and the (possibly empty) attribute-value string + // consists of the characters after the first %x3D ("=") + // character. 
+ const position = { position: 0 } + + attributeName = collectASequenceOfCodePointsFast( + '=', + cookieAv, + position + ) + attributeValue = cookieAv.slice(position.position + 1) + } else { + // Otherwise: + + // 1. The attribute-name string consists of the entire cookie-av + // string, and the attribute-value string is empty. + attributeName = cookieAv + } + + // 5. Remove any leading or trailing WSP characters from the attribute- + // name string and the attribute-value string. + attributeName = attributeName.trim() + attributeValue = attributeValue.trim() + + // 6. If the attribute-value is longer than 1024 octets, ignore the + // cookie-av string and return to Step 1 of this algorithm. + if (attributeValue.length > maxAttributeValueSize) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 7. Process the attribute-name and attribute-value according to the + // requirements in the following subsections. (Notice that + // attributes with unrecognized attribute-names are ignored.) + const attributeNameLowercase = attributeName.toLowerCase() + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 + // If the attribute-name case-insensitively matches the string + // "Expires", the user agent MUST process the cookie-av as follows. + if (attributeNameLowercase === 'expires') { + // 1. Let the expiry-time be the result of parsing the attribute-value + // as cookie-date (see Section 5.1.1). + const expiryTime = new Date(attributeValue) + + // 2. If the attribute-value failed to parse as a cookie date, ignore + // the cookie-av. + + cookieAttributeList.expires = expiryTime + } else if (attributeNameLowercase === 'max-age') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 + // If the attribute-name case-insensitively matches the string "Max- + // Age", the user agent MUST process the cookie-av as follows. + + // 1. If the first character of the attribute-value is not a DIGIT or a + // "-" character, ignore the cookie-av. + const charCode = attributeValue.charCodeAt(0) + + if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 2. If the remainder of attribute-value contains a non-DIGIT + // character, ignore the cookie-av. + if (!/^\d+$/.test(attributeValue)) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 3. Let delta-seconds be the attribute-value converted to an integer. + const deltaSeconds = Number(attributeValue) + + // 4. Let cookie-age-limit be the maximum age of the cookie (which + // SHOULD be 400 days or less, see Section 4.1.2.2). + + // 5. Set delta-seconds to the smaller of its present value and cookie- + // age-limit. + // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) + + // 6. If delta-seconds is less than or equal to zero (0), let expiry- + // time be the earliest representable date and time. Otherwise, let + // the expiry-time be the current date and time plus delta-seconds + // seconds. + // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds + + // 7. Append an attribute to the cookie-attribute-list with an + // attribute-name of Max-Age and an attribute-value of expiry-time. 
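+ // Note that the clamping and expiry-time computation of steps 4-6 above are left + // commented out; the raw delta-seconds value is stored as `maxAge` rather than a + // computed expiry-time.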
+ cookieAttributeList.maxAge = deltaSeconds + } else if (attributeNameLowercase === 'domain') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 + // If the attribute-name case-insensitively matches the string "Domain", + // the user agent MUST process the cookie-av as follows. + + // 1. Let cookie-domain be the attribute-value. + let cookieDomain = attributeValue + + // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be + // cookie-domain without its leading %x2E ("."). + if (cookieDomain[0] === '.') { + cookieDomain = cookieDomain.slice(1) + } + + // 3. Convert the cookie-domain to lower case. + cookieDomain = cookieDomain.toLowerCase() + + // 4. Append an attribute to the cookie-attribute-list with an + // attribute-name of Domain and an attribute-value of cookie-domain. + cookieAttributeList.domain = cookieDomain + } else if (attributeNameLowercase === 'path') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 + // If the attribute-name case-insensitively matches the string "Path", + // the user agent MUST process the cookie-av as follows. + + // 1. If the attribute-value is empty or if the first character of the + // attribute-value is not %x2F ("/"): + let cookiePath = '' + if (attributeValue.length === 0 || attributeValue[0] !== '/') { + // 1. Let cookie-path be the default-path. + cookiePath = '/' + } else { + // Otherwise: + + // 1. Let cookie-path be the attribute-value. + cookiePath = attributeValue + } + + // 2. Append an attribute to the cookie-attribute-list with an + // attribute-name of Path and an attribute-value of cookie-path. + cookieAttributeList.path = cookiePath + } else if (attributeNameLowercase === 'secure') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 + // If the attribute-name case-insensitively matches the string "Secure", + // the user agent MUST append an attribute to the cookie-attribute-list + // with an attribute-name of Secure and an empty attribute-value. + + cookieAttributeList.secure = true + } else if (attributeNameLowercase === 'httponly') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 + // If the attribute-name case-insensitively matches the string + // "HttpOnly", the user agent MUST append an attribute to the cookie- + // attribute-list with an attribute-name of HttpOnly and an empty + // attribute-value. + + cookieAttributeList.httpOnly = true + } else if (attributeNameLowercase === 'samesite') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 + // If the attribute-name case-insensitively matches the string + // "SameSite", the user agent MUST process the cookie-av as follows: + + // 1. Let enforcement be "Default". + let enforcement = 'Default' + + const attributeValueLowercase = attributeValue.toLowerCase() + // 2. If cookie-av's attribute-value is a case-insensitive match for + // "None", set enforcement to "None". + if (attributeValueLowercase.includes('none')) { + enforcement = 'None' + } + + // 3. If cookie-av's attribute-value is a case-insensitive match for + // "Strict", set enforcement to "Strict". + if (attributeValueLowercase.includes('strict')) { + enforcement = 'Strict' + } + + // 4. If cookie-av's attribute-value is a case-insensitive match for + // "Lax", set enforcement to "Lax". + if (attributeValueLowercase.includes('lax')) { + enforcement = 'Lax' + } + + // 5. 
Append an attribute to the cookie-attribute-list with an + // attribute-name of "SameSite" and an attribute-value of + // enforcement. + cookieAttributeList.sameSite = enforcement + } else { + cookieAttributeList.unparsed ??= [] + + cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) + } + + // 8. Return to Step 1 of this algorithm. + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) +} + +module.exports = { + parseSetCookie, + parseUnparsedAttributes +} diff --git a/lib/cookies/util.js b/lib/cookies/util.js new file mode 100644 index 0000000..2290329 --- /dev/null +++ b/lib/cookies/util.js @@ -0,0 +1,291 @@ +'use strict' + +const assert = require('assert') +const { kHeadersList } = require('../core/symbols') + +function isCTLExcludingHtab (value) { + if (value.length === 0) { + return false + } + + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + (code >= 0x00 || code <= 0x08) || + (code >= 0x0A || code <= 0x1F) || + code === 0x7F + ) { + return false + } + } +} + +/** + CHAR = + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + * @param {string} name + */ +function validateCookieName (name) { + for (const char of name) { + const code = char.charCodeAt(0) + + if ( + (code <= 0x20 || code > 0x7F) || + char === '(' || + char === ')' || + char === '>' || + char === '<' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' + ) { + throw new Error('Invalid cookie name') + } + } +} + +/** + cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + ; US-ASCII characters excluding CTLs, + ; whitespace DQUOTE, comma, semicolon, + ; and backslash + * @param {string} value + */ +function validateCookieValue (value) { + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || // exclude CTLs (0-31) + code === 0x22 || + code === 0x2C || + code === 0x3B || + code === 0x5C || + code > 0x7E // non-ascii + ) { + throw new Error('Invalid header value') + } + } +} + +/** + * path-value = + * @param {string} path + */ +function validateCookiePath (path) { + for (const char of path) { + const code = char.charCodeAt(0) + + if (code < 0x21 || char === ';') { + throw new Error('Invalid cookie path') + } + } +} + +/** + * I have no idea why these values aren't allowed to be honest, + * but Deno tests these. 
- Khafra + * @param {string} domain + */ +function validateCookieDomain (domain) { + if ( + domain.startsWith('-') || + domain.endsWith('.') || + domain.endsWith('-') + ) { + throw new Error('Invalid cookie domain') + } +} + +/** + * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 + * @param {number|Date} date + IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT + ; fixed length/zone/capitalization subset of the format + ; see Section 3.3 of [RFC5322] + + day-name = %x4D.6F.6E ; "Mon", case-sensitive + / %x54.75.65 ; "Tue", case-sensitive + / %x57.65.64 ; "Wed", case-sensitive + / %x54.68.75 ; "Thu", case-sensitive + / %x46.72.69 ; "Fri", case-sensitive + / %x53.61.74 ; "Sat", case-sensitive + / %x53.75.6E ; "Sun", case-sensitive + date1 = day SP month SP year + ; e.g., 02 Jun 1982 + + day = 2DIGIT + month = %x4A.61.6E ; "Jan", case-sensitive + / %x46.65.62 ; "Feb", case-sensitive + / %x4D.61.72 ; "Mar", case-sensitive + / %x41.70.72 ; "Apr", case-sensitive + / %x4D.61.79 ; "May", case-sensitive + / %x4A.75.6E ; "Jun", case-sensitive + / %x4A.75.6C ; "Jul", case-sensitive + / %x41.75.67 ; "Aug", case-sensitive + / %x53.65.70 ; "Sep", case-sensitive + / %x4F.63.74 ; "Oct", case-sensitive + / %x4E.6F.76 ; "Nov", case-sensitive + / %x44.65.63 ; "Dec", case-sensitive + year = 4DIGIT + + GMT = %x47.4D.54 ; "GMT", case-sensitive + + time-of-day = hour ":" minute ":" second + ; 00:00:00 - 23:59:60 (leap second) + + hour = 2DIGIT + minute = 2DIGIT + second = 2DIGIT + */ +function toIMFDate (date) { + if (typeof date === 'number') { + date = new Date(date) + } + + const days = [ + 'Sun', 'Mon', 'Tue', 'Wed', + 'Thu', 'Fri', 'Sat' + ] + + const months = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' + ] + + const dayName = days[date.getUTCDay()] + const day = date.getUTCDate().toString().padStart(2, '0') + const month = months[date.getUTCMonth()] + const year = date.getUTCFullYear() + const hour = date.getUTCHours().toString().padStart(2, '0') + const minute = date.getUTCMinutes().toString().padStart(2, '0') + const second = date.getUTCSeconds().toString().padStart(2, '0') + + return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` +} + +/** + max-age-av = "Max-Age=" non-zero-digit *DIGIT + ; In practice, both expires-av and max-age-av + ; are limited to dates representable by the + ; user agent. 
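+ (As implemented below, only negative values are rejected; a "Max-Age=0"
+ attribute therefore passes this validator even though the ABNF above
+ requires a leading non-zero digit.)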
+ * @param {number} maxAge + */ +function validateCookieMaxAge (maxAge) { + if (maxAge < 0) { + throw new Error('Invalid cookie max-age') + } +} + +/** + * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 + * @param {import('./index').Cookie} cookie + */ +function stringify (cookie) { + if (cookie.name.length === 0) { + return null + } + + validateCookieName(cookie.name) + validateCookieValue(cookie.value) + + const out = [`${cookie.name}=${cookie.value}`] + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 + if (cookie.name.startsWith('__Secure-')) { + cookie.secure = true + } + + if (cookie.name.startsWith('__Host-')) { + cookie.secure = true + cookie.domain = null + cookie.path = '/' + } + + if (cookie.secure) { + out.push('Secure') + } + + if (cookie.httpOnly) { + out.push('HttpOnly') + } + + if (typeof cookie.maxAge === 'number') { + validateCookieMaxAge(cookie.maxAge) + out.push(`Max-Age=${cookie.maxAge}`) + } + + if (cookie.domain) { + validateCookieDomain(cookie.domain) + out.push(`Domain=${cookie.domain}`) + } + + if (cookie.path) { + validateCookiePath(cookie.path) + out.push(`Path=${cookie.path}`) + } + + if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { + out.push(`Expires=${toIMFDate(cookie.expires)}`) + } + + if (cookie.sameSite) { + out.push(`SameSite=${cookie.sameSite}`) + } + + for (const part of cookie.unparsed) { + if (!part.includes('=')) { + throw new Error('Invalid unparsed') + } + + const [key, ...value] = part.split('=') + + out.push(`${key.trim()}=${value.join('=')}`) + } + + return out.join('; ') +} + +let kHeadersListNode + +function getHeadersList (headers) { + if (headers[kHeadersList]) { + return headers[kHeadersList] + } + + if (!kHeadersListNode) { + kHeadersListNode = Object.getOwnPropertySymbols(headers).find( + (symbol) => symbol.description === 'headers list' + ) + + assert(kHeadersListNode, 'Headers cannot be parsed') + } + + const headersList = headers[kHeadersListNode] + assert(headersList) + + return headersList +} + +module.exports = { + isCTLExcludingHtab, + stringify, + getHeadersList +} diff --git a/lib/core/connect.js b/lib/core/connect.js new file mode 100644 index 0000000..3309117 --- /dev/null +++ b/lib/core/connect.js @@ -0,0 +1,189 @@ +'use strict' + +const net = require('net') +const assert = require('assert') +const util = require('./util') +const { InvalidArgumentError, ConnectTimeoutError } = require('./errors') + +let tls // include tls conditionally since it is not always available + +// TODO: session re-use does not wait for the first +// connection to resolve the session and might therefore +// resolve the same servername multiple times even when +// re-use is enabled. 
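+
+ // Usage sketch (illustrative only): the connector factory exported at the
+ // bottom of this file builds a `connect(options, callback)` function, e.g.
+ //
+ //   const buildConnector = require('./core/connect')
+ //   const connect = buildConnector({ timeout: 10e3, maxCachedSessions: 100 })
+ //   connect({ protocol: 'https:', hostname: 'example.org', host: 'example.org', port: 443 }, (err, socket) => {
+ //     if (err) throw err
+ //     socket.end()
+ //   })
+ //
+ // The option and argument names mirror the parameters destructured in
+ // buildConnector/connect below.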
+ +let SessionCache +// FIXME: remove workaround when the Node bug is fixed +// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 +if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { + SessionCache = class WeakSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + this._sessionRegistry = new global.FinalizationRegistry((key) => { + if (this._sessionCache.size < this._maxCachedSessions) { + return + } + + const ref = this._sessionCache.get(key) + if (ref !== undefined && ref.deref() === undefined) { + this._sessionCache.delete(key) + } + }) + } + + get (sessionKey) { + const ref = this._sessionCache.get(sessionKey) + return ref ? ref.deref() : null + } + + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + this._sessionCache.set(sessionKey, new WeakRef(session)) + this._sessionRegistry.register(session, sessionKey) + } + } +} else { + SessionCache = class SimpleSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + } + + get (sessionKey) { + return this._sessionCache.get(sessionKey) + } + + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + if (this._sessionCache.size >= this._maxCachedSessions) { + // remove the oldest session + const { value: oldestKey } = this._sessionCache.keys().next() + this._sessionCache.delete(oldestKey) + } + + this._sessionCache.set(sessionKey, session) + } + } +} + +function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { + if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { + throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') + } + + const options = { path: socketPath, ...opts } + const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) + timeout = timeout == null ? 10e3 : timeout + allowH2 = allowH2 != null ? allowH2 : false + return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { + let socket + if (protocol === 'https:') { + if (!tls) { + tls = require('tls') + } + servername = servername || options.servername || util.getServerName(host) || null + + const sessionKey = servername || hostname + const session = sessionCache.get(sessionKey) || null + + assert(sessionKey) + + socket = tls.connect({ + highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... + ...options, + servername, + session, + localAddress, + // TODO(HTTP/2): Add support for h2c + ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], + socket: httpSocket, // upgrade socket connection + port: port || 443, + host: hostname + }) + + socket + .on('session', function (session) { + // TODO (fix): Can a session become invalid once established? Don't think so? + sessionCache.set(sessionKey, session) + }) + } else { + assert(!httpSocket, 'httpSocket can only be sent on TLS update') + socket = net.connect({ + highWaterMark: 64 * 1024, // Same as nodejs fs streams. + ...options, + localAddress, + port: port || 80, + host: hostname + }) + } + + // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket + if (options.keepAlive == null || options.keepAlive) { + const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 
60e3 : options.keepAliveInitialDelay + socket.setKeepAlive(true, keepAliveInitialDelay) + } + + const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) + + socket + .setNoDelay(true) + .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(null, this) + } + }) + .on('error', function (err) { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(err) + } + }) + + return socket + } +} + +function setupTimeout (onConnectTimeout, timeout) { + if (!timeout) { + return () => {} + } + + let s1 = null + let s2 = null + const timeoutId = setTimeout(() => { + // setImmediate is added to make sure that we priotorise socket error events over timeouts + s1 = setImmediate(() => { + if (process.platform === 'win32') { + // Windows needs an extra setImmediate probably due to implementation differences in the socket logic + s2 = setImmediate(() => onConnectTimeout()) + } else { + onConnectTimeout() + } + }) + }, timeout) + return () => { + clearTimeout(timeoutId) + clearImmediate(s1) + clearImmediate(s2) + } +} + +function onConnectTimeout (socket) { + util.destroy(socket, new ConnectTimeoutError()) +} + +module.exports = buildConnector diff --git a/lib/core/errors.js b/lib/core/errors.js new file mode 100644 index 0000000..7af704b --- /dev/null +++ b/lib/core/errors.js @@ -0,0 +1,230 @@ +'use strict' + +class UndiciError extends Error { + constructor (message) { + super(message) + this.name = 'UndiciError' + this.code = 'UND_ERR' + } +} + +class ConnectTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ConnectTimeoutError) + this.name = 'ConnectTimeoutError' + this.message = message || 'Connect Timeout Error' + this.code = 'UND_ERR_CONNECT_TIMEOUT' + } +} + +class HeadersTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersTimeoutError) + this.name = 'HeadersTimeoutError' + this.message = message || 'Headers Timeout Error' + this.code = 'UND_ERR_HEADERS_TIMEOUT' + } +} + +class HeadersOverflowError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersOverflowError) + this.name = 'HeadersOverflowError' + this.message = message || 'Headers Overflow Error' + this.code = 'UND_ERR_HEADERS_OVERFLOW' + } +} + +class BodyTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, BodyTimeoutError) + this.name = 'BodyTimeoutError' + this.message = message || 'Body Timeout Error' + this.code = 'UND_ERR_BODY_TIMEOUT' + } +} + +class ResponseStatusCodeError extends UndiciError { + constructor (message, statusCode, headers, body) { + super(message) + Error.captureStackTrace(this, ResponseStatusCodeError) + this.name = 'ResponseStatusCodeError' + this.message = message || 'Response Status Code Error' + this.code = 'UND_ERR_RESPONSE_STATUS_CODE' + this.body = body + this.status = statusCode + this.statusCode = statusCode + this.headers = headers + } +} + +class InvalidArgumentError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidArgumentError) + this.name = 'InvalidArgumentError' + this.message = message || 'Invalid Argument Error' + this.code = 'UND_ERR_INVALID_ARG' + } +} + +class InvalidReturnValueError extends UndiciError { + constructor (message) { + super(message) + 
Error.captureStackTrace(this, InvalidReturnValueError) + this.name = 'InvalidReturnValueError' + this.message = message || 'Invalid Return Value Error' + this.code = 'UND_ERR_INVALID_RETURN_VALUE' + } +} + +class RequestAbortedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestAbortedError) + this.name = 'AbortError' + this.message = message || 'Request aborted' + this.code = 'UND_ERR_ABORTED' + } +} + +class InformationalError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InformationalError) + this.name = 'InformationalError' + this.message = message || 'Request information' + this.code = 'UND_ERR_INFO' + } +} + +class RequestContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestContentLengthMismatchError) + this.name = 'RequestContentLengthMismatchError' + this.message = message || 'Request body length does not match content-length header' + this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' + } +} + +class ResponseContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseContentLengthMismatchError) + this.name = 'ResponseContentLengthMismatchError' + this.message = message || 'Response body length does not match content-length header' + this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' + } +} + +class ClientDestroyedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientDestroyedError) + this.name = 'ClientDestroyedError' + this.message = message || 'The client is destroyed' + this.code = 'UND_ERR_DESTROYED' + } +} + +class ClientClosedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientClosedError) + this.name = 'ClientClosedError' + this.message = message || 'The client is closed' + this.code = 'UND_ERR_CLOSED' + } +} + +class SocketError extends UndiciError { + constructor (message, socket) { + super(message) + Error.captureStackTrace(this, SocketError) + this.name = 'SocketError' + this.message = message || 'Socket error' + this.code = 'UND_ERR_SOCKET' + this.socket = socket + } +} + +class NotSupportedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'NotSupportedError' + this.message = message || 'Not supported error' + this.code = 'UND_ERR_NOT_SUPPORTED' + } +} + +class BalancedPoolMissingUpstreamError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'MissingUpstreamError' + this.message = message || 'No upstream has been added to the BalancedPool' + this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' + } +} + +class HTTPParserError extends Error { + constructor (message, code, data) { + super(message) + Error.captureStackTrace(this, HTTPParserError) + this.name = 'HTTPParserError' + this.code = code ? `HPE_${code}` : undefined + this.data = data ? 
data.toString() : undefined + } +} + +class ResponseExceededMaxSizeError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseExceededMaxSizeError) + this.name = 'ResponseExceededMaxSizeError' + this.message = message || 'Response content exceeded max size' + this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' + } +} + +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers + } +} + +module.exports = { + HTTPParserError, + UndiciError, + HeadersTimeoutError, + HeadersOverflowError, + BodyTimeoutError, + RequestContentLengthMismatchError, + ConnectTimeoutError, + ResponseStatusCodeError, + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError, + ClientDestroyedError, + ClientClosedError, + InformationalError, + SocketError, + NotSupportedError, + ResponseContentLengthMismatchError, + BalancedPoolMissingUpstreamError, + ResponseExceededMaxSizeError, + RequestRetryError +} diff --git a/lib/core/request.js b/lib/core/request.js new file mode 100644 index 0000000..3697e6a --- /dev/null +++ b/lib/core/request.js @@ -0,0 +1,499 @@ +'use strict' + +const { + InvalidArgumentError, + NotSupportedError +} = require('./errors') +const assert = require('assert') +const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols') +const util = require('./util') + +// tokenRegExp and headerCharRegex have been lifted from +// https://github.com/nodejs/node/blob/main/lib/_http_common.js + +/** + * Verifies that the given val is a valid HTTP token + * per the rules defined in RFC 7230 + * See https://tools.ietf.org/html/rfc7230#section-3.2.6 + */ +const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ + +/** + * Matches if val contains an invalid field-vchar + * field-value = *( field-content / obs-fold ) + * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] + * field-vchar = VCHAR / obs-text + */ +const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ + +// Verifies that a given path is valid does not contain control chars \x00 to \x20 +const invalidPathRegex = /[^\u0021-\u00ff]/ + +const kHandler = Symbol('handler') + +const channels = {} + +let extractBody + +try { + const diagnosticsChannel = require('diagnostics_channel') + channels.create = diagnosticsChannel.channel('undici:request:create') + channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') + channels.headers = diagnosticsChannel.channel('undici:request:headers') + channels.trailers = diagnosticsChannel.channel('undici:request:trailers') + channels.error = diagnosticsChannel.channel('undici:request:error') +} catch { + channels.create = { hasSubscribers: false } + channels.bodySent = { hasSubscribers: false } + channels.headers = { hasSubscribers: false } + channels.trailers = { hasSubscribers: false } + channels.error = { hasSubscribers: false } +} + +class Request { + constructor (origin, { + path, + method, + body, + headers, + query, + idempotent, + blocking, + upgrade, + headersTimeout, + bodyTimeout, + reset, + throwOnError, + expectContinue + }, handler) { + if (typeof path !== 'string') { + throw new InvalidArgumentError('path must be a string') + } else if ( + path[0] !== '/' && + !(path.startsWith('http://') || 
path.startsWith('https://')) && + method !== 'CONNECT' + ) { + throw new InvalidArgumentError('path must be an absolute URL or start with a slash') + } else if (invalidPathRegex.exec(path) !== null) { + throw new InvalidArgumentError('invalid request path') + } + + if (typeof method !== 'string') { + throw new InvalidArgumentError('method must be a string') + } else if (tokenRegExp.exec(method) === null) { + throw new InvalidArgumentError('invalid request method') + } + + if (upgrade && typeof upgrade !== 'string') { + throw new InvalidArgumentError('upgrade must be a string') + } + + if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('invalid headersTimeout') + } + + if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('invalid bodyTimeout') + } + + if (reset != null && typeof reset !== 'boolean') { + throw new InvalidArgumentError('invalid reset') + } + + if (expectContinue != null && typeof expectContinue !== 'boolean') { + throw new InvalidArgumentError('invalid expectContinue') + } + + this.headersTimeout = headersTimeout + + this.bodyTimeout = bodyTimeout + + this.throwOnError = throwOnError === true + + this.method = method + + this.abort = null + + if (body == null) { + this.body = null + } else if (util.isStream(body)) { + this.body = body + + const rState = this.body._readableState + if (!rState || !rState.autoDestroy) { + this.endHandler = function autoDestroy () { + util.destroy(this) + } + this.body.on('end', this.endHandler) + } + + this.errorHandler = err => { + if (this.abort) { + this.abort(err) + } else { + this.error = err + } + } + this.body.on('error', this.errorHandler) + } else if (util.isBuffer(body)) { + this.body = body.byteLength ? body : null + } else if (ArrayBuffer.isView(body)) { + this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null + } else if (body instanceof ArrayBuffer) { + this.body = body.byteLength ? Buffer.from(body) : null + } else if (typeof body === 'string') { + this.body = body.length ? Buffer.from(body) : null + } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { + this.body = body + } else { + throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') + } + + this.completed = false + + this.aborted = false + + this.upgrade = upgrade || null + + this.path = query ? util.buildURL(path, query) : path + + this.origin = origin + + this.idempotent = idempotent == null + ? method === 'HEAD' || method === 'GET' + : idempotent + + this.blocking = blocking == null ? false : blocking + + this.reset = reset == null ? null : reset + + this.host = null + + this.contentLength = null + + this.contentType = null + + this.headers = '' + + // Only for H2 + this.expectContinue = expectContinue != null ? 
expectContinue : false + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(this, headers[i], headers[i + 1]) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(this, key, headers[key]) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } + + if (util.isFormDataLike(this.body)) { + if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { + throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') + } + + if (!extractBody) { + extractBody = require('../fetch/body.js').extractBody + } + + const [bodyStream, contentType] = extractBody(body) + if (this.contentType == null) { + this.contentType = contentType + this.headers += `content-type: ${contentType}\r\n` + } + this.body = bodyStream.stream + this.contentLength = bodyStream.length + } else if (util.isBlobLike(body) && this.contentType == null && body.type) { + this.contentType = body.type + this.headers += `content-type: ${body.type}\r\n` + } + + util.validateHandler(handler, method, upgrade) + + this.servername = util.getServerName(this.host) + + this[kHandler] = handler + + if (channels.create.hasSubscribers) { + channels.create.publish({ request: this }) + } + } + + onBodySent (chunk) { + if (this[kHandler].onBodySent) { + try { + return this[kHandler].onBodySent(chunk) + } catch (err) { + this.abort(err) + } + } + } + + onRequestSent () { + if (channels.bodySent.hasSubscribers) { + channels.bodySent.publish({ request: this }) + } + + if (this[kHandler].onRequestSent) { + try { + return this[kHandler].onRequestSent() + } catch (err) { + this.abort(err) + } + } + } + + onConnect (abort) { + assert(!this.aborted) + assert(!this.completed) + + if (this.error) { + abort(this.error) + } else { + this.abort = abort + return this[kHandler].onConnect(abort) + } + } + + onHeaders (statusCode, headers, resume, statusText) { + assert(!this.aborted) + assert(!this.completed) + + if (channels.headers.hasSubscribers) { + channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) + } + + try { + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + } catch (err) { + this.abort(err) + } + } + + onData (chunk) { + assert(!this.aborted) + assert(!this.completed) + + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false + } + } + + onUpgrade (statusCode, headers, socket) { + assert(!this.aborted) + assert(!this.completed) + + return this[kHandler].onUpgrade(statusCode, headers, socket) + } + + onComplete (trailers) { + this.onFinally() + + assert(!this.aborted) + + this.completed = true + if (channels.trailers.hasSubscribers) { + channels.trailers.publish({ request: this, trailers }) + } + + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? 
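+ // If the handler's onComplete throws, the error is funnelled through
+ // onError below, which also flips `aborted` to true even though the
+ // response body itself completed successfully.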
+ this.onError(err) + } + } + + onError (error) { + this.onFinally() + + if (channels.error.hasSubscribers) { + channels.error.publish({ request: this, error }) + } + + if (this.aborted) { + return + } + this.aborted = true + + return this[kHandler].onError(error) + } + + onFinally () { + if (this.errorHandler) { + this.body.off('error', this.errorHandler) + this.errorHandler = null + } + + if (this.endHandler) { + this.body.off('end', this.endHandler) + this.endHandler = null + } + } + + // TODO: adjust to support H2 + addHeader (key, value) { + processHeader(this, key, value) + return this + } + + static [kHTTP1BuildRequest] (origin, opts, handler) { + // TODO: Migrate header parsing here, to make Requests + // HTTP agnostic + return new Request(origin, opts, handler) + } + + static [kHTTP2BuildRequest] (origin, opts, handler) { + const headers = opts.headers + opts = { ...opts, headers: null } + + const request = new Request(origin, opts, handler) + + request.headers = {} + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(request, headers[i], headers[i + 1], true) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(request, key, headers[key], true) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } + + return request + } + + static [kHTTP2CopyHeaders] (raw) { + const rawHeaders = raw.split('\r\n') + const headers = {} + + for (const header of rawHeaders) { + const [key, value] = header.split(': ') + + if (value == null || value.length === 0) continue + + if (headers[key]) headers[key] += `,${value}` + else headers[key] = value + } + + return headers + } +} + +function processHeaderValue (key, val, skipAppend) { + if (val && typeof val === 'object') { + throw new InvalidArgumentError(`invalid ${key} header`) + } + + val = val != null ? `${val}` : '' + + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + + return skipAppend ? 
val : `${key}: ${val}\r\n` +} + +function processHeader (request, key, val, skipAppend = false) { + if (val && (typeof val === 'object' && !Array.isArray(val))) { + throw new InvalidArgumentError(`invalid ${key} header`) + } else if (val === undefined) { + return + } + + if ( + request.host === null && + key.length === 4 && + key.toLowerCase() === 'host' + ) { + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + // Consumed by Client + request.host = val + } else if ( + request.contentLength === null && + key.length === 14 && + key.toLowerCase() === 'content-length' + ) { + request.contentLength = parseInt(val, 10) + if (!Number.isFinite(request.contentLength)) { + throw new InvalidArgumentError('invalid content-length header') + } + } else if ( + request.contentType === null && + key.length === 12 && + key.toLowerCase() === 'content-type' + ) { + request.contentType = val + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } else if ( + key.length === 17 && + key.toLowerCase() === 'transfer-encoding' + ) { + throw new InvalidArgumentError('invalid transfer-encoding header') + } else if ( + key.length === 10 && + key.toLowerCase() === 'connection' + ) { + const value = typeof val === 'string' ? val.toLowerCase() : null + if (value !== 'close' && value !== 'keep-alive') { + throw new InvalidArgumentError('invalid connection header') + } else if (value === 'close') { + request.reset = true + } + } else if ( + key.length === 10 && + key.toLowerCase() === 'keep-alive' + ) { + throw new InvalidArgumentError('invalid keep-alive header') + } else if ( + key.length === 7 && + key.toLowerCase() === 'upgrade' + ) { + throw new InvalidArgumentError('invalid upgrade header') + } else if ( + key.length === 6 && + key.toLowerCase() === 'expect' + ) { + throw new NotSupportedError('expect header not supported') + } else if (tokenRegExp.exec(key) === null) { + throw new InvalidArgumentError('invalid header key') + } else { + if (Array.isArray(val)) { + for (let i = 0; i < val.length; i++) { + if (skipAppend) { + if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` + else request.headers[key] = processHeaderValue(key, val[i], skipAppend) + } else { + request.headers += processHeaderValue(key, val[i]) + } + } + } else { + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } + } +} + +module.exports = Request diff --git a/lib/core/symbols.js b/lib/core/symbols.js new file mode 100644 index 0000000..68d8566 --- /dev/null +++ b/lib/core/symbols.js @@ -0,0 +1,63 @@ +module.exports = { + kClose: Symbol('close'), + kDestroy: Symbol('destroy'), + kDispatch: Symbol('dispatch'), + kUrl: Symbol('url'), + kWriting: Symbol('writing'), + kResuming: Symbol('resuming'), + kQueue: Symbol('queue'), + kConnect: Symbol('connect'), + kConnecting: Symbol('connecting'), + kHeadersList: Symbol('headers list'), + kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), + kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), + kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), + kKeepAliveTimeoutValue: Symbol('keep alive timeout'), + kKeepAlive: Symbol('keep alive'), + kHeadersTimeout: Symbol('headers timeout'), + kBodyTimeout: Symbol('body timeout'), + kServerName: Symbol('server name'), + kLocalAddress: Symbol('local address'), + 
kHost: Symbol('host'), + kNoRef: Symbol('no ref'), + kBodyUsed: Symbol('used'), + kRunning: Symbol('running'), + kBlocking: Symbol('blocking'), + kPending: Symbol('pending'), + kSize: Symbol('size'), + kBusy: Symbol('busy'), + kQueued: Symbol('queued'), + kFree: Symbol('free'), + kConnected: Symbol('connected'), + kClosed: Symbol('closed'), + kNeedDrain: Symbol('need drain'), + kReset: Symbol('reset'), + kDestroyed: Symbol.for('nodejs.stream.destroyed'), + kMaxHeadersSize: Symbol('max headers size'), + kRunningIdx: Symbol('running index'), + kPendingIdx: Symbol('pending index'), + kError: Symbol('error'), + kClients: Symbol('clients'), + kClient: Symbol('client'), + kParser: Symbol('parser'), + kOnDestroyed: Symbol('destroy callbacks'), + kPipelining: Symbol('pipelining'), + kSocket: Symbol('socket'), + kHostHeader: Symbol('host header'), + kConnector: Symbol('connector'), + kStrictContentLength: Symbol('strict content length'), + kMaxRedirections: Symbol('maxRedirections'), + kMaxRequests: Symbol('maxRequestsPerClient'), + kProxy: Symbol('proxy agent options'), + kCounter: Symbol('socket request counter'), + kInterceptors: Symbol('dispatch interceptors'), + kMaxResponseSize: Symbol('max response size'), + kHTTP2Session: Symbol('http2Session'), + kHTTP2SessionState: Symbol('http2Session state'), + kHTTP2BuildRequest: Symbol('http2 build request'), + kHTTP1BuildRequest: Symbol('http1 build request'), + kHTTP2CopyHeaders: Symbol('http2 copy headers'), + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') +} diff --git a/lib/core/util.js b/lib/core/util.js new file mode 100644 index 0000000..8d5450b --- /dev/null +++ b/lib/core/util.js @@ -0,0 +1,511 @@ +'use strict' + +const assert = require('assert') +const { kDestroyed, kBodyUsed } = require('./symbols') +const { IncomingMessage } = require('http') +const stream = require('stream') +const net = require('net') +const { InvalidArgumentError } = require('./errors') +const { Blob } = require('buffer') +const nodeUtil = require('util') +const { stringify } = require('querystring') + +const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) + +function nop () {} + +function isStream (obj) { + return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' +} + +// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) +function isBlobLike (object) { + return (Blob && object instanceof Blob) || ( + object && + typeof object === 'object' && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + /^(Blob|File)$/.test(object[Symbol.toStringTag]) + ) +} + +function buildURL (url, queryParams) { + if (url.includes('?') || url.includes('#')) { + throw new Error('Query params cannot be passed when url already contains "?" or "#".') + } + + const stringified = stringify(queryParams) + + if (stringified) { + url += '?' 
+ stringified + } + + return url +} + +function parseURL (url) { + if (typeof url === 'string') { + url = new URL(url) + + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + return url + } + + if (!url || typeof url !== 'object') { + throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') + } + + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + if (!(url instanceof URL)) { + if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { + throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') + } + + if (url.path != null && typeof url.path !== 'string') { + throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') + } + + if (url.pathname != null && typeof url.pathname !== 'string') { + throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') + } + + if (url.hostname != null && typeof url.hostname !== 'string') { + throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') + } + + if (url.origin != null && typeof url.origin !== 'string') { + throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') + } + + const port = url.port != null + ? url.port + : (url.protocol === 'https:' ? 443 : 80) + let origin = url.origin != null + ? url.origin + : `${url.protocol}//${url.hostname}:${port}` + let path = url.path != null + ? url.path + : `${url.pathname || ''}${url.search || ''}` + + if (origin.endsWith('/')) { + origin = origin.substring(0, origin.length - 1) + } + + if (path && !path.startsWith('/')) { + path = `/${path}` + } + // new URL(path, origin) is unsafe when `path` contains an absolute URL + // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: + // If first parameter is a relative URL, second param is required, and will be used as the base URL. + // If first parameter is an absolute URL, a given second param will be ignored. 
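+ // For example, with origin 'http://example.org' and path '/foo?bar=1' the
+ // line below constructs new URL('http://example.org/foo?bar=1'); string
+ // concatenation is used so the origin can never be displaced by the path.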
+ url = new URL(origin + path) + } + + return url +} + +function parseOrigin (url) { + url = parseURL(url) + + if (url.pathname !== '/' || url.search || url.hash) { + throw new InvalidArgumentError('invalid url') + } + + return url +} + +function getHostname (host) { + if (host[0] === '[') { + const idx = host.indexOf(']') + + assert(idx !== -1) + return host.substring(1, idx) + } + + const idx = host.indexOf(':') + if (idx === -1) return host + + return host.substring(0, idx) +} + +// IP addresses are not valid server names per RFC6066 +// > Currently, the only server names supported are DNS hostnames +function getServerName (host) { + if (!host) { + return null + } + + assert.strictEqual(typeof host, 'string') + + const servername = getHostname(host) + if (net.isIP(servername)) { + return '' + } + + return servername +} + +function deepClone (obj) { + return JSON.parse(JSON.stringify(obj)) +} + +function isAsyncIterable (obj) { + return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') +} + +function isIterable (obj) { + return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) +} + +function bodyLength (body) { + if (body == null) { + return 0 + } else if (isStream(body)) { + const state = body._readableState + return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) + ? state.length + : null + } else if (isBlobLike(body)) { + return body.size != null ? body.size : null + } else if (isBuffer(body)) { + return body.byteLength + } + + return null +} + +function isDestroyed (stream) { + return !stream || !!(stream.destroyed || stream[kDestroyed]) +} + +function isReadableAborted (stream) { + const state = stream && stream._readableState + return isDestroyed(stream) && state && !state.endEmitted +} + +function destroy (stream, err) { + if (stream == null || !isStream(stream) || isDestroyed(stream)) { + return + } + + if (typeof stream.destroy === 'function') { + if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { + // See: https://github.com/nodejs/node/pull/38505/files + stream.socket = null + } + + stream.destroy(err) + } else if (err) { + process.nextTick((stream, err) => { + stream.emit('error', err) + }, stream, err) + } + + if (stream.destroyed !== true) { + stream[kDestroyed] = true + } +} + +const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ +function parseKeepAliveTimeout (val) { + const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) + return m ? 
parseInt(m[1], 10) * 1000 : null +} + +function parseHeaders (headers, obj = {}) { + // For H2 support + if (!Array.isArray(headers)) return headers + + for (let i = 0; i < headers.length; i += 2) { + const key = headers[i].toString().toLowerCase() + let val = obj[key] + + if (!val) { + if (Array.isArray(headers[i + 1])) { + obj[key] = headers[i + 1].map(x => x.toString('utf8')) + } else { + obj[key] = headers[i + 1].toString('utf8') + } + } else { + if (!Array.isArray(val)) { + val = [val] + obj[key] = val + } + val.push(headers[i + 1].toString('utf8')) + } + } + + // See https://github.com/nodejs/node/pull/46528 + if ('content-length' in obj && 'content-disposition' in obj) { + obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') + } + + return obj +} + +function parseRawHeaders (headers) { + const ret = [] + let hasContentLength = false + let contentDispositionIdx = -1 + + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n + 0].toString() + const val = headers[n + 1].toString('utf8') + + if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { + ret.push(key, val) + hasContentLength = true + } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { + contentDispositionIdx = ret.push(key, val) - 1 + } else { + ret.push(key, val) + } + } + + // See https://github.com/nodejs/node/pull/46528 + if (hasContentLength && contentDispositionIdx !== -1) { + ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + } + + return ret +} + +function isBuffer (buffer) { + // See, https://github.com/mcollina/undici/pull/319 + return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) +} + +function validateHandler (handler, method, upgrade) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } + + if (typeof handler.onConnect !== 'function') { + throw new InvalidArgumentError('invalid onConnect method') + } + + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } + + if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { + throw new InvalidArgumentError('invalid onBodySent method') + } + + if (upgrade || method === 'CONNECT') { + if (typeof handler.onUpgrade !== 'function') { + throw new InvalidArgumentError('invalid onUpgrade method') + } + } else { + if (typeof handler.onHeaders !== 'function') { + throw new InvalidArgumentError('invalid onHeaders method') + } + + if (typeof handler.onData !== 'function') { + throw new InvalidArgumentError('invalid onData method') + } + + if (typeof handler.onComplete !== 'function') { + throw new InvalidArgumentError('invalid onComplete method') + } + } +} + +// A body is disturbed if it has been read from and it cannot +// be re-used without losing state or data. +function isDisturbed (body) { + return !!(body && ( + stream.isDisturbed + ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? + : body[kBodyUsed] || + body.readableDidRead || + (body._readableState && body._readableState.dataEmitted) || + isReadableAborted(body) + )) +} + +function isErrored (body) { + return !!(body && ( + stream.isErrored + ? stream.isErrored(body) + : /state: 'errored'/.test(nodeUtil.inspect(body) + ))) +} + +function isReadable (body) { + return !!(body && ( + stream.isReadable + ? 
stream.isReadable(body) + : /state: 'readable'/.test(nodeUtil.inspect(body) + ))) +} + +function getSocketInfo (socket) { + return { + localAddress: socket.localAddress, + localPort: socket.localPort, + remoteAddress: socket.remoteAddress, + remotePort: socket.remotePort, + remoteFamily: socket.remoteFamily, + timeout: socket.timeout, + bytesWritten: socket.bytesWritten, + bytesRead: socket.bytesRead + } +} + +async function * convertIterableToBuffer (iterable) { + for await (const chunk of iterable) { + yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + } +} + +let ReadableStream +function ReadableStreamFrom (iterable) { + if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + if (ReadableStream.from) { + return ReadableStream.from(convertIterableToBuffer(iterable)) + } + + let iterator + return new ReadableStream( + { + async start () { + iterator = iterable[Symbol.asyncIterator]() + }, + async pull (controller) { + const { done, value } = await iterator.next() + if (done) { + queueMicrotask(() => { + controller.close() + }) + } else { + const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) + controller.enqueue(new Uint8Array(buf)) + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + } + }, + 0 + ) +} + +// The chunk should be a FormData instance and contains +// all the required methods. +function isFormDataLike (object) { + return ( + object && + typeof object === 'object' && + typeof object.append === 'function' && + typeof object.delete === 'function' && + typeof object.get === 'function' && + typeof object.getAll === 'function' && + typeof object.has === 'function' && + typeof object.set === 'function' && + object[Symbol.toStringTag] === 'FormData' + ) +} + +function throwIfAborted (signal) { + if (!signal) { return } + if (typeof signal.throwIfAborted === 'function') { + signal.throwIfAborted() + } else { + if (signal.aborted) { + // DOMException not available < v17.0.0 + const err = new Error('The operation was aborted') + err.name = 'AbortError' + throw err + } + } +} + +function addAbortListener (signal, listener) { + if ('addEventListener' in signal) { + signal.addEventListener('abort', listener, { once: true }) + return () => signal.removeEventListener('abort', listener) + } + signal.addListener('abort', listener) + return () => signal.removeListener('abort', listener) +} + +const hasToWellFormed = !!String.prototype.toWellFormed + +/** + * @param {string} val + */ +function toUSVString (val) { + if (hasToWellFormed) { + return `${val}`.toWellFormed() + } else if (nodeUtil.toUSVString) { + return nodeUtil.toUSVString(val) + } + + return `${val}` +} + +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } + + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? 
parseInt(m[3]) : null + } + : null +} + +const kEnumerableProperty = Object.create(null) +kEnumerableProperty.enumerable = true + +module.exports = { + kEnumerableProperty, + nop, + isDisturbed, + isErrored, + isReadable, + toUSVString, + isReadableAborted, + isBlobLike, + parseOrigin, + parseURL, + getServerName, + isStream, + isIterable, + isAsyncIterable, + isDestroyed, + parseRawHeaders, + parseHeaders, + parseKeepAliveTimeout, + destroy, + bodyLength, + deepClone, + ReadableStreamFrom, + isBuffer, + validateHandler, + getSocketInfo, + isFormDataLike, + buildURL, + throwIfAborted, + addAbortListener, + parseRangeHeader, + nodeMajor, + nodeMinor, + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +} diff --git a/lib/dispatcher-base.js b/lib/dispatcher-base.js new file mode 100644 index 0000000..5c0220b --- /dev/null +++ b/lib/dispatcher-base.js @@ -0,0 +1,192 @@ +'use strict' + +const Dispatcher = require('./dispatcher') +const { + ClientDestroyedError, + ClientClosedError, + InvalidArgumentError +} = require('./core/errors') +const { kDestroy, kClose, kDispatch, kInterceptors } = require('./core/symbols') + +const kDestroyed = Symbol('destroyed') +const kClosed = Symbol('closed') +const kOnDestroyed = Symbol('onDestroyed') +const kOnClosed = Symbol('onClosed') +const kInterceptedDispatch = Symbol('Intercepted Dispatch') + +class DispatcherBase extends Dispatcher { + constructor () { + super() + + this[kDestroyed] = false + this[kOnDestroyed] = null + this[kClosed] = false + this[kOnClosed] = [] + } + + get destroyed () { + return this[kDestroyed] + } + + get closed () { + return this[kClosed] + } + + get interceptors () { + return this[kInterceptors] + } + + set interceptors (newInterceptors) { + if (newInterceptors) { + for (let i = newInterceptors.length - 1; i >= 0; i--) { + const interceptor = this[kInterceptors][i] + if (typeof interceptor !== 'function') { + throw new InvalidArgumentError('interceptor must be an function') + } + } + } + + this[kInterceptors] = newInterceptors + } + + close (callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.close((err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (this[kDestroyed]) { + queueMicrotask(() => callback(new ClientDestroyedError(), null)) + return + } + + if (this[kClosed]) { + if (this[kOnClosed]) { + this[kOnClosed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } + + this[kClosed] = true + this[kOnClosed].push(callback) + + const onClosed = () => { + const callbacks = this[kOnClosed] + this[kOnClosed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kClose]() + .then(() => this.destroy()) + .then(() => { + queueMicrotask(onClosed) + }) + } + + destroy (err, callback) { + if (typeof err === 'function') { + callback = err + err = null + } + + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.destroy(err, (err, data) => { + return err ? 
/* istanbul ignore next: should never error */ reject(err) : resolve(data) + }) + }) + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (this[kDestroyed]) { + if (this[kOnDestroyed]) { + this[kOnDestroyed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } + + if (!err) { + err = new ClientDestroyedError() + } + + this[kDestroyed] = true + this[kOnDestroyed] = this[kOnDestroyed] || [] + this[kOnDestroyed].push(callback) + + const onDestroyed = () => { + const callbacks = this[kOnDestroyed] + this[kOnDestroyed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kDestroy](err).then(() => { + queueMicrotask(onDestroyed) + }) + } + + [kInterceptedDispatch] (opts, handler) { + if (!this[kInterceptors] || this[kInterceptors].length === 0) { + this[kInterceptedDispatch] = this[kDispatch] + return this[kDispatch](opts, handler) + } + + let dispatch = this[kDispatch].bind(this) + for (let i = this[kInterceptors].length - 1; i >= 0; i--) { + dispatch = this[kInterceptors][i](dispatch) + } + this[kInterceptedDispatch] = dispatch + return dispatch(opts, handler) + } + + dispatch (opts, handler) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } + + try { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object.') + } + + if (this[kDestroyed] || this[kOnDestroyed]) { + throw new ClientDestroyedError() + } + + if (this[kClosed]) { + throw new ClientClosedError() + } + + return this[kInterceptedDispatch](opts, handler) + } catch (err) { + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } + + handler.onError(err) + + return false + } + } +} + +module.exports = DispatcherBase diff --git a/lib/dispatcher.js b/lib/dispatcher.js new file mode 100644 index 0000000..9b809d8 --- /dev/null +++ b/lib/dispatcher.js @@ -0,0 +1,19 @@ +'use strict' + +const EventEmitter = require('events') + +class Dispatcher extends EventEmitter { + dispatch () { + throw new Error('not implemented') + } + + close () { + throw new Error('not implemented') + } + + destroy () { + throw new Error('not implemented') + } +} + +module.exports = Dispatcher diff --git a/lib/fetch/LICENSE b/lib/fetch/LICENSE new file mode 100644 index 0000000..2943500 --- /dev/null +++ b/lib/fetch/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Ethan Arrowood + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/lib/fetch/body.js b/lib/fetch/body.js new file mode 100644 index 0000000..fd8481b --- /dev/null +++ b/lib/fetch/body.js @@ -0,0 +1,605 @@ +'use strict' + +const Busboy = require('@fastify/busboy') +const util = require('../core/util') +const { + ReadableStreamFrom, + isBlobLike, + isReadableStreamLike, + readableStreamClose, + createDeferredPromise, + fullyReadBody +} = require('./util') +const { FormData } = require('./formdata') +const { kState } = require('./symbols') +const { webidl } = require('./webidl') +const { DOMException, structuredClone } = require('./constants') +const { Blob, File: NativeFile } = require('buffer') +const { kBodyUsed } = require('../core/symbols') +const assert = require('assert') +const { isErrored } = require('../core/util') +const { isUint8Array, isArrayBuffer } = require('util/types') +const { File: UndiciFile } = require('./file') +const { parseMIMEType, serializeAMimeType } = require('./dataURL') + +let ReadableStream = globalThis.ReadableStream + +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile +const textEncoder = new TextEncoder() +const textDecoder = new TextDecoder() + +// https://fetch.spec.whatwg.org/#concept-bodyinit-extract +function extractBody (object, keepalive = false) { + if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + // 1. Let stream be null. + let stream = null + + // 2. If object is a ReadableStream object, then set stream to object. + if (object instanceof ReadableStream) { + stream = object + } else if (isBlobLike(object)) { + // 3. Otherwise, if object is a Blob object, set stream to the + // result of running object’s get stream. + stream = object.stream() + } else { + // 4. Otherwise, set stream to a new ReadableStream object, and set + // up stream. + stream = new ReadableStream({ + async pull (controller) { + controller.enqueue( + typeof source === 'string' ? textEncoder.encode(source) : source + ) + queueMicrotask(() => readableStreamClose(controller)) + }, + start () {}, + type: undefined + }) + } + + // 5. Assert: stream is a ReadableStream object. + assert(isReadableStreamLike(stream)) + + // 6. Let action be null. + let action = null + + // 7. Let source be null. + let source = null + + // 8. Let length be null. + let length = null + + // 9. Let type be null. + let type = null + + // 10. Switch on object: + if (typeof object === 'string') { + // Set source to the UTF-8 encoding of object. + // Note: setting source to a Uint8Array here breaks some mocking assumptions. + source = object + + // Set type to `text/plain;charset=UTF-8`. + type = 'text/plain;charset=UTF-8' + } else if (object instanceof URLSearchParams) { + // URLSearchParams + + // spec says to run application/x-www-form-urlencoded on body.list + // this is implemented in Node.js as apart of an URLSearchParams instance toString method + // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 + // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 + + // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. 
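+ // Illustration: new URLSearchParams({ a: '1', b: '2 3' }).toString()
+ // yields 'a=1&b=2+3', which becomes the body source below.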
+ source = object.toString() + + // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. + type = 'application/x-www-form-urlencoded;charset=UTF-8' + } else if (isArrayBuffer(object)) { + // BufferSource/ArrayBuffer + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.slice()) + } else if (ArrayBuffer.isView(object)) { + // BufferSource/ArrayBufferView + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) + } else if (util.isFormDataLike(object)) { + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const prefix = `--${boundary}\r\nContent-Disposition: form-data` + + /*! formdata-polyfill. MIT License. Jimmy Wärting */ + const escape = (str) => + str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') + const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') + + // Set action to this step: run the multipart/form-data + // encoding algorithm, with object’s entry list and UTF-8. + // - This ensures that the body is immutable and can't be changed afterwords + // - That the content-length is calculated in advance. + // - And that all parts are pre-encoded and ready to be sent. + + const blobParts = [] + const rn = new Uint8Array([13, 10]) // '\r\n' + length = 0 + let hasUnknownSizeValue = false + + for (const [name, value] of object) { + if (typeof value === 'string') { + const chunk = textEncoder.encode(prefix + + `; name="${escape(normalizeLinefeeds(name))}"` + + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) + blobParts.push(chunk) + length += chunk.byteLength + } else { + const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + + `Content-Type: ${ + value.type || 'application/octet-stream' + }\r\n\r\n`) + blobParts.push(chunk, value, rn) + if (typeof value.size === 'number') { + length += chunk.byteLength + value.size + rn.byteLength + } else { + hasUnknownSizeValue = true + } + } + } + + const chunk = textEncoder.encode(`--${boundary}--`) + blobParts.push(chunk) + length += chunk.byteLength + if (hasUnknownSizeValue) { + length = null + } + + // Set source to object. + source = object + + action = async function * () { + for (const part of blobParts) { + if (part.stream) { + yield * part.stream() + } else { + yield part + } + } + } + + // Set type to `multipart/form-data; boundary=`, + // followed by the multipart/form-data boundary string generated + // by the multipart/form-data encoding algorithm. + type = 'multipart/form-data; boundary=' + boundary + } else if (isBlobLike(object)) { + // Blob + + // Set source to object. + source = object + + // Set length to object’s size. + length = object.size + + // If object’s type attribute is not the empty byte sequence, set + // type to its value. + if (object.type) { + type = object.type + } + } else if (typeof object[Symbol.asyncIterator] === 'function') { + // If keepalive is true, then throw a TypeError. + if (keepalive) { + throw new TypeError('keepalive') + } + + // If object is disturbed or locked, then throw a TypeError. + if (util.isDisturbed(object) || object.locked) { + throw new TypeError( + 'Response body object should not be disturbed or locked' + ) + } + + stream = + object instanceof ReadableStream ? object : ReadableStreamFrom(object) + } + + // 11. 
If source is a byte sequence, then set action to a + // step that returns source and length to source’s length. + if (typeof source === 'string' || util.isBuffer(source)) { + length = Buffer.byteLength(source) + } + + // 12. If action is non-null, then run these steps in in parallel: + if (action != null) { + // Run action. + let iterator + stream = new ReadableStream({ + async start () { + iterator = action(object)[Symbol.asyncIterator]() + }, + async pull (controller) { + const { value, done } = await iterator.next() + if (done) { + // When running action is done, close stream. + queueMicrotask(() => { + controller.close() + }) + } else { + // Whenever one or more bytes are available and stream is not errored, + // enqueue a Uint8Array wrapping an ArrayBuffer containing the available + // bytes into stream. + if (!isErrored(stream)) { + controller.enqueue(new Uint8Array(value)) + } + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + }, + type: undefined + }) + } + + // 13. Let body be a body whose stream is stream, source is source, + // and length is length. + const body = { stream, source, length } + + // 14. Return (body, type). + return [body, type] +} + +// https://fetch.spec.whatwg.org/#bodyinit-safely-extract +function safelyExtractBody (object, keepalive = false) { + if (!ReadableStream) { + // istanbul ignore next + ReadableStream = require('stream/web').ReadableStream + } + + // To safely extract a body and a `Content-Type` value from + // a byte sequence or BodyInit object object, run these steps: + + // 1. If object is a ReadableStream object, then: + if (object instanceof ReadableStream) { + // Assert: object is neither disturbed nor locked. + // istanbul ignore next + assert(!util.isDisturbed(object), 'The body has already been consumed.') + // istanbul ignore next + assert(!object.locked, 'The stream is locked.') + } + + // 2. Return the results of extracting object. + return extractBody(object, keepalive) +} + +function cloneBody (body) { + // To clone a body body, run these steps: + + // https://fetch.spec.whatwg.org/#concept-body-clone + + // 1. Let « out1, out2 » be the result of teeing body’s stream. + const [out1, out2] = body.stream.tee() + const out2Clone = structuredClone(out2, { transfer: [out2] }) + // This, for whatever reasons, unrefs out2Clone which allows + // the process to exit by itself. + const [, finalClone] = out2Clone.tee() + + // 2. Set body’s stream to out1. + body.stream = out1 + + // 3. Return a body whose stream is out2 and other members are copied from body. + return { + stream: finalClone, + length: body.length, + source: body.source + } +} + +async function * consumeBody (body) { + if (body) { + if (isUint8Array(body)) { + yield body + } else { + const stream = body.stream + + if (util.isDisturbed(stream)) { + throw new TypeError('The body has already been consumed.') + } + + if (stream.locked) { + throw new TypeError('The stream is locked.') + } + + // Compat. + stream[kBodyUsed] = true + + yield * stream + } + } +} + +function throwIfAborted (state) { + if (state.aborted) { + throw new DOMException('The operation was aborted.', 'AbortError') + } +} + +function bodyMixinMethods (instance) { + const methods = { + blob () { + // The blob() method steps are to return the result of + // running consume body with this and the following step + // given a byte sequence bytes: return a Blob whose + // contents are bytes and whose type attribute is this’s + // MIME type. 
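An illustrative sketch (not part of the patch): `cloneBody` builds on `ReadableStream#tee()`, which yields two branches that replay the same chunks while locking the original stream.

```js
// Minimal sketch: teeing a web ReadableStream (Node >= 16.5).
const { ReadableStream } = require('stream/web')

const stream = new ReadableStream({
  start (controller) {
    controller.enqueue(new TextEncoder().encode('hello'))
    controller.close()
  }
})

const [out1, out2] = stream.tee()
console.log(stream.locked) // true; only the two branches are readable now
```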
+ return specConsumeBody(this, (bytes) => { + let mimeType = bodyMimeType(this) + + if (mimeType === 'failure') { + mimeType = '' + } else if (mimeType) { + mimeType = serializeAMimeType(mimeType) + } + + // Return a Blob whose contents are bytes and type attribute + // is mimeType. + return new Blob([bytes], { type: mimeType }) + }, instance) + }, + + arrayBuffer () { + // The arrayBuffer() method steps are to return the result + // of running consume body with this and the following step + // given a byte sequence bytes: return a new ArrayBuffer + // whose contents are bytes. + return specConsumeBody(this, (bytes) => { + return new Uint8Array(bytes).buffer + }, instance) + }, + + text () { + // The text() method steps are to return the result of running + // consume body with this and UTF-8 decode. + return specConsumeBody(this, utf8DecodeBytes, instance) + }, + + json () { + // The json() method steps are to return the result of running + // consume body with this and parse JSON from bytes. + return specConsumeBody(this, parseJSONFromBytes, instance) + }, + + async formData () { + webidl.brandCheck(this, instance) + + throwIfAborted(this[kState]) + + const contentType = this.headers.get('Content-Type') + + // If mimeType’s essence is "multipart/form-data", then: + if (/multipart\/form-data/.test(contentType)) { + const headers = {} + for (const [key, value] of this.headers) headers[key.toLowerCase()] = value + + const responseFormData = new FormData() + + let busboy + + try { + busboy = new Busboy({ + headers, + preservePath: true + }) + } catch (err) { + throw new DOMException(`${err}`, 'AbortError') + } + + busboy.on('field', (name, value) => { + responseFormData.append(name, value) + }) + busboy.on('file', (name, value, filename, encoding, mimeType) => { + const chunks = [] + + if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { + let base64chunk = '' + + value.on('data', (chunk) => { + base64chunk += chunk.toString().replace(/[\r\n]/gm, '') + + const end = base64chunk.length - base64chunk.length % 4 + chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) + + base64chunk = base64chunk.slice(end) + }) + value.on('end', () => { + chunks.push(Buffer.from(base64chunk, 'base64')) + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } else { + value.on('data', (chunk) => { + chunks.push(chunk) + }) + value.on('end', () => { + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } + }) + + const busboyResolve = new Promise((resolve, reject) => { + busboy.on('finish', resolve) + busboy.on('error', (err) => reject(new TypeError(err))) + }) + + if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) + busboy.end() + await busboyResolve + + return responseFormData + } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: + + // 1. Let entries be the result of parsing bytes. + let entries + try { + let text = '' + // application/x-www-form-urlencoded parser will keep the BOM. 
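An illustrative sketch (not part of the patch): exercising the `formData()` body mixin end to end, assuming the fetch classes are available as globals (Node >= 18) or via `require('undici')`.

```js
// Minimal sketch: a multipart body round-tripped through formData().
const form = new FormData()
form.append('name', 'Ada')

new Response(form)
  .formData()
  .then((parsed) => console.log(parsed.get('name'))) // 'Ada'
```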
+ // https://url.spec.whatwg.org/#concept-urlencoded-parser + // Note that streaming decoder is stateful and cannot be reused + const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + + for await (const chunk of consumeBody(this[kState].body)) { + if (!isUint8Array(chunk)) { + throw new TypeError('Expected Uint8Array chunk') + } + text += streamingDecoder.decode(chunk, { stream: true }) + } + text += streamingDecoder.decode() + entries = new URLSearchParams(text) + } catch (err) { + // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. + // 2. If entries is failure, then throw a TypeError. + throw Object.assign(new TypeError(), { cause: err }) + } + + // 3. Return a new FormData object whose entries are entries. + const formData = new FormData() + for (const [name, value] of entries) { + formData.append(name, value) + } + return formData + } else { + // Wait a tick before checking if the request has been aborted. + // Otherwise, a TypeError can be thrown when an AbortError should. + await Promise.resolve() + + throwIfAborted(this[kState]) + + // Otherwise, throw a TypeError. + throw webidl.errors.exception({ + header: `${instance.name}.formData`, + message: 'Could not parse content as FormData.' + }) + } + } + } + + return methods +} + +function mixinBody (prototype) { + Object.assign(prototype.prototype, bodyMixinMethods(prototype)) +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-body-consume-body + * @param {Response|Request} object + * @param {(value: unknown) => unknown} convertBytesToJSValue + * @param {Response|Request} instance + */ +async function specConsumeBody (object, convertBytesToJSValue, instance) { + webidl.brandCheck(object, instance) + + throwIfAborted(object[kState]) + + // 1. If object is unusable, then return a promise rejected + // with a TypeError. + if (bodyUnusable(object[kState].body)) { + throw new TypeError('Body is unusable') + } + + // 2. Let promise be a new promise. + const promise = createDeferredPromise() + + // 3. Let errorSteps given error be to reject promise with error. + const errorSteps = (error) => promise.reject(error) + + // 4. Let successSteps given a byte sequence data be to resolve + // promise with the result of running convertBytesToJSValue + // with data. If that threw an exception, then run errorSteps + // with that exception. + const successSteps = (data) => { + try { + promise.resolve(convertBytesToJSValue(data)) + } catch (e) { + errorSteps(e) + } + } + + // 5. If object’s body is null, then run successSteps with an + // empty byte sequence. + if (object[kState].body == null) { + successSteps(new Uint8Array()) + return promise.promise + } + + // 6. Otherwise, fully read object’s body given successSteps, + // errorSteps, and object’s relevant global object. + await fullyReadBody(object[kState].body, successSteps, errorSteps) + + // 7. Return promise. + return promise.promise +} + +// https://fetch.spec.whatwg.org/#body-unusable +function bodyUnusable (body) { + // An object including the Body interface mixin is + // said to be unusable if its body is non-null and + // its body’s stream is disturbed or locked. + return body != null && (body.stream.locked || util.isDisturbed(body.stream)) +} + +/** + * @see https://encoding.spec.whatwg.org/#utf-8-decode + * @param {Buffer} buffer + */ +function utf8DecodeBytes (buffer) { + if (buffer.length === 0) { + return '' + } + + // 1. Let buffer be the result of peeking three bytes from + // ioQueue, converted to a byte sequence. + + // 2. 
If buffer is 0xEF 0xBB 0xBF, then read three + // bytes from ioQueue. (Do nothing with those bytes.) + if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { + buffer = buffer.subarray(3) + } + + // 3. Process a queue with an instance of UTF-8’s + // decoder, ioQueue, output, and "replacement". + const output = textDecoder.decode(buffer) + + // 4. Return output. + return output +} + +/** + * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value + * @param {Uint8Array} bytes + */ +function parseJSONFromBytes (bytes) { + return JSON.parse(utf8DecodeBytes(bytes)) +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-body-mime-type + * @param {import('./response').Response|import('./request').Request} object + */ +function bodyMimeType (object) { + const { headersList } = object[kState] + const contentType = headersList.get('content-type') + + if (contentType === null) { + return 'failure' + } + + return parseMIMEType(contentType) +} + +module.exports = { + extractBody, + safelyExtractBody, + cloneBody, + mixinBody +} diff --git a/lib/fetch/constants.js b/lib/fetch/constants.js new file mode 100644 index 0000000..218fcbe --- /dev/null +++ b/lib/fetch/constants.js @@ -0,0 +1,151 @@ +'use strict' + +const { MessageChannel, receiveMessageOnPort } = require('worker_threads') + +const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] +const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) + +const nullBodyStatus = [101, 204, 205, 304] + +const redirectStatus = [301, 302, 303, 307, 308] +const redirectStatusSet = new Set(redirectStatus) + +// https://fetch.spec.whatwg.org/#block-bad-port +const badPorts = [ + '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', + '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', + '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', + '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', + '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', + '10080' +] + +const badPortsSet = new Set(badPorts) + +// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies +const referrerPolicy = [ + '', + 'no-referrer', + 'no-referrer-when-downgrade', + 'same-origin', + 'origin', + 'strict-origin', + 'origin-when-cross-origin', + 'strict-origin-when-cross-origin', + 'unsafe-url' +] +const referrerPolicySet = new Set(referrerPolicy) + +const requestRedirect = ['follow', 'manual', 'error'] + +const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +const safeMethodsSet = new Set(safeMethods) + +const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] + +const requestCredentials = ['omit', 'same-origin', 'include'] + +const requestCache = [ + 'default', + 'no-store', + 'reload', + 'no-cache', + 'force-cache', + 'only-if-cached' +] + +// https://fetch.spec.whatwg.org/#request-body-header-name +const requestBodyHeader = [ + 'content-encoding', + 'content-language', + 'content-location', + 'content-type', + // See https://github.com/nodejs/undici/issues/2021 + // 'Content-Length' is a forbidden header name, which is typically + // removed in the Headers implementation. However, undici doesn't + // filter out headers, so we add it here. 
+ 'content-length' +] + +// https://fetch.spec.whatwg.org/#enumdef-requestduplex +const requestDuplex = [ + 'half' +] + +// http://fetch.spec.whatwg.org/#forbidden-method +const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] +const forbiddenMethodsSet = new Set(forbiddenMethods) + +const subresource = [ + 'audio', + 'audioworklet', + 'font', + 'image', + 'manifest', + 'paintworklet', + 'script', + 'style', + 'track', + 'video', + 'xslt', + '' +] +const subresourceSet = new Set(subresource) + +/** @type {globalThis['DOMException']} */ +const DOMException = globalThis.DOMException ?? (() => { + // DOMException was only made a global in Node v17.0.0, + // but fetch supports >= v16.8. + try { + atob('~') + } catch (err) { + return Object.getPrototypeOf(err).constructor + } +})() + +let channel + +/** @type {globalThis['structuredClone']} */ +const structuredClone = + globalThis.structuredClone ?? + // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js + // structuredClone was added in v17.0.0, but fetch supports v16.8 + function structuredClone (value, options = undefined) { + if (arguments.length === 0) { + throw new TypeError('missing argument') + } + + if (!channel) { + channel = new MessageChannel() + } + channel.port1.unref() + channel.port2.unref() + channel.port1.postMessage(value, options?.transfer) + return receiveMessageOnPort(channel.port2).message + } + +module.exports = { + DOMException, + structuredClone, + subresource, + forbiddenMethods, + requestBodyHeader, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + redirectStatus, + corsSafeListedMethods, + nullBodyStatus, + safeMethods, + badPorts, + requestDuplex, + subresourceSet, + badPortsSet, + redirectStatusSet, + corsSafeListedMethodsSet, + safeMethodsSet, + forbiddenMethodsSet, + referrerPolicySet +} diff --git a/lib/fetch/dataURL.js b/lib/fetch/dataURL.js new file mode 100644 index 0000000..7b6a606 --- /dev/null +++ b/lib/fetch/dataURL.js @@ -0,0 +1,627 @@ +const assert = require('assert') +const { atob } = require('buffer') +const { isomorphicDecode } = require('./util') + +const encoder = new TextEncoder() + +/** + * @see https://mimesniff.spec.whatwg.org/#http-token-code-point + */ +const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ +const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line +/** + * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point + */ +const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line + +// https://fetch.spec.whatwg.org/#data-url-processor +/** @param {URL} dataURL */ +function dataURLProcessor (dataURL) { + // 1. Assert: dataURL’s scheme is "data". + assert(dataURL.protocol === 'data:') + + // 2. Let input be the result of running the URL + // serializer on dataURL with exclude fragment + // set to true. + let input = URLSerializer(dataURL, true) + + // 3. Remove the leading "data:" string from input. + input = input.slice(5) + + // 4. Let position point at the start of input. + const position = { position: 0 } + + // 5. Let mimeType be the result of collecting a + // sequence of code points that are not equal + // to U+002C (,), given position. + let mimeType = collectASequenceOfCodePointsFast( + ',', + input, + position + ) + + // 6. Strip leading and trailing ASCII whitespace + // from mimeType. 
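An illustrative sketch (not part of the patch): the `structuredClone` fallback defined in constants.js above round-trips a value through a `MessageChannel`, which applies the structured clone algorithm on Node versions that predate the global. `cloneViaChannel` is a hypothetical name.

```js
// Minimal sketch: structured cloning via a MessageChannel round-trip.
const { MessageChannel, receiveMessageOnPort } = require('worker_threads')

function cloneViaChannel (value, transfer) {
  const channel = new MessageChannel()
  channel.port1.unref()
  channel.port2.unref()
  channel.port1.postMessage(value, transfer)
  return receiveMessageOnPort(channel.port2).message
}

console.log(cloneViaChannel({ when: new Date(0), data: new Uint8Array([1, 2]) }))
```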
+ // Undici implementation note: we need to store the + // length because if the mimetype has spaces removed, + // the wrong amount will be sliced from the input in + // step #9 + const mimeTypeLength = mimeType.length + mimeType = removeASCIIWhitespace(mimeType, true, true) + + // 7. If position is past the end of input, then + // return failure + if (position.position >= input.length) { + return 'failure' + } + + // 8. Advance position by 1. + position.position++ + + // 9. Let encodedBody be the remainder of input. + const encodedBody = input.slice(mimeTypeLength + 1) + + // 10. Let body be the percent-decoding of encodedBody. + let body = stringPercentDecode(encodedBody) + + // 11. If mimeType ends with U+003B (;), followed by + // zero or more U+0020 SPACE, followed by an ASCII + // case-insensitive match for "base64", then: + if (/;(\u0020){0,}base64$/i.test(mimeType)) { + // 1. Let stringBody be the isomorphic decode of body. + const stringBody = isomorphicDecode(body) + + // 2. Set body to the forgiving-base64 decode of + // stringBody. + body = forgivingBase64(stringBody) + + // 3. If body is failure, then return failure. + if (body === 'failure') { + return 'failure' + } + + // 4. Remove the last 6 code points from mimeType. + mimeType = mimeType.slice(0, -6) + + // 5. Remove trailing U+0020 SPACE code points from mimeType, + // if any. + mimeType = mimeType.replace(/(\u0020)+$/, '') + + // 6. Remove the last U+003B (;) code point from mimeType. + mimeType = mimeType.slice(0, -1) + } + + // 12. If mimeType starts with U+003B (;), then prepend + // "text/plain" to mimeType. + if (mimeType.startsWith(';')) { + mimeType = 'text/plain' + mimeType + } + + // 13. Let mimeTypeRecord be the result of parsing + // mimeType. + let mimeTypeRecord = parseMIMEType(mimeType) + + // 14. If mimeTypeRecord is failure, then set + // mimeTypeRecord to text/plain;charset=US-ASCII. + if (mimeTypeRecord === 'failure') { + mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') + } + + // 15. Return a new data: URL struct whose MIME + // type is mimeTypeRecord and body is body. + // https://fetch.spec.whatwg.org/#data-url-struct + return { mimeType: mimeTypeRecord, body } +} + +// https://url.spec.whatwg.org/#concept-url-serializer +/** + * @param {URL} url + * @param {boolean} excludeFragment + */ +function URLSerializer (url, excludeFragment = false) { + if (!excludeFragment) { + return url.href + } + + const href = url.href + const hashLength = url.hash.length + + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) +} + +// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points +/** + * @param {(char: string) => boolean} condition + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePoints (condition, input, position) { + // 1. Let result be the empty string. + let result = '' + + // 2. While position doesn’t point past the end of input and the + // code point at position within input meets the condition condition: + while (position.position < input.length && condition(input[position.position])) { + // 1. Append that code point to the end of result. + result += input[position.position] + + // 2. Advance position by 1. + position.position++ + } + + // 3. Return result. + return result +} + +/** + * A faster collectASequenceOfCodePoints that only works when comparing a single character. 
+ * @param {string} char + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePointsFast (char, input, position) { + const idx = input.indexOf(char, position.position) + const start = position.position + + if (idx === -1) { + position.position = input.length + return input.slice(start) + } + + position.position = idx + return input.slice(start, position.position) +} + +// https://url.spec.whatwg.org/#string-percent-decode +/** @param {string} input */ +function stringPercentDecode (input) { + // 1. Let bytes be the UTF-8 encoding of input. + const bytes = encoder.encode(input) + + // 2. Return the percent-decoding of bytes. + return percentDecode(bytes) +} + +// https://url.spec.whatwg.org/#percent-decode +/** @param {Uint8Array} input */ +function percentDecode (input) { + // 1. Let output be an empty byte sequence. + /** @type {number[]} */ + const output = [] + + // 2. For each byte byte in input: + for (let i = 0; i < input.length; i++) { + const byte = input[i] + + // 1. If byte is not 0x25 (%), then append byte to output. + if (byte !== 0x25) { + output.push(byte) + + // 2. Otherwise, if byte is 0x25 (%) and the next two bytes + // after byte in input are not in the ranges + // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), + // and 0x61 (a) to 0x66 (f), all inclusive, append byte + // to output. + } else if ( + byte === 0x25 && + !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) + ) { + output.push(0x25) + + // 3. Otherwise: + } else { + // 1. Let bytePoint be the two bytes after byte in input, + // decoded, and then interpreted as hexadecimal number. + const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) + const bytePoint = Number.parseInt(nextTwoBytes, 16) + + // 2. Append a byte whose value is bytePoint to output. + output.push(bytePoint) + + // 3. Skip the next two bytes in input. + i += 2 + } + } + + // 3. Return output. + return Uint8Array.from(output) +} + +// https://mimesniff.spec.whatwg.org/#parse-a-mime-type +/** @param {string} input */ +function parseMIMEType (input) { + // 1. Remove any leading and trailing HTTP whitespace + // from input. + input = removeHTTPWhitespace(input, true, true) + + // 2. Let position be a position variable for input, + // initially pointing at the start of input. + const position = { position: 0 } + + // 3. Let type be the result of collecting a sequence + // of code points that are not U+002F (/) from + // input, given position. + const type = collectASequenceOfCodePointsFast( + '/', + input, + position + ) + + // 4. If type is the empty string or does not solely + // contain HTTP token code points, then return failure. + // https://mimesniff.spec.whatwg.org/#http-token-code-point + if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { + return 'failure' + } + + // 5. If position is past the end of input, then return + // failure + if (position.position > input.length) { + return 'failure' + } + + // 6. Advance position by 1. (This skips past U+002F (/).) + position.position++ + + // 7. Let subtype be the result of collecting a sequence of + // code points that are not U+003B (;) from input, given + // position. + let subtype = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 8. Remove any trailing HTTP whitespace from subtype. + subtype = removeHTTPWhitespace(subtype, false, true) + + // 9. If subtype is the empty string or does not solely + // contain HTTP token code points, then return failure. 
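An illustrative sketch (not part of the patch): percent-decoding replaces `%XX` escapes with the byte `0xXX` and leaves malformed escapes untouched, matching `percentDecode` above. `percentDecodeString` is a hypothetical helper name.

```js
// Minimal sketch: percent-decode a string into bytes.
function percentDecodeString (input) {
  const bytes = new TextEncoder().encode(input)
  const out = []
  for (let i = 0; i < bytes.length; i++) {
    const next = String.fromCharCode(bytes[i + 1], bytes[i + 2])
    if (bytes[i] === 0x25 && /^[0-9A-Fa-f]{2}$/.test(next)) {
      out.push(Number.parseInt(next, 16))
      i += 2
    } else {
      out.push(bytes[i])
    }
  }
  return Uint8Array.from(out)
}

console.log(new TextDecoder().decode(percentDecodeString('a%20b%zz'))) // 'a b%zz'
```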
+ if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { + return 'failure' + } + + const typeLowercase = type.toLowerCase() + const subtypeLowercase = subtype.toLowerCase() + + // 10. Let mimeType be a new MIME type record whose type + // is type, in ASCII lowercase, and subtype is subtype, + // in ASCII lowercase. + // https://mimesniff.spec.whatwg.org/#mime-type + const mimeType = { + type: typeLowercase, + subtype: subtypeLowercase, + /** @type {Map} */ + parameters: new Map(), + // https://mimesniff.spec.whatwg.org/#mime-type-essence + essence: `${typeLowercase}/${subtypeLowercase}` + } + + // 11. While position is not past the end of input: + while (position.position < input.length) { + // 1. Advance position by 1. (This skips past U+003B (;).) + position.position++ + + // 2. Collect a sequence of code points that are HTTP + // whitespace from input given position. + collectASequenceOfCodePoints( + // https://fetch.spec.whatwg.org/#http-whitespace + char => HTTP_WHITESPACE_REGEX.test(char), + input, + position + ) + + // 3. Let parameterName be the result of collecting a + // sequence of code points that are not U+003B (;) + // or U+003D (=) from input, given position. + let parameterName = collectASequenceOfCodePoints( + (char) => char !== ';' && char !== '=', + input, + position + ) + + // 4. Set parameterName to parameterName, in ASCII + // lowercase. + parameterName = parameterName.toLowerCase() + + // 5. If position is not past the end of input, then: + if (position.position < input.length) { + // 1. If the code point at position within input is + // U+003B (;), then continue. + if (input[position.position] === ';') { + continue + } + + // 2. Advance position by 1. (This skips past U+003D (=).) + position.position++ + } + + // 6. If position is past the end of input, then break. + if (position.position > input.length) { + break + } + + // 7. Let parameterValue be null. + let parameterValue = null + + // 8. If the code point at position within input is + // U+0022 ("), then: + if (input[position.position] === '"') { + // 1. Set parameterValue to the result of collecting + // an HTTP quoted string from input, given position + // and the extract-value flag. + parameterValue = collectAnHTTPQuotedString(input, position, true) + + // 2. Collect a sequence of code points that are not + // U+003B (;) from input, given position. + collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 9. Otherwise: + } else { + // 1. Set parameterValue to the result of collecting + // a sequence of code points that are not U+003B (;) + // from input, given position. + parameterValue = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 2. Remove any trailing HTTP whitespace from parameterValue. + parameterValue = removeHTTPWhitespace(parameterValue, false, true) + + // 3. If parameterValue is the empty string, then continue. + if (parameterValue.length === 0) { + continue + } + } + + // 10. If all of the following are true + // - parameterName is not the empty string + // - parameterName solely contains HTTP token code points + // - parameterValue solely contains HTTP quoted-string token code points + // - mimeType’s parameters[parameterName] does not exist + // then set mimeType’s parameters[parameterName] to parameterValue. 
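An illustrative sketch (not part of the patch): the record shape `parseMIMEType` produces for a typical value; for example, `parseMIMEType('Text/HTML; charset="UTF-8"')` yields roughly the following.

```js
// Minimal sketch: type/subtype are ASCII-lowercased, parameter names are
// lowercased, and quoted parameter values keep their original case.
const exampleRecord = {
  type: 'text',
  subtype: 'html',
  essence: 'text/html',
  parameters: new Map([['charset', 'UTF-8']])
}
console.log(exampleRecord.essence) // 'text/html'
```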
+ if ( + parameterName.length !== 0 && + HTTP_TOKEN_CODEPOINTS.test(parameterName) && + (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && + !mimeType.parameters.has(parameterName) + ) { + mimeType.parameters.set(parameterName, parameterValue) + } + } + + // 12. Return mimeType. + return mimeType +} + +// https://infra.spec.whatwg.org/#forgiving-base64-decode +/** @param {string} data */ +function forgivingBase64 (data) { + // 1. Remove all ASCII whitespace from data. + data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line + + // 2. If data’s code point length divides by 4 leaving + // no remainder, then: + if (data.length % 4 === 0) { + // 1. If data ends with one or two U+003D (=) code points, + // then remove them from data. + data = data.replace(/=?=$/, '') + } + + // 3. If data’s code point length divides by 4 leaving + // a remainder of 1, then return failure. + if (data.length % 4 === 1) { + return 'failure' + } + + // 4. If data contains a code point that is not one of + // U+002B (+) + // U+002F (/) + // ASCII alphanumeric + // then return failure. + if (/[^+/0-9A-Za-z]/.test(data)) { + return 'failure' + } + + const binary = atob(data) + const bytes = new Uint8Array(binary.length) + + for (let byte = 0; byte < binary.length; byte++) { + bytes[byte] = binary.charCodeAt(byte) + } + + return bytes +} + +// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string +// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string +/** + * @param {string} input + * @param {{ position: number }} position + * @param {boolean?} extractValue + */ +function collectAnHTTPQuotedString (input, position, extractValue) { + // 1. Let positionStart be position. + const positionStart = position.position + + // 2. Let value be the empty string. + let value = '' + + // 3. Assert: the code point at position within input + // is U+0022 ("). + assert(input[position.position] === '"') + + // 4. Advance position by 1. + position.position++ + + // 5. While true: + while (true) { + // 1. Append the result of collecting a sequence of code points + // that are not U+0022 (") or U+005C (\) from input, given + // position, to value. + value += collectASequenceOfCodePoints( + (char) => char !== '"' && char !== '\\', + input, + position + ) + + // 2. If position is past the end of input, then break. + if (position.position >= input.length) { + break + } + + // 3. Let quoteOrBackslash be the code point at position within + // input. + const quoteOrBackslash = input[position.position] + + // 4. Advance position by 1. + position.position++ + + // 5. If quoteOrBackslash is U+005C (\), then: + if (quoteOrBackslash === '\\') { + // 1. If position is past the end of input, then append + // U+005C (\) to value and break. + if (position.position >= input.length) { + value += '\\' + break + } + + // 2. Append the code point at position within input to value. + value += input[position.position] + + // 3. Advance position by 1. + position.position++ + + // 6. Otherwise: + } else { + // 1. Assert: quoteOrBackslash is U+0022 ("). + assert(quoteOrBackslash === '"') + + // 2. Break. + break + } + } + + // 6. If the extract-value flag is set, then return value. + if (extractValue) { + return value + } + + // 7. Return the code points from positionStart to position, + // inclusive, within input. 
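An illustrative sketch (not part of the patch): forgiving base64 decoding strips ASCII whitespace and trailing `=` padding before decoding, as in `forgivingBase64` above. `decodeForgiving` is a hypothetical helper name.

```js
// Minimal sketch: forgiving base64 decode to bytes, or 'failure'.
const { atob } = require('buffer')

function decodeForgiving (data) {
  data = data.replace(/[\t\n\f\r ]/g, '')
  if (data.length % 4 === 0) data = data.replace(/={1,2}$/, '')
  if (data.length % 4 === 1 || /[^+/0-9A-Za-z]/.test(data)) return 'failure'
  const binary = atob(data)
  return Uint8Array.from(binary, (c) => c.charCodeAt(0))
}

console.log(decodeForgiving('aGVs bG8=')) // bytes for 'hello'
console.log(decodeForgiving('a'))         // 'failure' (length % 4 === 1)
```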
+ return input.slice(positionStart, position.position) +} + +/** + * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +function serializeAMimeType (mimeType) { + assert(mimeType !== 'failure') + const { parameters, essence } = mimeType + + // 1. Let serialization be the concatenation of mimeType’s + // type, U+002F (/), and mimeType’s subtype. + let serialization = essence + + // 2. For each name → value of mimeType’s parameters: + for (let [name, value] of parameters.entries()) { + // 1. Append U+003B (;) to serialization. + serialization += ';' + + // 2. Append name to serialization. + serialization += name + + // 3. Append U+003D (=) to serialization. + serialization += '=' + + // 4. If value does not solely contain HTTP token code + // points or value is the empty string, then: + if (!HTTP_TOKEN_CODEPOINTS.test(value)) { + // 1. Precede each occurence of U+0022 (") or + // U+005C (\) in value with U+005C (\). + value = value.replace(/(\\|")/g, '\\$1') + + // 2. Prepend U+0022 (") to value. + value = '"' + value + + // 3. Append U+0022 (") to value. + value += '"' + } + + // 5. Append value to serialization. + serialization += value + } + + // 3. Return serialization. + return serialization +} + +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} char + */ +function isHTTPWhiteSpace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === ' ' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} str + */ +function removeHTTPWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 + + if (leading) { + for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); + } + + if (trailing) { + for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); + } + + return str.slice(lead, trail + 1) +} + +/** + * @see https://infra.spec.whatwg.org/#ascii-whitespace + * @param {string} char + */ +function isASCIIWhitespace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' +} + +/** + * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace + */ +function removeASCIIWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 + + if (leading) { + for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); + } + + if (trailing) { + for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); + } + + return str.slice(lead, trail + 1) +} + +module.exports = { + dataURLProcessor, + URLSerializer, + collectASequenceOfCodePoints, + collectASequenceOfCodePointsFast, + stringPercentDecode, + parseMIMEType, + collectAnHTTPQuotedString, + serializeAMimeType +} diff --git a/lib/fetch/file.js b/lib/fetch/file.js new file mode 100644 index 0000000..3133d25 --- /dev/null +++ b/lib/fetch/file.js @@ -0,0 +1,344 @@ +'use strict' + +const { Blob, File: NativeFile } = require('buffer') +const { types } = require('util') +const { kState } = require('./symbols') +const { isBlobLike } = require('./util') +const { webidl } = require('./webidl') +const { parseMIMEType, serializeAMimeType } = require('./dataURL') +const { kEnumerableProperty } = require('../core/util') +const encoder = new TextEncoder() + +class File extends Blob { + constructor (fileBits, fileName, options = {}) { + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. 
When the File() + // constructor is invoked, user agents must run the following steps: + webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) + + fileBits = webidl.converters['sequence'](fileBits) + fileName = webidl.converters.USVString(fileName) + options = webidl.converters.FilePropertyBag(options) + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + // Note: Blob handles this for us + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // 2. Convert every character in t to ASCII lowercase. + let t = options.type + let d + + // eslint-disable-next-line no-labels + substep: { + if (t) { + t = parseMIMEType(t) + + if (t === 'failure') { + t = '' + // eslint-disable-next-line no-labels + break substep + } + + t = serializeAMimeType(t).toLowerCase() + } + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + d = options.lastModified + } + + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. + + super(processBlobParts(fileBits, options), { type: t }) + this[kState] = { + name: n, + lastModified: d, + type: t + } + } + + get name () { + webidl.brandCheck(this, File) + + return this[kState].name + } + + get lastModified () { + webidl.brandCheck(this, File) + + return this[kState].lastModified + } + + get type () { + webidl.brandCheck(this, File) + + return this[kState].type + } +} + +class FileLike { + constructor (blobLike, fileName, options = {}) { + // TODO: argument idl type check + + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // TODO + const t = options.type + + // 2. Convert every character in t to ASCII lowercase. + // TODO + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + const d = options.lastModified ?? Date.now() + + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. 
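An illustrative sketch (not part of the patch): observable behaviour of the `File` constructor defined above; the `type` option is run through parse/serialize and lowercased, and `lastModified` defaults to `Date.now()`. Assumes the undici package is installed, since it re-exports `File`.

```js
// Minimal sketch: constructing a File and inspecting its normalized fields.
const { File } = require('undici')

const file = new File(['hello'], 'hello.txt', { type: 'Text/Plain' })
console.log(file.name) // 'hello.txt'
console.log(file.type) // 'text/plain'
console.log(file.size) // 5
console.log(Number.isFinite(file.lastModified)) // true
```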
+ // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. + + this[kState] = { + blobLike, + name: n, + type: t, + lastModified: d + } + } + + stream (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.stream(...args) + } + + arrayBuffer (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.arrayBuffer(...args) + } + + slice (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.slice(...args) + } + + text (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.text(...args) + } + + get size () { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.size + } + + get type () { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.type + } + + get name () { + webidl.brandCheck(this, FileLike) + + return this[kState].name + } + + get lastModified () { + webidl.brandCheck(this, FileLike) + + return this[kState].lastModified + } + + get [Symbol.toStringTag] () { + return 'File' + } +} + +Object.defineProperties(File.prototype, { + [Symbol.toStringTag]: { + value: 'File', + configurable: true + }, + name: kEnumerableProperty, + lastModified: kEnumerableProperty +}) + +webidl.converters.Blob = webidl.interfaceConverter(Blob) + +webidl.converters.BlobPart = function (V, opts) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if ( + ArrayBuffer.isView(V) || + types.isAnyArrayBuffer(V) + ) { + return webidl.converters.BufferSource(V, opts) + } + } + + return webidl.converters.USVString(V, opts) +} + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.BlobPart +) + +// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag +webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ + { + key: 'lastModified', + converter: webidl.converters['long long'], + get defaultValue () { + return Date.now() + } + }, + { + key: 'type', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'endings', + converter: (value) => { + value = webidl.converters.DOMString(value) + value = value.toLowerCase() + + if (value !== 'native') { + value = 'transparent' + } + + return value + }, + defaultValue: 'transparent' + } +]) + +/** + * @see https://www.w3.org/TR/FileAPI/#process-blob-parts + * @param {(NodeJS.TypedArray|Blob|string)[]} parts + * @param {{ type: string, endings: string }} options + */ +function processBlobParts (parts, options) { + // 1. Let bytes be an empty sequence of bytes. + /** @type {NodeJS.TypedArray[]} */ + const bytes = [] + + // 2. For each element in parts: + for (const element of parts) { + // 1. If element is a USVString, run the following substeps: + if (typeof element === 'string') { + // 1. Let s be element. + let s = element + + // 2. If the endings member of options is "native", set s + // to the result of converting line endings to native + // of element. + if (options.endings === 'native') { + s = convertLineEndingsNative(s) + } + + // 3. Append the result of UTF-8 encoding s to bytes. + bytes.push(encoder.encode(s)) + } else if ( + types.isAnyArrayBuffer(element) || + types.isTypedArray(element) + ) { + // 2. If element is a BufferSource, get a copy of the + // bytes held by the buffer source, and append those + // bytes to bytes. 
+ if (!element.buffer) { // ArrayBuffer + bytes.push(new Uint8Array(element)) + } else { + bytes.push( + new Uint8Array(element.buffer, element.byteOffset, element.byteLength) + ) + } + } else if (isBlobLike(element)) { + // 3. If element is a Blob, append the bytes it represents + // to bytes. + bytes.push(element) + } + } + + // 3. Return bytes. + return bytes +} + +/** + * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native + * @param {string} s + */ +function convertLineEndingsNative (s) { + // 1. Let native line ending be be the code point U+000A LF. + let nativeLineEnding = '\n' + + // 2. If the underlying platform’s conventions are to + // represent newlines as a carriage return and line feed + // sequence, set native line ending to the code point + // U+000D CR followed by the code point U+000A LF. + if (process.platform === 'win32') { + nativeLineEnding = '\r\n' + } + + return s.replace(/\r?\n/g, nativeLineEnding) +} + +// If this function is moved to ./util.js, some tools (such as +// rollup) will warn about circular dependencies. See: +// https://github.com/nodejs/undici/issues/1629 +function isFileLike (object) { + return ( + (NativeFile && object instanceof NativeFile) || + object instanceof File || ( + object && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + object[Symbol.toStringTag] === 'File' + ) + ) +} + +module.exports = { File, FileLike, isFileLike } diff --git a/lib/fetch/formdata.js b/lib/fetch/formdata.js new file mode 100644 index 0000000..5975e26 --- /dev/null +++ b/lib/fetch/formdata.js @@ -0,0 +1,265 @@ +'use strict' + +const { isBlobLike, toUSVString, makeIterator } = require('./util') +const { kState } = require('./symbols') +const { File: UndiciFile, FileLike, isFileLike } = require('./file') +const { webidl } = require('./webidl') +const { Blob, File: NativeFile } = require('buffer') + +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile + +// https://xhr.spec.whatwg.org/#formdata +class FormData { + constructor (form) { + if (form !== undefined) { + throw webidl.errors.conversionFailed({ + prefix: 'FormData constructor', + argument: 'Argument 1', + types: ['undefined'] + }) + } + + this[kState] = [] + } + + append (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? webidl.converters.USVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with + // name, value, and filename if given. + const entry = makeEntry(name, value, filename) + + // 3. Append entry to this’s entry list. + this[kState].push(entry) + } + + delete (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + + name = webidl.converters.USVString(name) + + // The delete(name) method steps are to remove all entries whose name + // is name from this’s entry list. 
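An illustrative sketch (not part of the patch): converting line endings to the platform's native convention, as `convertLineEndingsNative` does for string blob parts when `endings` is `'native'`. `convertToNative` is a hypothetical name.

```js
// Minimal sketch: normalize newlines to the platform's native line ending.
function convertToNative (s) {
  const nativeLineEnding = process.platform === 'win32' ? '\r\n' : '\n'
  return s.replace(/\r?\n/g, nativeLineEnding)
}

console.log(JSON.stringify(convertToNative('a\nb\r\nc')))
// '"a\nb\nc"' on POSIX, '"a\r\nb\r\nc"' on Windows
```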
+ this[kState] = this[kState].filter(entry => entry.name !== name) + } + + get (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return null. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx === -1) { + return null + } + + // 2. Return the value of the first entry whose name is name from + // this’s entry list. + return this[kState][idx].value + } + + getAll (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return the empty list. + // 2. Return the values of all entries whose name is name, in order, + // from this’s entry list. + return this[kState] + .filter((entry) => entry.name === name) + .map((entry) => entry.value) + } + + has (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) + + name = webidl.converters.USVString(name) + + // The has(name) method steps are to return true if there is an entry + // whose name is name in this’s entry list; otherwise false. + return this[kState].findIndex((entry) => entry.name === name) !== -1 + } + + set (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // The set(name, value) and set(name, blobValue, filename) method steps + // are: + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? toUSVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with name, value, and + // filename if given. + const entry = makeEntry(name, value, filename) + + // 3. If there are entries in this’s entry list whose name is name, then + // replace the first such entry with entry and remove the others. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx !== -1) { + this[kState] = [ + ...this[kState].slice(0, idx), + entry, + ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) + ] + } else { + // 4. Otherwise, append entry to this’s entry list. 
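An illustrative sketch (not part of the patch): the entry-list semantics above as seen from the public API; `append()` keeps duplicate entries, while `set()` replaces every entry with the same name. Assumes undici's `FormData` (or the global `FormData` in Node >= 18).

```js
// Minimal sketch: append() vs set() on the entry list.
const { FormData } = require('undici')

const fd = new FormData()
fd.append('tag', 'a')
fd.append('tag', 'b')
console.log(fd.getAll('tag')) // [ 'a', 'b' ]

fd.set('tag', 'c')
console.log(fd.getAll('tag')) // [ 'c' ]
console.log(fd.has('missing')) // false
```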
+ this[kState].push(entry) + } + } + + entries () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key+value' + ) + } + + keys () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key' + ) + } + + values () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'value' + ) + } + + /** + * @param {(value: string, key: string, self: FormData) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) + + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." + ) + } + + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) + } + } +} + +FormData.prototype[Symbol.iterator] = FormData.prototype.entries + +Object.defineProperties(FormData.prototype, { + [Symbol.toStringTag]: { + value: 'FormData', + configurable: true + } +}) + +/** + * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry + * @param {string} name + * @param {string|Blob} value + * @param {?string} filename + * @returns + */ +function makeEntry (name, value, filename) { + // 1. Set name to the result of converting name into a scalar value string. + // "To convert a string into a scalar value string, replace any surrogates + // with U+FFFD." + // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end + name = Buffer.from(name).toString('utf8') + + // 2. If value is a string, then set value to the result of converting + // value into a scalar value string. + if (typeof value === 'string') { + value = Buffer.from(value).toString('utf8') + } else { + // 3. Otherwise: + + // 1. If value is not a File object, then set value to a new File object, + // representing the same bytes, whose name attribute value is "blob" + if (!isFileLike(value)) { + value = value instanceof Blob + ? new File([value], 'blob', { type: value.type }) + : new FileLike(value, 'blob', { type: value.type }) + } + + // 2. If filename is given, then set value to a new File object, + // representing the same bytes, whose name attribute is filename. + if (filename !== undefined) { + /** @type {FilePropertyBag} */ + const options = { + type: value.type, + lastModified: value.lastModified + } + + value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile + ? new File([value], filename, options) + : new FileLike(value, filename, options) + } + } + + // 4. Return an entry whose name is name and whose value is value. + return { name, value } +} + +module.exports = { FormData } diff --git a/lib/fetch/global.js b/lib/fetch/global.js new file mode 100644 index 0000000..1df6f12 --- /dev/null +++ b/lib/fetch/global.js @@ -0,0 +1,40 @@ +'use strict' + +// In case of breaking changes, increase the version +// number to avoid conflicts. 
+const globalOrigin = Symbol.for('undici.globalOrigin.1') + +function getGlobalOrigin () { + return globalThis[globalOrigin] +} + +function setGlobalOrigin (newOrigin) { + if (newOrigin === undefined) { + Object.defineProperty(globalThis, globalOrigin, { + value: undefined, + writable: true, + enumerable: false, + configurable: false + }) + + return + } + + const parsedURL = new URL(newOrigin) + + if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { + throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) + } + + Object.defineProperty(globalThis, globalOrigin, { + value: parsedURL, + writable: true, + enumerable: false, + configurable: false + }) +} + +module.exports = { + getGlobalOrigin, + setGlobalOrigin +} diff --git a/lib/fetch/headers.js b/lib/fetch/headers.js new file mode 100644 index 0000000..2f1c0be --- /dev/null +++ b/lib/fetch/headers.js @@ -0,0 +1,589 @@ +// https://github.com/Ethan-Arrowood/undici-fetch + +'use strict' + +const { kHeadersList, kConstruct } = require('../core/symbols') +const { kGuard } = require('./symbols') +const { kEnumerableProperty } = require('../core/util') +const { + makeIterator, + isValidHeaderName, + isValidHeaderValue +} = require('./util') +const { webidl } = require('./webidl') +const assert = require('assert') + +const kHeadersMap = Symbol('headers map') +const kHeadersSortedMap = Symbol('headers map sorted') + +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize + * @param {string} potentialValue + */ +function headerValueNormalize (potentialValue) { + // To normalize a byte sequence potentialValue, remove + // any leading and trailing HTTP whitespace bytes from + // potentialValue. + let i = 0; let j = potentialValue.length + + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i + + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) +} + +function fill (headers, object) { + // To fill a Headers object headers with a given object object, run these steps: + + // 1. If object is a sequence, then for each header in object: + // Note: webidl conversion to array has already been done. + if (Array.isArray(object)) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] + // 1. If header does not contain exactly two items, then throw a TypeError. + if (header.length !== 2) { + throw webidl.errors.exception({ + header: 'Headers constructor', + message: `expected name/value pair to be length 2, found ${header.length}.` + }) + } + + // 2. Append (header’s first item, header’s second item) to headers. + appendHeader(headers, header[0], header[1]) + } + } else if (typeof object === 'object' && object !== null) { + // Note: null should throw + + // 2. 
Otherwise, object is a record, then for each key → value in object, + // append (key, value) to headers + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) + } + } else { + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) + } +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. + value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } + + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return. + // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } + + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. + + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) + + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} + +class HeadersList { + /** @type {[string, string][]|null} */ + cookies = null + + constructor (init) { + if (init instanceof HeadersList) { + this[kHeadersMap] = new Map(init[kHeadersMap]) + this[kHeadersSortedMap] = init[kHeadersSortedMap] + this.cookies = init.cookies === null ? null : [...init.cookies] + } else { + this[kHeadersMap] = new Map(init) + this[kHeadersSortedMap] = null + } + } + + // https://fetch.spec.whatwg.org/#header-list-contains + contains (name) { + // A header list list contains a header name name if list + // contains a header whose name is a byte-case-insensitive + // match for name. + name = name.toLowerCase() + + return this[kHeadersMap].has(name) + } + + clear () { + this[kHeadersMap].clear() + this[kHeadersSortedMap] = null + this.cookies = null + } + + // https://fetch.spec.whatwg.org/#concept-header-list-append + append (name, value) { + this[kHeadersSortedMap] = null + + // 1. If list contains name, then set name to the first such + // header’s name. + const lowercaseName = name.toLowerCase() + const exists = this[kHeadersMap].get(lowercaseName) + + // 2. Append (name, value) to list. + if (exists) { + const delimiter = lowercaseName === 'cookie' ? '; ' : ', ' + this[kHeadersMap].set(lowercaseName, { + name: exists.name, + value: `${exists.value}${delimiter}${value}` + }) + } else { + this[kHeadersMap].set(lowercaseName, { name, value }) + } + + if (lowercaseName === 'set-cookie') { + this.cookies ??= [] + this.cookies.push(value) + } + } + + // https://fetch.spec.whatwg.org/#concept-header-list-set + set (name, value) { + this[kHeadersSortedMap] = null + const lowercaseName = name.toLowerCase() + + if (lowercaseName === 'set-cookie') { + this.cookies = [value] + } + + // 1. 
If list contains name, then set the value of + // the first such header to value and remove the + // others. + // 2. Otherwise, append header (name, value) to list. + this[kHeadersMap].set(lowercaseName, { name, value }) + } + + // https://fetch.spec.whatwg.org/#concept-header-list-delete + delete (name) { + this[kHeadersSortedMap] = null + + name = name.toLowerCase() + + if (name === 'set-cookie') { + this.cookies = null + } + + this[kHeadersMap].delete(name) + } + + // https://fetch.spec.whatwg.org/#concept-header-list-get + get (name) { + const value = this[kHeadersMap].get(name.toLowerCase()) + + // 1. If list does not contain name, then return null. + // 2. Return the values of all headers in list whose name + // is a byte-case-insensitive match for name, + // separated from each other by 0x2C 0x20, in order. + return value === undefined ? null : value.value + } + + * [Symbol.iterator] () { + // use the lowercased name + for (const [name, { value }] of this[kHeadersMap]) { + yield [name, value] + } + } + + get entries () { + const headers = {} + + if (this[kHeadersMap].size) { + for (const { name, value } of this[kHeadersMap].values()) { + headers[name] = value + } + } + + return headers + } +} + +// https://fetch.spec.whatwg.org/#headers-class +class Headers { + constructor (init = undefined) { + if (init === kConstruct) { + return + } + this[kHeadersList] = new HeadersList() + + // The new Headers(init) constructor steps are: + + // 1. Set this’s guard to "none". + this[kGuard] = 'none' + + // 2. If init is given, then fill this with init. + if (init !== undefined) { + init = webidl.converters.HeadersInit(init) + fill(this, init) + } + } + + // https://fetch.spec.whatwg.org/#dom-headers-append + append (name, value) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) + + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) + + return appendHeader(this, name, value) + } + + // https://fetch.spec.whatwg.org/#dom-headers-delete + delete (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.delete', + value: name, + type: 'header name' + }) + } + + // 2. If this’s guard is "immutable", then throw a TypeError. + // 3. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 4. Otherwise, if this’s guard is "request-no-cors", name + // is not a no-CORS-safelisted request-header name, and + // name is not a privileged no-CORS request-header name, + // return. + // 5. Otherwise, if this’s guard is "response" and name is + // a forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 6. If this’s header list does not contain name, then + // return. + if (!this[kHeadersList].contains(name)) { + return + } + + // 7. Delete name from this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this. 
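+    // Note: HeadersList.delete (see above) also invalidates the cached
+    // sorted header map and, for `set-cookie`, clears the tracked cookie
+    // values.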
+ this[kHeadersList].delete(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-get + get (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.get', + value: name, + type: 'header name' + }) + } + + // 2. Return the result of getting name from this’s header + // list. + return this[kHeadersList].get(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-has + has (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.has', + value: name, + type: 'header name' + }) + } + + // 2. Return true if this’s header list contains name; + // otherwise false. + return this[kHeadersList].contains(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-set + set (name, value) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) + + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) + + // 1. Normalize value. + value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value, + type: 'header value' + }) + } + + // 3. If this’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 5. Otherwise, if this’s guard is "request-no-cors" and + // name/value is not a no-CORS-safelisted request-header, + // return. + // 6. Otherwise, if this’s guard is "response" and name is a + // forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 7. Set (name, value) in this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this + this[kHeadersList].set(name, value) + } + + // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie + getSetCookie () { + webidl.brandCheck(this, Headers) + + // 1. If this’s header list does not contain `Set-Cookie`, then return « ». + // 2. Return the values of all headers in this’s header list whose name is + // a byte-case-insensitive match for `Set-Cookie`, in order. + + const list = this[kHeadersList].cookies + + if (list) { + return [...list] + } + + return [] + } + + // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine + get [kHeadersSortedMap] () { + if (this[kHeadersList][kHeadersSortedMap]) { + return this[kHeadersList][kHeadersSortedMap] + } + + // 1. Let headers be an empty list of headers with the key being the name + // and value the value. + const headers = [] + + // 2. 
Let names be the result of convert header names to a sorted-lowercase + // set with all the names of the headers in list. + const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) + const cookies = this[kHeadersList].cookies + + // 3. For each name of names: + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] + // 1. If name is `set-cookie`, then: + if (name === 'set-cookie') { + // 1. Let values be a list of all values of headers in list whose name + // is a byte-case-insensitive match for name, in order. + + // 2. For each value of values: + // 1. Append (name, value) to headers. + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) + } + } else { + // 2. Otherwise: + + // 1. Let value be the result of getting name from list. + + // 2. Assert: value is non-null. + assert(value !== null) + + // 3. Append (name, value) to headers. + headers.push([name, value]) + } + } + + this[kHeadersList][kHeadersSortedMap] = headers + + // 4. Return headers. + return headers + } + + keys () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key' + ) + } + + values () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'value' + ) + } + + entries () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key+value' + ) + } + + /** + * @param {(value: string, key: string, self: Headers) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) + + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." 
+ ) + } + + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) + } + } + + [Symbol.for('nodejs.util.inspect.custom')] () { + webidl.brandCheck(this, Headers) + + return this[kHeadersList] + } +} + +Headers.prototype[Symbol.iterator] = Headers.prototype.entries + +Object.defineProperties(Headers.prototype, { + append: kEnumerableProperty, + delete: kEnumerableProperty, + get: kEnumerableProperty, + has: kEnumerableProperty, + set: kEnumerableProperty, + getSetCookie: kEnumerableProperty, + keys: kEnumerableProperty, + values: kEnumerableProperty, + entries: kEnumerableProperty, + forEach: kEnumerableProperty, + [Symbol.iterator]: { enumerable: false }, + [Symbol.toStringTag]: { + value: 'Headers', + configurable: true + } +}) + +webidl.converters.HeadersInit = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (V[Symbol.iterator]) { + return webidl.converters['sequence>'](V) + } + + return webidl.converters['record'](V) + } + + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) +} + +module.exports = { + fill, + Headers, + HeadersList +} diff --git a/lib/fetch/index.js b/lib/fetch/index.js new file mode 100644 index 0000000..17c3d87 --- /dev/null +++ b/lib/fetch/index.js @@ -0,0 +1,2145 @@ +// https://github.com/Ethan-Arrowood/undici-fetch + +'use strict' + +const { + Response, + makeNetworkError, + makeAppropriateNetworkError, + filterResponse, + makeResponse +} = require('./response') +const { Headers } = require('./headers') +const { Request, makeRequest } = require('./request') +const zlib = require('zlib') +const { + bytesMatch, + makePolicyContainer, + clonePolicyContainer, + requestBadPort, + TAOCheck, + appendRequestOriginHeader, + responseLocationURL, + requestCurrentURL, + setRequestReferrerPolicyOnRedirect, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + createOpaqueTimingInfo, + appendFetchMetadata, + corsCheck, + crossOriginResourcePolicyCheck, + determineRequestsReferrer, + coarsenedSharedCurrentTime, + createDeferredPromise, + isBlobLike, + sameOrigin, + isCancelled, + isAborted, + isErrorLike, + fullyReadBody, + readableStreamClose, + isomorphicEncode, + urlIsLocal, + urlIsHttpHttpsScheme, + urlHasHttpsScheme +} = require('./util') +const { kState, kHeaders, kGuard, kRealm } = require('./symbols') +const assert = require('assert') +const { safelyExtractBody } = require('./body') +const { + redirectStatusSet, + nullBodyStatus, + safeMethodsSet, + requestBodyHeader, + subresourceSet, + DOMException +} = require('./constants') +const { kHeadersList } = require('../core/symbols') +const EE = require('events') +const { Readable, pipeline } = require('stream') +const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = require('../core/util') +const { dataURLProcessor, serializeAMimeType } = require('./dataURL') +const { TransformStream } = require('stream/web') +const { getGlobalDispatcher } = require('../global') +const { webidl } = require('./webidl') +const { STATUS_CODES } = require('http') +const GET_OR_HEAD = ['GET', 'HEAD'] + +/** @type {import('buffer').resolveObjectURL} */ +let resolveObjectURL +let ReadableStream = globalThis.ReadableStream + +class Fetch extends EE { + constructor (dispatcher) { + super() + + this.dispatcher = dispatcher + this.connection = null + this.dump = false + this.state = 'ongoing' + // 2 terminated listeners get added per request, + // but only 1 gets removed. 
If there are 20 redirects, + // 21 listeners will be added. + // See https://github.com/nodejs/undici/issues/1711 + // TODO (fix): Find and fix root cause for leaked listener. + this.setMaxListeners(21) + } + + terminate (reason) { + if (this.state !== 'ongoing') { + return + } + + this.state = 'terminated' + this.connection?.destroy(reason) + this.emit('terminated', reason) + } + + // https://fetch.spec.whatwg.org/#fetch-controller-abort + abort (error) { + if (this.state !== 'ongoing') { + return + } + + // 1. Set controller’s state to "aborted". + this.state = 'aborted' + + // 2. Let fallbackError be an "AbortError" DOMException. + // 3. Set error to fallbackError if it is not given. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } + + // 4. Let serializedError be StructuredSerialize(error). + // If that threw an exception, catch it, and let + // serializedError be StructuredSerialize(fallbackError). + + // 5. Set controller’s serialized abort reason to serializedError. + this.serializedAbortReason = error + + this.connection?.destroy(error) + this.emit('terminated', error) + } +} + +// https://fetch.spec.whatwg.org/#fetch-method +function fetch (input, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) + + // 1. Let p be a new promise. + const p = createDeferredPromise() + + // 2. Let requestObject be the result of invoking the initial value of + // Request as constructor with input and init as arguments. If this throws + // an exception, reject p with it and return p. + let requestObject + + try { + requestObject = new Request(input, init) + } catch (e) { + p.reject(e) + return p.promise + } + + // 3. Let request be requestObject’s request. + const request = requestObject[kState] + + // 4. If requestObject’s signal’s aborted flag is set, then: + if (requestObject.signal.aborted) { + // 1. Abort the fetch() call with p, request, null, and + // requestObject’s signal’s abort reason. + abortFetch(p, request, null, requestObject.signal.reason) + + // 2. Return p. + return p.promise + } + + // 5. Let globalObject be request’s client’s global object. + const globalObject = request.client.globalObject + + // 6. If globalObject is a ServiceWorkerGlobalScope object, then set + // request’s service-workers mode to "none". + if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { + request.serviceWorkers = 'none' + } + + // 7. Let responseObject be null. + let responseObject = null + + // 8. Let relevantRealm be this’s relevant Realm. + const relevantRealm = null + + // 9. Let locallyAborted be false. + let locallyAborted = false + + // 10. Let controller be null. + let controller = null + + // 11. Add the following abort steps to requestObject’s signal: + addAbortListener( + requestObject.signal, + () => { + // 1. Set locallyAborted to true. + locallyAborted = true + + // 2. Assert: controller is non-null. + assert(controller != null) + + // 3. Abort controller with requestObject’s signal’s abort reason. + controller.abort(requestObject.signal.reason) + + // 4. Abort the fetch() call with p, request, responseObject, + // and requestObject’s signal’s abort reason. + abortFetch(p, request, responseObject, requestObject.signal.reason) + } + ) + + // 12. Let handleFetchDone given response response be to finalize and + // report timing with response, globalObject, and "fetch". + const handleFetchDone = (response) => + finalizeAndReportTiming(response, 'fetch') + + // 13. 
Set controller to the result of calling fetch given request, + // with processResponseEndOfBody set to handleFetchDone, and processResponse + // given response being these substeps: + + const processResponse = (response) => { + // 1. If locallyAborted is true, terminate these substeps. + if (locallyAborted) { + return Promise.resolve() + } + + // 2. If response’s aborted flag is set, then: + if (response.aborted) { + // 1. Let deserializedError be the result of deserialize a serialized + // abort reason given controller’s serialized abort reason and + // relevantRealm. + + // 2. Abort the fetch() call with p, request, responseObject, and + // deserializedError. + + abortFetch(p, request, responseObject, controller.serializedAbortReason) + return Promise.resolve() + } + + // 3. If response is a network error, then reject p with a TypeError + // and terminate these substeps. + if (response.type === 'error') { + p.reject( + Object.assign(new TypeError('fetch failed'), { cause: response.error }) + ) + return Promise.resolve() + } + + // 4. Set responseObject to the result of creating a Response object, + // given response, "immutable", and relevantRealm. + responseObject = new Response() + responseObject[kState] = response + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Resolve p with responseObject. + p.resolve(responseObject) + } + + controller = fetching({ + request, + processResponseEndOfBody: handleFetchDone, + processResponse, + dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici + }) + + // 14. Return p. + return p.promise +} + +// https://fetch.spec.whatwg.org/#finalize-and-report-timing +function finalizeAndReportTiming (response, initiatorType = 'other') { + // 1. If response is an aborted network error, then return. + if (response.type === 'error' && response.aborted) { + return + } + + // 2. If response’s URL list is null or empty, then return. + if (!response.urlList?.length) { + return + } + + // 3. Let originalURL be response’s URL list[0]. + const originalURL = response.urlList[0] + + // 4. Let timingInfo be response’s timing info. + let timingInfo = response.timingInfo + + // 5. Let cacheState be response’s cache state. + let cacheState = response.cacheState + + // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. + if (!urlIsHttpHttpsScheme(originalURL)) { + return + } + + // 7. If timingInfo is null, then return. + if (timingInfo === null) { + return + } + + // 8. If response’s timing allow passed flag is not set, then: + if (!response.timingAllowPassed) { + // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. + timingInfo = createOpaqueTimingInfo({ + startTime: timingInfo.startTime + }) + + // 2. Set cacheState to the empty string. + cacheState = '' + } + + // 9. Set timingInfo’s end time to the coarsened shared current time + // given global’s relevant settings object’s cross-origin isolated + // capability. + // TODO: given global’s relevant settings object’s cross-origin isolated + // capability? + timingInfo.endTime = coarsenedSharedCurrentTime() + + // 10. Set response’s timing info to timingInfo. + response.timingInfo = timingInfo + + // 11. Mark resource timing for timingInfo, originalURL, initiatorType, + // global, and cacheState. 
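+  // Note: markResourceTiming (defined below) only forwards to
+  // performance.markResourceTiming when its Node.js version guard
+  // (>= 18.2) passes; on older versions this step is a no-op.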
+ markResourceTiming( + timingInfo, + originalURL, + initiatorType, + globalThis, + cacheState + ) +} + +// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing +function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { + if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { + performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) + } +} + +// https://fetch.spec.whatwg.org/#abort-fetch +function abortFetch (p, request, responseObject, error) { + // Note: AbortSignal.reason was added in node v17.2.0 + // which would give us an undefined error to reject with. + // Remove this once node v16 is no longer supported. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } + + // 1. Reject promise with error. + p.reject(error) + + // 2. If request’s body is not null and is readable, then cancel request’s + // body with error. + if (request.body != null && isReadable(request.body?.stream)) { + request.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } + + // 3. If responseObject is null, then return. + if (responseObject == null) { + return + } + + // 4. Let response be responseObject’s response. + const response = responseObject[kState] + + // 5. If response’s body is not null and is readable, then error response’s + // body with error. + if (response.body != null && isReadable(response.body?.stream)) { + response.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } +} + +// https://fetch.spec.whatwg.org/#fetching +function fetching ({ + request, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseEndOfBody, + processResponseConsumeBody, + useParallelQueue = false, + dispatcher // undici +}) { + // 1. Let taskDestination be null. + let taskDestination = null + + // 2. Let crossOriginIsolatedCapability be false. + let crossOriginIsolatedCapability = false + + // 3. If request’s client is non-null, then: + if (request.client != null) { + // 1. Set taskDestination to request’s client’s global object. + taskDestination = request.client.globalObject + + // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin + // isolated capability. + crossOriginIsolatedCapability = + request.client.crossOriginIsolatedCapability + } + + // 4. If useParallelQueue is true, then set taskDestination to the result of + // starting a new parallel queue. + // TODO + + // 5. Let timingInfo be a new fetch timing info whose start time and + // post-redirect start time are the coarsened shared current time given + // crossOriginIsolatedCapability. + const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) + const timingInfo = createOpaqueTimingInfo({ + startTime: currenTime + }) + + // 6. Let fetchParams be a new fetch params whose + // request is request, + // timing info is timingInfo, + // process request body chunk length is processRequestBodyChunkLength, + // process request end-of-body is processRequestEndOfBody, + // process response is processResponse, + // process response consume body is processResponseConsumeBody, + // process response end-of-body is processResponseEndOfBody, + // task destination is taskDestination, + // and cross-origin isolated capability is crossOriginIsolatedCapability. 
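+  // Note: the controller created below (a Fetch instance) is also the return
+  // value of fetching(), which is how callers obtain abort()/terminate()
+  // hooks for this request.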
+ const fetchParams = { + controller: new Fetch(dispatcher), + request, + timingInfo, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseConsumeBody, + processResponseEndOfBody, + taskDestination, + crossOriginIsolatedCapability + } + + // 7. If request’s body is a byte sequence, then set request’s body to + // request’s body as a body. + // NOTE: Since fetching is only called from fetch, body should already be + // extracted. + assert(!request.body || request.body.stream) + + // 8. If request’s window is "client", then set request’s window to request’s + // client, if request’s client’s global object is a Window object; otherwise + // "no-window". + if (request.window === 'client') { + // TODO: What if request.client is null? + request.window = + request.client?.globalObject?.constructor?.name === 'Window' + ? request.client + : 'no-window' + } + + // 9. If request’s origin is "client", then set request’s origin to request’s + // client’s origin. + if (request.origin === 'client') { + // TODO: What if request.client is null? + request.origin = request.client?.origin + } + + // 10. If all of the following conditions are true: + // TODO + + // 11. If request’s policy container is "client", then: + if (request.policyContainer === 'client') { + // 1. If request’s client is non-null, then set request’s policy + // container to a clone of request’s client’s policy container. [HTML] + if (request.client != null) { + request.policyContainer = clonePolicyContainer( + request.client.policyContainer + ) + } else { + // 2. Otherwise, set request’s policy container to a new policy + // container. + request.policyContainer = makePolicyContainer() + } + } + + // 12. If request’s header list does not contain `Accept`, then: + if (!request.headersList.contains('accept')) { + // 1. Let value be `*/*`. + const value = '*/*' + + // 2. A user agent should set value to the first matching statement, if + // any, switching on request’s destination: + // "document" + // "frame" + // "iframe" + // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` + // "image" + // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` + // "style" + // `text/css,*/*;q=0.1` + // TODO + + // 3. Append `Accept`/value to request’s header list. + request.headersList.append('accept', value) + } + + // 13. If request’s header list does not contain `Accept-Language`, then + // user agents should append `Accept-Language`/an appropriate value to + // request’s header list. + if (!request.headersList.contains('accept-language')) { + request.headersList.append('accept-language', '*') + } + + // 14. If request’s priority is null, then use request’s initiator and + // destination appropriately in setting request’s priority to a + // user-agent-defined object. + if (request.priority === null) { + // TODO + } + + // 15. If request is a subresource request, then: + if (subresourceSet.has(request.destination)) { + // TODO + } + + // 16. Run main fetch given fetchParams. + mainFetch(fetchParams) + .catch(err => { + fetchParams.controller.terminate(err) + }) + + // 17. Return fetchParam's controller + return fetchParams.controller +} + +// https://fetch.spec.whatwg.org/#concept-main-fetch +async function mainFetch (fetchParams, recursive = false) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. 
If request’s local-URLs-only flag is set and request’s current URL is + // not local, then set response to a network error. + if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { + response = makeNetworkError('local URLs only') + } + + // 4. Run report Content Security Policy violations for request. + // TODO + + // 5. Upgrade request to a potentially trustworthy URL, if appropriate. + tryUpgradeRequestToAPotentiallyTrustworthyURL(request) + + // 6. If should request be blocked due to a bad port, should fetching request + // be blocked as mixed content, or should request be blocked by Content + // Security Policy returns blocked, then set response to a network error. + if (requestBadPort(request) === 'blocked') { + response = makeNetworkError('bad port') + } + // TODO: should fetching request be blocked as mixed content? + // TODO: should request be blocked by Content Security Policy? + + // 7. If request’s referrer policy is the empty string, then set request’s + // referrer policy to request’s policy container’s referrer policy. + if (request.referrerPolicy === '') { + request.referrerPolicy = request.policyContainer.referrerPolicy + } + + // 8. If request’s referrer is not "no-referrer", then set request’s + // referrer to the result of invoking determine request’s referrer. + if (request.referrer !== 'no-referrer') { + request.referrer = determineRequestsReferrer(request) + } + + // 9. Set request’s current URL’s scheme to "https" if all of the following + // conditions are true: + // - request’s current URL’s scheme is "http" + // - request’s current URL’s host is a domain + // - Matching request’s current URL’s host per Known HSTS Host Domain Name + // Matching results in either a superdomain match with an asserted + // includeSubDomains directive or a congruent match (with or without an + // asserted includeSubDomains directive). [HSTS] + // TODO + + // 10. If recursive is false, then run the remaining steps in parallel. + // TODO + + // 11. If response is null, then set response to the result of running + // the steps corresponding to the first matching statement: + if (response === null) { + response = await (async () => { + const currentURL = requestCurrentURL(request) + + if ( + // - request’s current URL’s origin is same origin with request’s origin, + // and request’s response tainting is "basic" + (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || + // request’s current URL’s scheme is "data" + (currentURL.protocol === 'data:') || + // - request’s mode is "navigate" or "websocket" + (request.mode === 'navigate' || request.mode === 'websocket') + ) { + // 1. Set request’s response tainting to "basic". + request.responseTainting = 'basic' + + // 2. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) + } + + // request’s mode is "same-origin" + if (request.mode === 'same-origin') { + // 1. Return a network error. + return makeNetworkError('request mode cannot be "same-origin"') + } + + // request’s mode is "no-cors" + if (request.mode === 'no-cors') { + // 1. If request’s redirect mode is not "follow", then return a network + // error. + if (request.redirect !== 'follow') { + return makeNetworkError( + 'redirect mode cannot be "follow" for "no-cors" request' + ) + } + + // 2. Set request’s response tainting to "opaque". + request.responseTainting = 'opaque' + + // 3. Return the result of running scheme fetch given fetchParams. 
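+        // (The "opaque" response tainting set above is what later produces
+        // the opaque filtered response in step 13 of main fetch.)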
+ return await schemeFetch(fetchParams) + } + + // request’s current URL’s scheme is not an HTTP(S) scheme + if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { + // Return a network error. + return makeNetworkError('URL scheme must be a HTTP(S) scheme') + } + + // - request’s use-CORS-preflight flag is set + // - request’s unsafe-request flag is set and either request’s method is + // not a CORS-safelisted method or CORS-unsafe request-header names with + // request’s header list is not empty + // 1. Set request’s response tainting to "cors". + // 2. Let corsWithPreflightResponse be the result of running HTTP fetch + // given fetchParams and true. + // 3. If corsWithPreflightResponse is a network error, then clear cache + // entries using request. + // 4. Return corsWithPreflightResponse. + // TODO + + // Otherwise + // 1. Set request’s response tainting to "cors". + request.responseTainting = 'cors' + + // 2. Return the result of running HTTP fetch given fetchParams. + return await httpFetch(fetchParams) + })() + } + + // 12. If recursive is true, then return response. + if (recursive) { + return response + } + + // 13. If response is not a network error and response is not a filtered + // response, then: + if (response.status !== 0 && !response.internalResponse) { + // If request’s response tainting is "cors", then: + if (request.responseTainting === 'cors') { + // 1. Let headerNames be the result of extracting header list values + // given `Access-Control-Expose-Headers` and response’s header list. + // TODO + // 2. If request’s credentials mode is not "include" and headerNames + // contains `*`, then set response’s CORS-exposed header-name list to + // all unique header names in response’s header list. + // TODO + // 3. Otherwise, if headerNames is not null or failure, then set + // response’s CORS-exposed header-name list to headerNames. + // TODO + } + + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (request.responseTainting === 'basic') { + response = filterResponse(response, 'basic') + } else if (request.responseTainting === 'cors') { + response = filterResponse(response, 'cors') + } else if (request.responseTainting === 'opaque') { + response = filterResponse(response, 'opaque') + } else { + assert(false) + } + } + + // 14. Let internalResponse be response, if response is a network error, + // and response’s internal response otherwise. + let internalResponse = + response.status === 0 ? response : response.internalResponse + + // 15. If internalResponse’s URL list is empty, then set it to a clone of + // request’s URL list. + if (internalResponse.urlList.length === 0) { + internalResponse.urlList.push(...request.urlList) + } + + // 16. If request’s timing allow failed flag is unset, then set + // internalResponse’s timing allow passed flag. + if (!request.timingAllowFailed) { + response.timingAllowPassed = true + } + + // 17. If response is not a network error and any of the following returns + // blocked + // - should internalResponse to request be blocked as mixed content + // - should internalResponse to request be blocked by Content Security Policy + // - should internalResponse to request be blocked due to its MIME type + // - should internalResponse to request be blocked due to nosniff + // TODO + + // 18. 
If response’s type is "opaque", internalResponse’s status is 206, + // internalResponse’s range-requested flag is set, and request’s header + // list does not contain `Range`, then set response and internalResponse + // to a network error. + if ( + response.type === 'opaque' && + internalResponse.status === 206 && + internalResponse.rangeRequested && + !request.headers.contains('range') + ) { + response = internalResponse = makeNetworkError() + } + + // 19. If response is not a network error and either request’s method is + // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, + // set internalResponse’s body to null and disregard any enqueuing toward + // it (if any). + if ( + response.status !== 0 && + (request.method === 'HEAD' || + request.method === 'CONNECT' || + nullBodyStatus.includes(internalResponse.status)) + ) { + internalResponse.body = null + fetchParams.controller.dump = true + } + + // 20. If request’s integrity metadata is not the empty string, then: + if (request.integrity) { + // 1. Let processBodyError be this step: run fetch finale given fetchParams + // and a network error. + const processBodyError = (reason) => + fetchFinale(fetchParams, makeNetworkError(reason)) + + // 2. If request’s response tainting is "opaque", or response’s body is null, + // then run processBodyError and abort these steps. + if (request.responseTainting === 'opaque' || response.body == null) { + processBodyError(response.error) + return + } + + // 3. Let processBody given bytes be these steps: + const processBody = (bytes) => { + // 1. If bytes do not match request’s integrity metadata, + // then run processBodyError and abort these steps. [SRI] + if (!bytesMatch(bytes, request.integrity)) { + processBodyError('integrity mismatch') + return + } + + // 2. Set response’s body to bytes as a body. + response.body = safelyExtractBody(bytes)[0] + + // 3. Run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } + + // 4. Fully read response’s body given processBody and processBodyError. + await fullyReadBody(response.body, processBody, processBodyError) + } else { + // 21. Otherwise, run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } +} + +// https://fetch.spec.whatwg.org/#concept-scheme-fetch +// given a fetch params fetchParams +function schemeFetch (fetchParams) { + // Note: since the connection is destroyed on redirect, which sets fetchParams to a + // cancelled state, we do not want this condition to trigger *unless* there have been + // no redirects. See https://github.com/nodejs/undici/issues/1776 + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { + return Promise.resolve(makeAppropriateNetworkError(fetchParams)) + } + + // 2. Let request be fetchParams’s request. + const { request } = fetchParams + + const { protocol: scheme } = requestCurrentURL(request) + + // 3. Switch on request’s current URL’s scheme and run the associated steps: + switch (scheme) { + case 'about:': { + // If request’s current URL’s path is the string "blank", then return a new response + // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », + // and body is the empty byte sequence as a body. + + // Otherwise, return a network error. 
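+      // Note: unlike the spec, this implementation does not special-case
+      // "about:blank"; every about: URL is answered with a network error.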
+ return Promise.resolve(makeNetworkError('about scheme is not supported')) + } + case 'blob:': { + if (!resolveObjectURL) { + resolveObjectURL = require('buffer').resolveObjectURL + } + + // 1. Let blobURLEntry be request’s current URL’s blob URL entry. + const blobURLEntry = requestCurrentURL(request) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 + // Buffer.resolveObjectURL does not ignore URL queries. + if (blobURLEntry.search.length !== 0) { + return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) + } + + const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) + + // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s + // object is not a Blob object, then return a network error. + if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { + return Promise.resolve(makeNetworkError('invalid method')) + } + + // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. + const bodyWithType = safelyExtractBody(blobURLEntryObject) + + // 4. Let body be bodyWithType’s body. + const body = bodyWithType[0] + + // 5. Let length be body’s length, serialized and isomorphic encoded. + const length = isomorphicEncode(`${body.length}`) + + // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. + const type = bodyWithType[1] ?? '' + + // 7. Return a new response whose status message is `OK`, header list is + // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. + const response = makeResponse({ + statusText: 'OK', + headersList: [ + ['content-length', { name: 'Content-Length', value: length }], + ['content-type', { name: 'Content-Type', value: type }] + ] + }) + + response.body = body + + return Promise.resolve(response) + } + case 'data:': { + // 1. Let dataURLStruct be the result of running the + // data: URL processor on request’s current URL. + const currentURL = requestCurrentURL(request) + const dataURLStruct = dataURLProcessor(currentURL) + + // 2. If dataURLStruct is failure, then return a + // network error. + if (dataURLStruct === 'failure') { + return Promise.resolve(makeNetworkError('failed to fetch the data URL')) + } + + // 3. Let mimeType be dataURLStruct’s MIME type, serialized. + const mimeType = serializeAMimeType(dataURLStruct.mimeType) + + // 4. Return a response whose status message is `OK`, + // header list is « (`Content-Type`, mimeType) », + // and body is dataURLStruct’s body as a body. + return Promise.resolve(makeResponse({ + statusText: 'OK', + headersList: [ + ['content-type', { name: 'Content-Type', value: mimeType }] + ], + body: safelyExtractBody(dataURLStruct.body)[0] + })) + } + case 'file:': { + // For now, unfortunate as it is, file URLs are left as an exercise for the reader. + // When in doubt, return a network error. + return Promise.resolve(makeNetworkError('not implemented... yet...')) + } + case 'http:': + case 'https:': { + // Return the result of running HTTP fetch given fetchParams. + + return httpFetch(fetchParams) + .catch((err) => makeNetworkError(err)) + } + default: { + return Promise.resolve(makeNetworkError('unknown scheme')) + } + } +} + +// https://fetch.spec.whatwg.org/#finalize-response +function finalizeResponse (fetchParams, response) { + // 1. Set fetchParams’s request’s done flag. 
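+  // Note: "queue a fetch task" is approximated with queueMicrotask here and
+  // in fetchFinale below, rather than a dedicated task queue.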
+ fetchParams.request.done = true + + // 2, If fetchParams’s process response done is not null, then queue a fetch + // task to run fetchParams’s process response done given response, with + // fetchParams’s task destination. + if (fetchParams.processResponseDone != null) { + queueMicrotask(() => fetchParams.processResponseDone(response)) + } +} + +// https://fetch.spec.whatwg.org/#fetch-finale +function fetchFinale (fetchParams, response) { + // 1. If response is a network error, then: + if (response.type === 'error') { + // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». + response.urlList = [fetchParams.request.urlList[0]] + + // 2. Set response’s timing info to the result of creating an opaque timing + // info for fetchParams’s timing info. + response.timingInfo = createOpaqueTimingInfo({ + startTime: fetchParams.timingInfo.startTime + }) + } + + // 2. Let processResponseEndOfBody be the following steps: + const processResponseEndOfBody = () => { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true + + // If fetchParams’s process response end-of-body is not null, + // then queue a fetch task to run fetchParams’s process response + // end-of-body given response with fetchParams’s task destination. + if (fetchParams.processResponseEndOfBody != null) { + queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) + } + } + + // 3. If fetchParams’s process response is non-null, then queue a fetch task + // to run fetchParams’s process response given response, with fetchParams’s + // task destination. + if (fetchParams.processResponse != null) { + queueMicrotask(() => fetchParams.processResponse(response)) + } + + // 4. If response’s body is null, then run processResponseEndOfBody. + if (response.body == null) { + processResponseEndOfBody() + } else { + // 5. Otherwise: + + // 1. Let transformStream be a new a TransformStream. + + // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, + // enqueues chunk in transformStream. + const identityTransformAlgorithm = (chunk, controller) => { + controller.enqueue(chunk) + } + + // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm + // and flushAlgorithm set to processResponseEndOfBody. + const transformStream = new TransformStream({ + start () {}, + transform: identityTransformAlgorithm, + flush: processResponseEndOfBody + }, { + size () { + return 1 + } + }, { + size () { + return 1 + } + }) + + // 4. Set response’s body to the result of piping response’s body through transformStream. + response.body = { stream: response.body.stream.pipeThrough(transformStream) } + } + + // 6. If fetchParams’s process response consume body is non-null, then: + if (fetchParams.processResponseConsumeBody != null) { + // 1. Let processBody given nullOrBytes be this step: run fetchParams’s + // process response consume body given response and nullOrBytes. + const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) + + // 2. Let processBodyError be this step: run fetchParams’s process + // response consume body given response and failure. + const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) + + // 3. If response’s body is null, then queue a fetch task to run processBody + // given null, with fetchParams’s task destination. + if (response.body == null) { + queueMicrotask(() => processBody(null)) + } else { + // 4. 
Otherwise, fully read response’s body given processBody, processBodyError, + // and fetchParams’s task destination. + return fullyReadBody(response.body, processBody, processBodyError) + } + return Promise.resolve() + } +} + +// https://fetch.spec.whatwg.org/#http-fetch +async function httpFetch (fetchParams) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. Let actualResponse be null. + let actualResponse = null + + // 4. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 5. If request’s service-workers mode is "all", then: + if (request.serviceWorkers === 'all') { + // TODO + } + + // 6. If response is null, then: + if (response === null) { + // 1. If makeCORSPreflight is true and one of these conditions is true: + // TODO + + // 2. If request’s redirect mode is "follow", then set request’s + // service-workers mode to "none". + if (request.redirect === 'follow') { + request.serviceWorkers = 'none' + } + + // 3. Set response and actualResponse to the result of running + // HTTP-network-or-cache fetch given fetchParams. + actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) + + // 4. If request’s response tainting is "cors" and a CORS check + // for request and response returns failure, then return a network error. + if ( + request.responseTainting === 'cors' && + corsCheck(request, response) === 'failure' + ) { + return makeNetworkError('cors failure') + } + + // 5. If the TAO check for request and response returns failure, then set + // request’s timing allow failed flag. + if (TAOCheck(request, response) === 'failure') { + request.timingAllowFailed = true + } + } + + // 7. If either request’s response tainting or response’s type + // is "opaque", and the cross-origin resource policy check with + // request’s origin, request’s client, request’s destination, + // and actualResponse returns blocked, then return a network error. + if ( + (request.responseTainting === 'opaque' || response.type === 'opaque') && + crossOriginResourcePolicyCheck( + request.origin, + request.client, + request.destination, + actualResponse + ) === 'blocked' + ) { + return makeNetworkError('blocked') + } + + // 8. If actualResponse’s status is a redirect status, then: + if (redirectStatusSet.has(actualResponse.status)) { + // 1. If actualResponse’s status is not 303, request’s body is not null, + // and the connection uses HTTP/2, then user agents may, and are even + // encouraged to, transmit an RST_STREAM frame. + // See, https://github.com/whatwg/fetch/issues/1288 + if (request.redirect !== 'manual') { + fetchParams.controller.connection.destroy() + } + + // 2. Switch on request’s redirect mode: + if (request.redirect === 'error') { + // Set response to a network error. + response = makeNetworkError('unexpected redirect') + } else if (request.redirect === 'manual') { + // Set response to an opaque-redirect filtered response whose internal + // response is actualResponse. + // NOTE(spec): On the web this would return an `opaqueredirect` response, + // but that doesn't make sense server side. + // See https://github.com/nodejs/undici/issues/1193. + response = actualResponse + } else if (request.redirect === 'follow') { + // Set response to the result of running HTTP-redirect fetch given + // fetchParams and response. + response = await httpRedirectFetch(fetchParams, response) + } else { + assert(false) + } + } + + // 9. 
Set response’s timing info to timingInfo. + response.timingInfo = timingInfo + + // 10. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-redirect-fetch +function httpRedirectFetch (fetchParams, response) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let actualResponse be response, if response is not a filtered response, + // and response’s internal response otherwise. + const actualResponse = response.internalResponse + ? response.internalResponse + : response + + // 3. Let locationURL be actualResponse’s location URL given request’s current + // URL’s fragment. + let locationURL + + try { + locationURL = responseLocationURL( + actualResponse, + requestCurrentURL(request).hash + ) + + // 4. If locationURL is null, then return response. + if (locationURL == null) { + return response + } + } catch (err) { + // 5. If locationURL is failure, then return a network error. + return Promise.resolve(makeNetworkError(err)) + } + + // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network + // error. + if (!urlIsHttpHttpsScheme(locationURL)) { + return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) + } + + // 7. If request’s redirect count is 20, then return a network error. + if (request.redirectCount === 20) { + return Promise.resolve(makeNetworkError('redirect count exceeded')) + } + + // 8. Increase request’s redirect count by 1. + request.redirectCount += 1 + + // 9. If request’s mode is "cors", locationURL includes credentials, and + // request’s origin is not same origin with locationURL’s origin, then return + // a network error. + if ( + request.mode === 'cors' && + (locationURL.username || locationURL.password) && + !sameOrigin(request, locationURL) + ) { + return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) + } + + // 10. If request’s response tainting is "cors" and locationURL includes + // credentials, then return a network error. + if ( + request.responseTainting === 'cors' && + (locationURL.username || locationURL.password) + ) { + return Promise.resolve(makeNetworkError( + 'URL cannot contain credentials for request mode "cors"' + )) + } + + // 11. If actualResponse’s status is not 303, request’s body is non-null, + // and request’s body’s source is null, then return a network error. + if ( + actualResponse.status !== 303 && + request.body != null && + request.body.source == null + ) { + return Promise.resolve(makeNetworkError()) + } + + // 12. If one of the following is true + // - actualResponse’s status is 301 or 302 and request’s method is `POST` + // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` + if ( + ([301, 302].includes(actualResponse.status) && request.method === 'POST') || + (actualResponse.status === 303 && + !GET_OR_HEAD.includes(request.method)) + ) { + // then: + // 1. Set request’s method to `GET` and request’s body to null. + request.method = 'GET' + request.body = null + + // 2. For each headerName of request-body-header name, delete headerName from + // request’s header list. + for (const headerName of requestBodyHeader) { + request.headersList.delete(headerName) + } + } + + // 13. If request’s current URL’s origin is not same origin with locationURL’s + // origin, then for each headerName of CORS non-wildcard request-header name, + // delete headerName from request’s header list. 
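+  // Note: stripping `authorization` (plus the `cookie` and `host` headers)
+  // on a cross-origin redirect keeps credentials from leaking to the new
+  // origin.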
+ if (!sameOrigin(requestCurrentURL(request), locationURL)) { + // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name + request.headersList.delete('authorization') + + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. + request.headersList.delete('cookie') + request.headersList.delete('host') + } + + // 14. If request’s body is non-null, then set request’s body to the first return + // value of safely extracting request’s body’s source. + if (request.body != null) { + assert(request.body.source != null) + request.body = safelyExtractBody(request.body.source)[0] + } + + // 15. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 16. Set timingInfo’s redirect end time and post-redirect start time to the + // coarsened shared current time given fetchParams’s cross-origin isolated + // capability. + timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = + coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) + + // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s + // redirect start time to timingInfo’s start time. + if (timingInfo.redirectStartTime === 0) { + timingInfo.redirectStartTime = timingInfo.startTime + } + + // 18. Append locationURL to request’s URL list. + request.urlList.push(locationURL) + + // 19. Invoke set request’s referrer policy on redirect on request and + // actualResponse. + setRequestReferrerPolicyOnRedirect(request, actualResponse) + + // 20. Return the result of running main fetch given fetchParams and true. + return mainFetch(fetchParams, true) +} + +// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch +async function httpNetworkOrCacheFetch ( + fetchParams, + isAuthenticationFetch = false, + isNewConnectionFetch = false +) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let httpFetchParams be null. + let httpFetchParams = null + + // 3. Let httpRequest be null. + let httpRequest = null + + // 4. Let response be null. + let response = null + + // 5. Let storedResponse be null. + // TODO: cache + + // 6. Let httpCache be null. + const httpCache = null + + // 7. Let the revalidatingFlag be unset. + const revalidatingFlag = false + + // 8. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If request’s window is "no-window" and request’s redirect mode is + // "error", then set httpFetchParams to fetchParams and httpRequest to + // request. + if (request.window === 'no-window' && request.redirect === 'error') { + httpFetchParams = fetchParams + httpRequest = request + } else { + // Otherwise: + + // 1. Set httpRequest to a clone of request. + httpRequest = makeRequest(request) + + // 2. Set httpFetchParams to a copy of fetchParams. + httpFetchParams = { ...fetchParams } + + // 3. Set httpFetchParams’s request to httpRequest. + httpFetchParams.request = httpRequest + } + + // 3. Let includeCredentials be true if one of + const includeCredentials = + request.credentials === 'include' || + (request.credentials === 'same-origin' && + request.responseTainting === 'basic') + + // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s + // body is non-null; otherwise null. + const contentLength = httpRequest.body ? httpRequest.body.length : null + + // 5. Let contentLengthHeaderValue be null. + let contentLengthHeaderValue = null + + // 6. 
If httpRequest’s body is null and httpRequest’s method is `POST` or + // `PUT`, then set contentLengthHeaderValue to `0`. + if ( + httpRequest.body == null && + ['POST', 'PUT'].includes(httpRequest.method) + ) { + contentLengthHeaderValue = '0' + } + + // 7. If contentLength is non-null, then set contentLengthHeaderValue to + // contentLength, serialized and isomorphic encoded. + if (contentLength != null) { + contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) + } + + // 8. If contentLengthHeaderValue is non-null, then append + // `Content-Length`/contentLengthHeaderValue to httpRequest’s header + // list. + if (contentLengthHeaderValue != null) { + httpRequest.headersList.append('content-length', contentLengthHeaderValue) + } + + // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, + // contentLengthHeaderValue) to httpRequest’s header list. + + // 10. If contentLength is non-null and httpRequest’s keepalive is true, + // then: + if (contentLength != null && httpRequest.keepalive) { + // NOTE: keepalive is a noop outside of browser context. + } + + // 11. If httpRequest’s referrer is a URL, then append + // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, + // to httpRequest’s header list. + if (httpRequest.referrer instanceof URL) { + httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) + } + + // 12. Append a request `Origin` header for httpRequest. + appendRequestOriginHeader(httpRequest) + + // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] + appendFetchMetadata(httpRequest) + + // 14. If httpRequest’s header list does not contain `User-Agent`, then + // user agents should append `User-Agent`/default `User-Agent` value to + // httpRequest’s header list. + if (!httpRequest.headersList.contains('user-agent')) { + httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') + } + + // 15. If httpRequest’s cache mode is "default" and httpRequest’s header + // list contains `If-Modified-Since`, `If-None-Match`, + // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set + // httpRequest’s cache mode to "no-store". + if ( + httpRequest.cache === 'default' && + (httpRequest.headersList.contains('if-modified-since') || + httpRequest.headersList.contains('if-none-match') || + httpRequest.headersList.contains('if-unmodified-since') || + httpRequest.headersList.contains('if-match') || + httpRequest.headersList.contains('if-range')) + ) { + httpRequest.cache = 'no-store' + } + + // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent + // no-cache cache-control header modification flag is unset, and + // httpRequest’s header list does not contain `Cache-Control`, then append + // `Cache-Control`/`max-age=0` to httpRequest’s header list. + if ( + httpRequest.cache === 'no-cache' && + !httpRequest.preventNoCacheCacheControlHeaderModification && + !httpRequest.headersList.contains('cache-control') + ) { + httpRequest.headersList.append('cache-control', 'max-age=0') + } + + // 17. If httpRequest’s cache mode is "no-store" or "reload", then: + if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { + // 1. If httpRequest’s header list does not contain `Pragma`, then append + // `Pragma`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('pragma')) { + httpRequest.headersList.append('pragma', 'no-cache') + } + + // 2. 
If httpRequest’s header list does not contain `Cache-Control`, + // then append `Cache-Control`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('cache-control')) { + httpRequest.headersList.append('cache-control', 'no-cache') + } + } + + // 18. If httpRequest’s header list contains `Range`, then append + // `Accept-Encoding`/`identity` to httpRequest’s header list. + if (httpRequest.headersList.contains('range')) { + httpRequest.headersList.append('accept-encoding', 'identity') + } + + // 19. Modify httpRequest’s header list per HTTP. Do not append a given + // header if httpRequest’s header list contains that header’s name. + // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 + if (!httpRequest.headersList.contains('accept-encoding')) { + if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { + httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') + } else { + httpRequest.headersList.append('accept-encoding', 'gzip, deflate') + } + } + + httpRequest.headersList.delete('host') + + // 20. If includeCredentials is true, then: + if (includeCredentials) { + // 1. If the user agent is not configured to block cookies for httpRequest + // (see section 7 of [COOKIES]), then: + // TODO: credentials + // 2. If httpRequest’s header list does not contain `Authorization`, then: + // TODO: credentials + } + + // 21. If there’s a proxy-authentication entry, use it as appropriate. + // TODO: proxy-authentication + + // 22. Set httpCache to the result of determining the HTTP cache + // partition, given httpRequest. + // TODO: cache + + // 23. If httpCache is null, then set httpRequest’s cache mode to + // "no-store". + if (httpCache == null) { + httpRequest.cache = 'no-store' + } + + // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", + // then: + if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { + // TODO: cache + } + + // 9. If aborted, then return the appropriate network error for fetchParams. + // TODO + + // 10. If response is null, then: + if (response == null) { + // 1. If httpRequest’s cache mode is "only-if-cached", then return a + // network error. + if (httpRequest.mode === 'only-if-cached') { + return makeNetworkError('only if cached') + } + + // 2. Let forwardResponse be the result of running HTTP-network fetch + // given httpFetchParams, includeCredentials, and isNewConnectionFetch. + const forwardResponse = await httpNetworkFetch( + httpFetchParams, + includeCredentials, + isNewConnectionFetch + ) + + // 3. If httpRequest’s method is unsafe and forwardResponse’s status is + // in the range 200 to 399, inclusive, invalidate appropriate stored + // responses in httpCache, as per the "Invalidation" chapter of HTTP + // Caching, and set storedResponse to null. [HTTP-CACHING] + if ( + !safeMethodsSet.has(httpRequest.method) && + forwardResponse.status >= 200 && + forwardResponse.status <= 399 + ) { + // TODO: cache + } + + // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, + // then: + if (revalidatingFlag && forwardResponse.status === 304) { + // TODO: cache + } + + // 5. If response is null, then: + if (response == null) { + // 1. Set response to forwardResponse. + response = forwardResponse + + // 2. Store httpRequest and forwardResponse in httpCache, as per the + // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] + // TODO: cache + } + } + + // 11. Set response’s URL list to a clone of httpRequest’s URL list. 
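+ // The spread creates a shallow copy, so the response keeps its own snapshot of the
+ // URL list even if httpRequest’s list is mutated afterwards.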
+ response.urlList = [...httpRequest.urlList] + + // 12. If httpRequest’s header list contains `Range`, then set response’s + // range-requested flag. + if (httpRequest.headersList.contains('range')) { + response.rangeRequested = true + } + + // 13. Set response’s request-includes-credentials to includeCredentials. + response.requestIncludesCredentials = includeCredentials + + // 14. If response’s status is 401, httpRequest’s response tainting is not + // "cors", includeCredentials is true, and request’s window is an environment + // settings object, then: + // TODO + + // 15. If response’s status is 407, then: + if (response.status === 407) { + // 1. If request’s window is "no-window", then return a network error. + if (request.window === 'no-window') { + return makeNetworkError() + } + + // 2. ??? + + // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } + + // 4. Prompt the end user as appropriate in request’s window and store + // the result as a proxy-authentication entry. [HTTP-AUTH] + // TODO: Invoke some kind of callback? + + // 5. Set response to the result of running HTTP-network-or-cache fetch given + // fetchParams. + // TODO + return makeNetworkError('proxy authentication required') + } + + // 16. If all of the following are true + if ( + // response’s status is 421 + response.status === 421 && + // isNewConnectionFetch is false + !isNewConnectionFetch && + // request’s body is null, or request’s body is non-null and request’s body’s source is non-null + (request.body == null || request.body.source != null) + ) { + // then: + + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } + + // 2. Set response to the result of running HTTP-network-or-cache + // fetch given fetchParams, isAuthenticationFetch, and true. + + // TODO (spec): The spec doesn't specify this but we need to cancel + // the active response before we can start a new one. + // https://github.com/whatwg/fetch/issues/1293 + fetchParams.controller.connection.destroy() + + response = await httpNetworkOrCacheFetch( + fetchParams, + isAuthenticationFetch, + true + ) + } + + // 17. If isAuthenticationFetch is true, then create an authentication entry + if (isAuthenticationFetch) { + // TODO + } + + // 18. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-network-fetch +async function httpNetworkFetch ( + fetchParams, + includeCredentials = false, + forceNewConnection = false +) { + assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) + + fetchParams.controller.connection = { + abort: null, + destroyed: false, + destroy (err) { + if (!this.destroyed) { + this.destroyed = true + this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + } + } + } + + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 4. Let httpCache be the result of determining the HTTP cache partition, + // given request. + // TODO: cache + const httpCache = null + + // 5. If httpCache is null, then set request’s cache mode to "no-store". + if (httpCache == null) { + request.cache = 'no-store' + } + + // 6. 
Let networkPartitionKey be the result of determining the network + // partition key given request. + // TODO + + // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise + // "no". + const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars + + // 8. Switch on request’s mode: + if (request.mode === 'websocket') { + // Let connection be the result of obtaining a WebSocket connection, + // given request’s current URL. + // TODO + } else { + // Let connection be the result of obtaining a connection, given + // networkPartitionKey, request’s current URL’s origin, + // includeCredentials, and forceNewConnection. + // TODO + } + + // 9. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If connection is failure, then return a network error. + + // 2. Set timingInfo’s final connection timing info to the result of + // calling clamp and coarsen connection timing info with connection’s + // timing info, timingInfo’s post-redirect start time, and fetchParams’s + // cross-origin isolated capability. + + // 3. If connection is not an HTTP/2 connection, request’s body is non-null, + // and request’s body’s source is null, then append (`Transfer-Encoding`, + // `chunked`) to request’s header list. + + // 4. Set timingInfo’s final network-request start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated + // capability. + + // 5. Set response to the result of making an HTTP request over connection + // using request with the following caveats: + + // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] + // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] + + // - If request’s body is non-null, and request’s body’s source is null, + // then the user agent may have a buffer of up to 64 kibibytes and store + // a part of request’s body in that buffer. If the user agent reads from + // request’s body beyond that buffer’s size and the user agent needs to + // resend request, then instead return a network error. + + // - Set timingInfo’s final network-response start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated capability, + // immediately after the user agent’s HTTP parser receives the first byte + // of the response (e.g., frame header bytes for HTTP/2 or response status + // line for HTTP/1.x). + + // - Wait until all the headers are transmitted. + + // - Any responses whose status is in the range 100 to 199, inclusive, + // and is not 101, are to be ignored, except for the purposes of setting + // timingInfo’s final network-response start time above. + + // - If request’s header list contains `Transfer-Encoding`/`chunked` and + // response is transferred via HTTP/1.0 or older, then return a network + // error. + + // - If the HTTP request results in a TLS client certificate dialog, then: + + // 1. If request’s window is an environment settings object, make the + // dialog available in request’s window. + + // 2. Otherwise, return a network error. + + // To transmit request’s body body, run these steps: + let requestBody = null + // 1. If body is null and fetchParams’s process request end-of-body is + // non-null, then queue a fetch task given fetchParams’s process request + // end-of-body and fetchParams’s task destination. + if (request.body == null && fetchParams.processRequestEndOfBody) { + queueMicrotask(() => fetchParams.processRequestEndOfBody()) + } else if (request.body != null) { + // 2. Otherwise, if body is non-null: + + // 1. 
Let processBodyChunk given bytes be these steps: + const processBodyChunk = async function * (bytes) { + // 1. If the ongoing fetch is terminated, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. Run this step in parallel: transmit bytes. + yield bytes + + // 3. If fetchParams’s process request body is non-null, then run + // fetchParams’s process request body given bytes’s length. + fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) + } + + // 2. Let processEndOfBody be these steps: + const processEndOfBody = () => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. If fetchParams’s process request end-of-body is non-null, + // then run fetchParams’s process request end-of-body. + if (fetchParams.processRequestEndOfBody) { + fetchParams.processRequestEndOfBody() + } + } + + // 3. Let processBodyError given e be these steps: + const processBodyError = (e) => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. + if (e.name === 'AbortError') { + fetchParams.controller.abort() + } else { + fetchParams.controller.terminate(e) + } + } + + // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, + // processBodyError, and fetchParams’s task destination. + requestBody = (async function * () { + try { + for await (const bytes of request.body.stream) { + yield * processBodyChunk(bytes) + } + processEndOfBody() + } catch (err) { + processBodyError(err) + } + })() + } + + try { + // socket is only provided for websockets + const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) + + if (socket) { + response = makeResponse({ status, statusText, headersList, socket }) + } else { + const iterator = body[Symbol.asyncIterator]() + fetchParams.controller.next = () => iterator.next() + + response = makeResponse({ status, statusText, headersList }) + } + } catch (err) { + // 10. If aborted, then: + if (err.name === 'AbortError') { + // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. + fetchParams.controller.connection.destroy() + + // 2. Return the appropriate network error for fetchParams. + return makeAppropriateNetworkError(fetchParams, err) + } + + return makeNetworkError(err) + } + + // 11. Let pullAlgorithm be an action that resumes the ongoing fetch + // if it is suspended. + const pullAlgorithm = () => { + fetchParams.controller.resume() + } + + // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s + // controller with reason, given reason. + const cancelAlgorithm = (reason) => { + fetchParams.controller.abort(reason) + } + + // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by + // the user agent. + // TODO + + // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object + // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. + // TODO + + // 15. Let stream be a new ReadableStream. + // 16. Set up stream with pullAlgorithm set to pullAlgorithm, + // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to + // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. 
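+ // ReadableStream is resolved lazily from 'stream/web' so this path keeps working on
+ // Node.js versions where the constructor is not exposed as a global.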
+ if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + const stream = new ReadableStream( + { + async start (controller) { + fetchParams.controller.controller = controller + }, + async pull (controller) { + await pullAlgorithm(controller) + }, + async cancel (reason) { + await cancelAlgorithm(reason) + } + }, + { + highWaterMark: 0, + size () { + return 1 + } + } + ) + + // 17. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. Set response’s body to a new body whose stream is stream. + response.body = { stream } + + // 2. If response is not a network error and request’s cache mode is + // not "no-store", then update response in httpCache for request. + // TODO + + // 3. If includeCredentials is true and the user agent is not configured + // to block cookies for request (see section 7 of [COOKIES]), then run the + // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on + // the value of each header whose name is a byte-case-insensitive match for + // `Set-Cookie` in response’s header list, if any, and request’s current URL. + // TODO + + // 18. If aborted, then: + // TODO + + // 19. Run these steps in parallel: + + // 1. Run these steps, but abort when fetchParams is canceled: + fetchParams.controller.on('terminated', onAborted) + fetchParams.controller.resume = async () => { + // 1. While true + while (true) { + // 1-3. See onData... + + // 4. Set bytes to the result of handling content codings given + // codings and bytes. + let bytes + let isFailure + try { + const { done, value } = await fetchParams.controller.next() + + if (isAborted(fetchParams)) { + break + } + + bytes = done ? undefined : value + } catch (err) { + if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { + // zlib doesn't like empty streams. + bytes = undefined + } else { + bytes = err + + // err may be propagated from the result of calling readablestream.cancel, + // which might not be an error. https://github.com/nodejs/undici/issues/2009 + isFailure = true + } + } + + if (bytes === undefined) { + // 2. Otherwise, if the bytes transmission for response’s message + // body is done normally and stream is readable, then close + // stream, finalize response for fetchParams and response, and + // abort these in-parallel steps. + readableStreamClose(fetchParams.controller.controller) + + finalizeResponse(fetchParams, response) + + return + } + + // 5. Increase timingInfo’s decoded body size by bytes’s length. + timingInfo.decodedBodySize += bytes?.byteLength ?? 0 + + // 6. If bytes is failure, then terminate fetchParams’s controller. + if (isFailure) { + fetchParams.controller.terminate(bytes) + return + } + + // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes + // into stream. + fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + + // 8. If stream is errored, then terminate the ongoing fetch. + if (isErrored(stream)) { + fetchParams.controller.terminate() + return + } + + // 9. If stream doesn’t need more data ask the user agent to suspend + // the ongoing fetch. + if (!fetchParams.controller.controller.desiredSize) { + return + } + } + } + + // 2. If aborted, then: + function onAborted (reason) { + // 2. If fetchParams is aborted, then: + if (isAborted(fetchParams)) { + // 1. Set response’s aborted flag. + response.aborted = true + + // 2. 
If stream is readable, then error stream with the result of + // deserialize a serialized abort reason given fetchParams’s + // controller’s serialized abort reason and an + // implementation-defined realm. + if (isReadable(stream)) { + fetchParams.controller.controller.error( + fetchParams.controller.serializedAbortReason + ) + } + } else { + // 3. Otherwise, if stream is readable, error stream with a TypeError. + if (isReadable(stream)) { + fetchParams.controller.controller.error(new TypeError('terminated', { + cause: isErrorLike(reason) ? reason : undefined + })) + } + } + + // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. + // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. + fetchParams.controller.connection.destroy() + } + + // 20. Return response. + return response + + async function dispatch ({ body }) { + const url = requestCurrentURL(request) + /** @type {import('../..').Agent} */ + const agent = fetchParams.controller.dispatcher + + return new Promise((resolve, reject) => agent.dispatch( + { + path: url.pathname + url.search, + origin: url.origin, + method: request.method, + body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, + headers: request.headersList.entries, + maxRedirections: 0, + upgrade: request.mode === 'websocket' ? 'websocket' : undefined + }, + { + body: null, + abort: null, + + onConnect (abort) { + // TODO (fix): Do we need connection here? + const { connection } = fetchParams.controller + + if (connection.destroyed) { + abort(new DOMException('The operation was aborted.', 'AbortError')) + } else { + fetchParams.controller.on('terminated', abort) + this.abort = connection.abort = abort + } + }, + + onHeaders (status, headersList, resume, statusText) { + if (status < 200) { + return + } + + let codings = [] + let location = '' + + const headers = new Headers() + + // For H2, the headers are a plain JS object + // We distinguish between them and iterate accordingly + if (Array.isArray(headersList)) { + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()) + } else if (key.toLowerCase() === 'location') { + location = val + } + + headers[kHeadersList].append(key, val) + } + } else { + const keys = Object.keys(headersList) + for (const key of keys) { + const val = headersList[key] + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." 
+ codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() + } else if (key.toLowerCase() === 'location') { + location = val + } + + headers[kHeadersList].append(key, val) + } + } + + this.body = new Readable({ read: resume }) + + const decoders = [] + + const willFollow = request.redirect === 'follow' && + location && + redirectStatusSet.has(status) + + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding + if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { + for (const coding of codings) { + // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 + if (coding === 'x-gzip' || coding === 'gzip') { + decoders.push(zlib.createGunzip({ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })) + } else if (coding === 'deflate') { + decoders.push(zlib.createInflate()) + } else if (coding === 'br') { + decoders.push(zlib.createBrotliDecompress()) + } else { + decoders.length = 0 + break + } + } + } + + resolve({ + status, + statusText, + headersList: headers[kHeadersList], + body: decoders.length + ? pipeline(this.body, ...decoders, () => { }) + : this.body.on('error', () => {}) + }) + + return true + }, + + onData (chunk) { + if (fetchParams.controller.dump) { + return + } + + // 1. If one or more bytes have been transmitted from response’s + // message body, then: + + // 1. Let bytes be the transmitted bytes. + const bytes = chunk + + // 2. Let codings be the result of extracting header list values + // given `Content-Encoding` and response’s header list. + // See pullAlgorithm. + + // 3. Increase timingInfo’s encoded body size by bytes’s length. + timingInfo.encodedBodySize += bytes.byteLength + + // 4. See pullAlgorithm... 
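+ // The return value of push() doubles as the dispatcher’s backpressure signal:
+ // a `false` return pauses delivery until the Readable is read again (its
+ // `read: resume` option calls the dispatcher’s resume callback).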
+ + return this.body.push(bytes) + }, + + onComplete () { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } + + fetchParams.controller.ended = true + + this.body.push(null) + }, + + onError (error) { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } + + this.body?.destroy(error) + + fetchParams.controller.terminate(error) + + reject(error) + }, + + onUpgrade (status, headersList, socket) { + if (status !== 101) { + return + } + + const headers = new Headers() + + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + + headers[kHeadersList].append(key, val) + } + + resolve({ + status, + statusText: STATUS_CODES[status], + headersList: headers[kHeadersList], + socket + }) + + return true + } + } + )) + } +} + +module.exports = { + fetch, + Fetch, + fetching, + finalizeAndReportTiming +} diff --git a/lib/fetch/request.js b/lib/fetch/request.js new file mode 100644 index 0000000..6fe4dff --- /dev/null +++ b/lib/fetch/request.js @@ -0,0 +1,946 @@ +/* globals AbortController */ + +'use strict' + +const { extractBody, mixinBody, cloneBody } = require('./body') +const { Headers, fill: fillHeaders, HeadersList } = require('./headers') +const { FinalizationRegistry } = require('../compat/dispatcher-weakref')() +const util = require('../core/util') +const { + isValidHTTPToken, + sameOrigin, + normalizeMethod, + makePolicyContainer, + normalizeMethodRecord +} = require('./util') +const { + forbiddenMethodsSet, + corsSafeListedMethodsSet, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + requestDuplex +} = require('./constants') +const { kEnumerableProperty } = util +const { kHeaders, kSignal, kState, kGuard, kRealm } = require('./symbols') +const { webidl } = require('./webidl') +const { getGlobalOrigin } = require('./global') +const { URLSerializer } = require('./dataURL') +const { kHeadersList, kConstruct } = require('../core/symbols') +const assert = require('assert') +const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = require('events') + +let TransformStream = globalThis.TransformStream + +const kAbortController = Symbol('abortController') + +const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { + signal.removeEventListener('abort', abort) +}) + +// https://fetch.spec.whatwg.org/#request-class +class Request { + // https://fetch.spec.whatwg.org/#dom-request + constructor (input, init = {}) { + if (input === kConstruct) { + return + } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) + + input = webidl.converters.RequestInfo(input) + init = webidl.converters.RequestInit(init) + + // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object + this[kRealm] = { + settingsObject: { + baseUrl: getGlobalOrigin(), + get origin () { + return this.baseUrl?.origin + }, + policyContainer: makePolicyContainer() + } + } + + // 1. Let request be null. + let request = null + + // 2. Let fallbackMode be null. + let fallbackMode = null + + // 3. Let baseURL be this’s relevant settings object’s API base URL. + const baseUrl = this[kRealm].settingsObject.baseUrl + + // 4. Let signal be null. + let signal = null + + // 5. If input is a string, then: + if (typeof input === 'string') { + // 1. Let parsedURL be the result of parsing input with baseURL. + // 2. If parsedURL is failure, then throw a TypeError. 
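+ // Illustrative examples: new Request('/api') resolves '/api' against the global
+ // origin provided by getGlobalOrigin() above, while an unparsable string such as
+ // 'http://' falls into the catch block below and is rethrown as a TypeError.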
+ let parsedURL + try { + parsedURL = new URL(input, baseUrl) + } catch (err) { + throw new TypeError('Failed to parse URL from ' + input, { cause: err }) + } + + // 3. If parsedURL includes credentials, then throw a TypeError. + if (parsedURL.username || parsedURL.password) { + throw new TypeError( + 'Request cannot be constructed from a URL that includes credentials: ' + + input + ) + } + + // 4. Set request to a new request whose URL is parsedURL. + request = makeRequest({ urlList: [parsedURL] }) + + // 5. Set fallbackMode to "cors". + fallbackMode = 'cors' + } else { + // 6. Otherwise: + + // 7. Assert: input is a Request object. + assert(input instanceof Request) + + // 8. Set request to input’s request. + request = input[kState] + + // 9. Set signal to input’s signal. + signal = input[kSignal] + } + + // 7. Let origin be this’s relevant settings object’s origin. + const origin = this[kRealm].settingsObject.origin + + // 8. Let window be "client". + let window = 'client' + + // 9. If request’s window is an environment settings object and its origin + // is same origin with origin, then set window to request’s window. + if ( + request.window?.constructor?.name === 'EnvironmentSettingsObject' && + sameOrigin(request.window, origin) + ) { + window = request.window + } + + // 10. If init["window"] exists and is non-null, then throw a TypeError. + if (init.window != null) { + throw new TypeError(`'window' option '${window}' must be null`) + } + + // 11. If init["window"] exists, then set window to "no-window". + if ('window' in init) { + window = 'no-window' + } + + // 12. Set request to a new request with the following properties: + request = makeRequest({ + // URL request’s URL. + // undici implementation note: this is set as the first item in request's urlList in makeRequest + // method request’s method. + method: request.method, + // header list A copy of request’s header list. + // undici implementation note: headersList is cloned in makeRequest + headersList: request.headersList, + // unsafe-request flag Set. + unsafeRequest: request.unsafeRequest, + // client This’s relevant settings object. + client: this[kRealm].settingsObject, + // window window. + window, + // priority request’s priority. + priority: request.priority, + // origin request’s origin. The propagation of the origin is only significant for navigation requests + // being handled by a service worker. In this scenario a request can have an origin that is different + // from the current client. + origin: request.origin, + // referrer request’s referrer. + referrer: request.referrer, + // referrer policy request’s referrer policy. + referrerPolicy: request.referrerPolicy, + // mode request’s mode. + mode: request.mode, + // credentials mode request’s credentials mode. + credentials: request.credentials, + // cache mode request’s cache mode. + cache: request.cache, + // redirect mode request’s redirect mode. + redirect: request.redirect, + // integrity metadata request’s integrity metadata. + integrity: request.integrity, + // keepalive request’s keepalive. + keepalive: request.keepalive, + // reload-navigation flag request’s reload-navigation flag. + reloadNavigation: request.reloadNavigation, + // history-navigation flag request’s history-navigation flag. + historyNavigation: request.historyNavigation, + // URL list A clone of request’s URL list. + urlList: [...request.urlList] + }) + + const initHasKey = Object.keys(init).length !== 0 + + // 13. If init is not empty, then: + if (initHasKey) { + // 1. 
If request’s mode is "navigate", then set it to "same-origin". + if (request.mode === 'navigate') { + request.mode = 'same-origin' + } + + // 2. Unset request’s reload-navigation flag. + request.reloadNavigation = false + + // 3. Unset request’s history-navigation flag. + request.historyNavigation = false + + // 4. Set request’s origin to "client". + request.origin = 'client' + + // 5. Set request’s referrer to "client" + request.referrer = 'client' + + // 6. Set request’s referrer policy to the empty string. + request.referrerPolicy = '' + + // 7. Set request’s URL to request’s current URL. + request.url = request.urlList[request.urlList.length - 1] + + // 8. Set request’s URL list to « request’s URL ». + request.urlList = [request.url] + } + + // 14. If init["referrer"] exists, then: + if (init.referrer !== undefined) { + // 1. Let referrer be init["referrer"]. + const referrer = init.referrer + + // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". + if (referrer === '') { + request.referrer = 'no-referrer' + } else { + // 1. Let parsedReferrer be the result of parsing referrer with + // baseURL. + // 2. If parsedReferrer is failure, then throw a TypeError. + let parsedReferrer + try { + parsedReferrer = new URL(referrer, baseUrl) + } catch (err) { + throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) + } + + // 3. If one of the following is true + // - parsedReferrer’s scheme is "about" and path is the string "client" + // - parsedReferrer’s origin is not same origin with origin + // then set request’s referrer to "client". + if ( + (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || + (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) + ) { + request.referrer = 'client' + } else { + // 4. Otherwise, set request’s referrer to parsedReferrer. + request.referrer = parsedReferrer + } + } + } + + // 15. If init["referrerPolicy"] exists, then set request’s referrer policy + // to it. + if (init.referrerPolicy !== undefined) { + request.referrerPolicy = init.referrerPolicy + } + + // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. + let mode + if (init.mode !== undefined) { + mode = init.mode + } else { + mode = fallbackMode + } + + // 17. If mode is "navigate", then throw a TypeError. + if (mode === 'navigate') { + throw webidl.errors.exception({ + header: 'Request constructor', + message: 'invalid request mode navigate.' + }) + } + + // 18. If mode is non-null, set request’s mode to mode. + if (mode != null) { + request.mode = mode + } + + // 19. If init["credentials"] exists, then set request’s credentials mode + // to it. + if (init.credentials !== undefined) { + request.credentials = init.credentials + } + + // 18. If init["cache"] exists, then set request’s cache mode to it. + if (init.cache !== undefined) { + request.cache = init.cache + } + + // 21. If request’s cache mode is "only-if-cached" and request’s mode is + // not "same-origin", then throw a TypeError. + if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { + throw new TypeError( + "'only-if-cached' can be set only with 'same-origin' mode" + ) + } + + // 22. If init["redirect"] exists, then set request’s redirect mode to it. + if (init.redirect !== undefined) { + request.redirect = init.redirect + } + + // 23. If init["integrity"] exists, then set request’s integrity metadata to it. 
+ if (init.integrity != null) { + request.integrity = String(init.integrity) + } + + // 24. If init["keepalive"] exists, then set request’s keepalive to it. + if (init.keepalive !== undefined) { + request.keepalive = Boolean(init.keepalive) + } + + // 25. If init["method"] exists, then: + if (init.method !== undefined) { + // 1. Let method be init["method"]. + let method = init.method + + // 2. If method is not a method or method is a forbidden method, then + // throw a TypeError. + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) + } + + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) + } + + // 3. Normalize method. + method = normalizeMethodRecord[method] ?? normalizeMethod(method) + + // 4. Set request’s method to method. + request.method = method + } + + // 26. If init["signal"] exists, then set signal to it. + if (init.signal !== undefined) { + signal = init.signal + } + + // 27. Set this’s request to request. + this[kState] = request + + // 28. Set this’s signal to a new AbortSignal object with this’s relevant + // Realm. + // TODO: could this be simplified with AbortSignal.any + // (https://dom.spec.whatwg.org/#dom-abortsignal-any) + const ac = new AbortController() + this[kSignal] = ac.signal + this[kSignal][kRealm] = this[kRealm] + + // 29. If signal is not null, then make this’s signal follow signal. + if (signal != null) { + if ( + !signal || + typeof signal.aborted !== 'boolean' || + typeof signal.addEventListener !== 'function' + ) { + throw new TypeError( + "Failed to construct 'Request': member signal is not of type AbortSignal." + ) + } + + if (signal.aborted) { + ac.abort(signal.reason) + } else { + // Keep a strong ref to ac while request object + // is alive. This is needed to prevent AbortController + // from being prematurely garbage collected. + // See, https://github.com/nodejs/undici/issues/1926. + this[kAbortController] = ac + + const acRef = new WeakRef(ac) + const abort = function () { + const ac = acRef.deref() + if (ac !== undefined) { + ac.abort(this.reason) + } + } + + // Third-party AbortControllers may not work with these. + // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. + try { + // If the max amount of listeners is equal to the default, increase it + // This is only available in node >= v19.9.0 + if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { + setMaxListeners(100, signal) + } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { + setMaxListeners(100, signal) + } + } catch {} + + util.addAbortListener(signal, abort) + requestFinalizer.register(ac, { signal, abort }) + } + } + + // 30. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is request’s header list and guard is + // "request". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kHeadersList] = request.headersList + this[kHeaders][kGuard] = 'request' + this[kHeaders][kRealm] = this[kRealm] + + // 31. If this’s request’s mode is "no-cors", then: + if (mode === 'no-cors') { + // 1. If this’s request’s method is not a CORS-safelisted method, + // then throw a TypeError. + if (!corsSafeListedMethodsSet.has(request.method)) { + throw new TypeError( + `'${request.method} is unsupported in no-cors mode.` + ) + } + + // 2. Set this’s headers’s guard to "request-no-cors". + this[kHeaders][kGuard] = 'request-no-cors' + } + + // 32. 
If init is not empty, then: + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] + // 1. Let headers be a copy of this’s headers and its associated header + // list. + // 2. If init["headers"] exists, then set headers to init["headers"]. + const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) + + // 3. Empty this’s headers’s header list. + headersList.clear() + + // 4. If headers is a Headers object, then for each header in its header + // list, append header’s name/header’s value to this’s headers. + if (headers instanceof HeadersList) { + for (const [key, val] of headers) { + headersList.append(key, val) + } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies + } else { + // 5. Otherwise, fill this’s headers with headers. + fillHeaders(this[kHeaders], headers) + } + } + + // 33. Let inputBody be input’s request’s body if input is a Request + // object; otherwise null. + const inputBody = input instanceof Request ? input[kState].body : null + + // 34. If either init["body"] exists and is non-null or inputBody is + // non-null, and request’s method is `GET` or `HEAD`, then throw a + // TypeError. + if ( + (init.body != null || inputBody != null) && + (request.method === 'GET' || request.method === 'HEAD') + ) { + throw new TypeError('Request with GET/HEAD method cannot have body.') + } + + // 35. Let initBody be null. + let initBody = null + + // 36. If init["body"] exists and is non-null, then: + if (init.body != null) { + // 1. Let Content-Type be null. + // 2. Set initBody and Content-Type to the result of extracting + // init["body"], with keepalive set to request’s keepalive. + const [extractedBody, contentType] = extractBody( + init.body, + request.keepalive + ) + initBody = extractedBody + + // 3, If Content-Type is non-null and this’s headers’s header list does + // not contain `Content-Type`, then append `Content-Type`/Content-Type to + // this’s headers. + if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { + this[kHeaders].append('content-type', contentType) + } + } + + // 37. Let inputOrInitBody be initBody if it is non-null; otherwise + // inputBody. + const inputOrInitBody = initBody ?? inputBody + + // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is + // null, then: + if (inputOrInitBody != null && inputOrInitBody.source == null) { + // 1. If initBody is non-null and init["duplex"] does not exist, + // then throw a TypeError. + if (initBody != null && init.duplex == null) { + throw new TypeError('RequestInit: duplex option is required when sending a body.') + } + + // 2. If this’s request’s mode is neither "same-origin" nor "cors", + // then throw a TypeError. + if (request.mode !== 'same-origin' && request.mode !== 'cors') { + throw new TypeError( + 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' + ) + } + + // 3. Set this’s request’s use-CORS-preflight flag. + request.useCORSPreflightFlag = true + } + + // 39. Let finalBody be inputOrInitBody. + let finalBody = inputOrInitBody + + // 40. If initBody is null and inputBody is non-null, then: + if (initBody == null && inputBody != null) { + // 1. If input is unusable, then throw a TypeError. + if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { + throw new TypeError( + 'Cannot construct a Request with a Request object that has already been used.' + ) + } + + // 2. 
Set finalBody to the result of creating a proxy for inputBody. + if (!TransformStream) { + TransformStream = require('stream/web').TransformStream + } + + // https://streams.spec.whatwg.org/#readablestream-create-a-proxy + const identityTransform = new TransformStream() + inputBody.stream.pipeThrough(identityTransform) + finalBody = { + source: inputBody.source, + length: inputBody.length, + stream: identityTransform.readable + } + } + + // 41. Set this’s request’s body to finalBody. + this[kState].body = finalBody + } + + // Returns request’s HTTP method, which is "GET" by default. + get method () { + webidl.brandCheck(this, Request) + + // The method getter steps are to return this’s request’s method. + return this[kState].method + } + + // Returns the URL of request as a string. + get url () { + webidl.brandCheck(this, Request) + + // The url getter steps are to return this’s request’s URL, serialized. + return URLSerializer(this[kState].url) + } + + // Returns a Headers object consisting of the headers associated with request. + // Note that headers added in the network layer by the user agent will not + // be accounted for in this object, e.g., the "Host" header. + get headers () { + webidl.brandCheck(this, Request) + + // The headers getter steps are to return this’s headers. + return this[kHeaders] + } + + // Returns the kind of resource requested by request, e.g., "document" + // or "script". + get destination () { + webidl.brandCheck(this, Request) + + // The destination getter are to return this’s request’s destination. + return this[kState].destination + } + + // Returns the referrer of request. Its value can be a same-origin URL if + // explicitly set in init, the empty string to indicate no referrer, and + // "about:client" when defaulting to the global’s default. This is used + // during fetching to determine the value of the `Referer` header of the + // request being made. + get referrer () { + webidl.brandCheck(this, Request) + + // 1. If this’s request’s referrer is "no-referrer", then return the + // empty string. + if (this[kState].referrer === 'no-referrer') { + return '' + } + + // 2. If this’s request’s referrer is "client", then return + // "about:client". + if (this[kState].referrer === 'client') { + return 'about:client' + } + + // Return this’s request’s referrer, serialized. + return this[kState].referrer.toString() + } + + // Returns the referrer policy associated with request. + // This is used during fetching to compute the value of the request’s + // referrer. + get referrerPolicy () { + webidl.brandCheck(this, Request) + + // The referrerPolicy getter steps are to return this’s request’s referrer policy. + return this[kState].referrerPolicy + } + + // Returns the mode associated with request, which is a string indicating + // whether the request will use CORS, or will be restricted to same-origin + // URLs. + get mode () { + webidl.brandCheck(this, Request) + + // The mode getter steps are to return this’s request’s mode. + return this[kState].mode + } + + // Returns the credentials mode associated with request, + // which is a string indicating whether credentials will be sent with the + // request always, never, or only when sent to a same-origin URL. + get credentials () { + // The credentials getter steps are to return this’s request’s credentials mode. + return this[kState].credentials + } + + // Returns the cache mode associated with request, + // which is a string indicating how the request will + // interact with the browser’s cache when fetching. 
+ get cache () { + webidl.brandCheck(this, Request) + + // The cache getter steps are to return this’s request’s cache mode. + return this[kState].cache + } + + // Returns the redirect mode associated with request, + // which is a string indicating how redirects for the + // request will be handled during fetching. A request + // will follow redirects by default. + get redirect () { + webidl.brandCheck(this, Request) + + // The redirect getter steps are to return this’s request’s redirect mode. + return this[kState].redirect + } + + // Returns request’s subresource integrity metadata, which is a + // cryptographic hash of the resource being fetched. Its value + // consists of multiple hashes separated by whitespace. [SRI] + get integrity () { + webidl.brandCheck(this, Request) + + // The integrity getter steps are to return this’s request’s integrity + // metadata. + return this[kState].integrity + } + + // Returns a boolean indicating whether or not request can outlive the + // global in which it was created. + get keepalive () { + webidl.brandCheck(this, Request) + + // The keepalive getter steps are to return this’s request’s keepalive. + return this[kState].keepalive + } + + // Returns a boolean indicating whether or not request is for a reload + // navigation. + get isReloadNavigation () { + webidl.brandCheck(this, Request) + + // The isReloadNavigation getter steps are to return true if this’s + // request’s reload-navigation flag is set; otherwise false. + return this[kState].reloadNavigation + } + + // Returns a boolean indicating whether or not request is for a history + // navigation (a.k.a. back-foward navigation). + get isHistoryNavigation () { + webidl.brandCheck(this, Request) + + // The isHistoryNavigation getter steps are to return true if this’s request’s + // history-navigation flag is set; otherwise false. + return this[kState].historyNavigation + } + + // Returns the signal associated with request, which is an AbortSignal + // object indicating whether or not request has been aborted, and its + // abort event handler. + get signal () { + webidl.brandCheck(this, Request) + + // The signal getter steps are to return this’s signal. + return this[kSignal] + } + + get body () { + webidl.brandCheck(this, Request) + + return this[kState].body ? this[kState].body.stream : null + } + + get bodyUsed () { + webidl.brandCheck(this, Request) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } + + get duplex () { + webidl.brandCheck(this, Request) + + return 'half' + } + + // Returns a clone of request. + clone () { + webidl.brandCheck(this, Request) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || this.body?.locked) { + throw new TypeError('unusable') + } + + // 2. Let clonedRequest be the result of cloning this’s request. + const clonedRequest = cloneRequest(this[kState]) + + // 3. Let clonedRequestObject be the result of creating a Request object, + // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. + const clonedRequestObject = new Request(kConstruct) + clonedRequestObject[kState] = clonedRequest + clonedRequestObject[kRealm] = this[kRealm] + clonedRequestObject[kHeaders] = new Headers(kConstruct) + clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList + clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + // 4. Make clonedRequestObject’s signal follow this’s signal. 
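+ // A fresh AbortController backs the clone’s signal: it is aborted immediately if
+ // this signal has already fired, otherwise an abort listener forwards the original
+ // abort reason to the clone.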
+ const ac = new AbortController() + if (this.signal.aborted) { + ac.abort(this.signal.reason) + } else { + util.addAbortListener( + this.signal, + () => { + ac.abort(this.signal.reason) + } + ) + } + clonedRequestObject[kSignal] = ac.signal + + // 4. Return clonedRequestObject. + return clonedRequestObject + } +} + +mixinBody(Request) + +function makeRequest (init) { + // https://fetch.spec.whatwg.org/#requests + const request = { + method: 'GET', + localURLsOnly: false, + unsafeRequest: false, + body: null, + client: null, + reservedClient: null, + replacesClientId: '', + window: 'client', + keepalive: false, + serviceWorkers: 'all', + initiator: '', + destination: '', + priority: null, + origin: 'client', + policyContainer: 'client', + referrer: 'client', + referrerPolicy: '', + mode: 'no-cors', + useCORSPreflightFlag: false, + credentials: 'same-origin', + useCredentials: false, + cache: 'default', + redirect: 'follow', + integrity: '', + cryptoGraphicsNonceMetadata: '', + parserMetadata: '', + reloadNavigation: false, + historyNavigation: false, + userActivation: false, + taintedOrigin: false, + redirectCount: 0, + responseTainting: 'basic', + preventNoCacheCacheControlHeaderModification: false, + done: false, + timingAllowFailed: false, + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList() + } + request.url = request.urlList[0] + return request +} + +// https://fetch.spec.whatwg.org/#concept-request-clone +function cloneRequest (request) { + // To clone a request request, run these steps: + + // 1. Let newRequest be a copy of request, except for its body. + const newRequest = makeRequest({ ...request, body: null }) + + // 2. If request’s body is non-null, set newRequest’s body to the + // result of cloning request’s body. + if (request.body != null) { + newRequest.body = cloneBody(request.body) + } + + // 3. Return newRequest. 
+ return newRequest +} + +Object.defineProperties(Request.prototype, { + method: kEnumerableProperty, + url: kEnumerableProperty, + headers: kEnumerableProperty, + redirect: kEnumerableProperty, + clone: kEnumerableProperty, + signal: kEnumerableProperty, + duplex: kEnumerableProperty, + destination: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + isHistoryNavigation: kEnumerableProperty, + isReloadNavigation: kEnumerableProperty, + keepalive: kEnumerableProperty, + integrity: kEnumerableProperty, + cache: kEnumerableProperty, + credentials: kEnumerableProperty, + attribute: kEnumerableProperty, + referrerPolicy: kEnumerableProperty, + referrer: kEnumerableProperty, + mode: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Request', + configurable: true + } +}) + +webidl.converters.Request = webidl.interfaceConverter( + Request +) + +// https://fetch.spec.whatwg.org/#requestinfo +webidl.converters.RequestInfo = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) + } + + if (V instanceof Request) { + return webidl.converters.Request(V) + } + + return webidl.converters.USVString(V) +} + +webidl.converters.AbortSignal = webidl.interfaceConverter( + AbortSignal +) + +// https://fetch.spec.whatwg.org/#requestinit +webidl.converters.RequestInit = webidl.dictionaryConverter([ + { + key: 'method', + converter: webidl.converters.ByteString + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + }, + { + key: 'body', + converter: webidl.nullableConverter( + webidl.converters.BodyInit + ) + }, + { + key: 'referrer', + converter: webidl.converters.USVString + }, + { + key: 'referrerPolicy', + converter: webidl.converters.DOMString, + // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy + allowedValues: referrerPolicy + }, + { + key: 'mode', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#concept-request-mode + allowedValues: requestMode + }, + { + key: 'credentials', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcredentials + allowedValues: requestCredentials + }, + { + key: 'cache', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcache + allowedValues: requestCache + }, + { + key: 'redirect', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestredirect + allowedValues: requestRedirect + }, + { + key: 'integrity', + converter: webidl.converters.DOMString + }, + { + key: 'keepalive', + converter: webidl.converters.boolean + }, + { + key: 'signal', + converter: webidl.nullableConverter( + (signal) => webidl.converters.AbortSignal( + signal, + { strict: false } + ) + ) + }, + { + key: 'window', + converter: webidl.converters.any + }, + { + key: 'duplex', + converter: webidl.converters.DOMString, + allowedValues: requestDuplex + } +]) + +module.exports = { Request, makeRequest } diff --git a/lib/fetch/response.js b/lib/fetch/response.js new file mode 100644 index 0000000..7338612 --- /dev/null +++ b/lib/fetch/response.js @@ -0,0 +1,571 @@ +'use strict' + +const { Headers, HeadersList, fill } = require('./headers') +const { extractBody, cloneBody, mixinBody } = require('./body') +const util = require('../core/util') +const { kEnumerableProperty } = util +const { + isValidReasonPhrase, + isCancelled, + isAborted, + isBlobLike, + serializeJavascriptValueToJSONString, + isErrorLike, + isomorphicEncode +} = require('./util') +const { + redirectStatusSet, + 
nullBodyStatus, + DOMException +} = require('./constants') +const { kState, kHeaders, kGuard, kRealm } = require('./symbols') +const { webidl } = require('./webidl') +const { FormData } = require('./formdata') +const { getGlobalOrigin } = require('./global') +const { URLSerializer } = require('./dataURL') +const { kHeadersList, kConstruct } = require('../core/symbols') +const assert = require('assert') +const { types } = require('util') + +const ReadableStream = globalThis.ReadableStream || require('stream/web').ReadableStream +const textEncoder = new TextEncoder('utf-8') + +// https://fetch.spec.whatwg.org/#response-class +class Response { + // Creates network error Response. + static error () { + // TODO + const relevantRealm = { settingsObject: {} } + + // The static error() method steps are to return the result of creating a + // Response object, given a new network error, "immutable", and this’s + // relevant Realm. + const responseObject = new Response() + responseObject[kState] = makeNetworkError() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response-json + static json (data, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) + + if (init !== null) { + init = webidl.converters.ResponseInit(init) + } + + // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. + const bytes = textEncoder.encode( + serializeJavascriptValueToJSONString(data) + ) + + // 2. Let body be the result of extracting bytes. + const body = extractBody(bytes) + + // 3. Let responseObject be the result of creating a Response object, given a new response, + // "response", and this’s relevant Realm. + const relevantRealm = { settingsObject: {} } + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'response' + responseObject[kHeaders][kRealm] = relevantRealm + + // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). + initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) + + // 5. Return responseObject. + return responseObject + } + + // Creates a redirect Response that redirects to url with status status. + static redirect (url, status = 302) { + const relevantRealm = { settingsObject: {} } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) + + url = webidl.converters.USVString(url) + status = webidl.converters['unsigned short'](status) + + // 1. Let parsedURL be the result of parsing url with current settings + // object’s API base URL. + // 2. If parsedURL is failure, then throw a TypeError. + // TODO: base-URL? + let parsedURL + try { + parsedURL = new URL(url, getGlobalOrigin()) + } catch (err) { + throw Object.assign(new TypeError('Failed to parse URL from ' + url), { + cause: err + }) + } + + // 3. If status is not a redirect status, then throw a RangeError. + if (!redirectStatusSet.has(status)) { + throw new RangeError('Invalid status code ' + status) + } + + // 4. Let responseObject be the result of creating a Response object, + // given a new response, "immutable", and this’s relevant Realm. 
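+ // Illustrative example: Response.redirect('https://example.com/', 301) yields an
+ // immutable-header response whose status is 301 and whose `location` header is the
+ // serialized target URL.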
+ const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Set responseObject’s response’s status to status. + responseObject[kState].status = status + + // 6. Let value be parsedURL, serialized and isomorphic encoded. + const value = isomorphicEncode(URLSerializer(parsedURL)) + + // 7. Append `Location`/value to responseObject’s response’s header list. + responseObject[kState].headersList.append('location', value) + + // 8. Return responseObject. + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response + constructor (body = null, init = {}) { + if (body !== null) { + body = webidl.converters.BodyInit(body) + } + + init = webidl.converters.ResponseInit(init) + + // TODO + this[kRealm] = { settingsObject: {} } + + // 1. Set this’s response to a new response. + this[kState] = makeResponse({}) + + // 2. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is this’s response’s header list and guard + // is "response". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kGuard] = 'response' + this[kHeaders][kHeadersList] = this[kState].headersList + this[kHeaders][kRealm] = this[kRealm] + + // 3. Let bodyWithType be null. + let bodyWithType = null + + // 4. If body is non-null, then set bodyWithType to the result of extracting body. + if (body != null) { + const [extractedBody, type] = extractBody(body) + bodyWithType = { body: extractedBody, type } + } + + // 5. Perform initialize a response given this, init, and bodyWithType. + initializeResponse(this, init, bodyWithType) + } + + // Returns response’s type, e.g., "cors". + get type () { + webidl.brandCheck(this, Response) + + // The type getter steps are to return this’s response’s type. + return this[kState].type + } + + // Returns response’s URL, if it has one; otherwise the empty string. + get url () { + webidl.brandCheck(this, Response) + + const urlList = this[kState].urlList + + // The url getter steps are to return the empty string if this’s + // response’s URL is null; otherwise this’s response’s URL, + // serialized with exclude fragment set to true. + const url = urlList[urlList.length - 1] ?? null + + if (url === null) { + return '' + } + + return URLSerializer(url, true) + } + + // Returns whether response was obtained through a redirect. + get redirected () { + webidl.brandCheck(this, Response) + + // The redirected getter steps are to return true if this’s response’s URL + // list has more than one item; otherwise false. + return this[kState].urlList.length > 1 + } + + // Returns response’s status. + get status () { + webidl.brandCheck(this, Response) + + // The status getter steps are to return this’s response’s status. + return this[kState].status + } + + // Returns whether response’s status is an ok status. + get ok () { + webidl.brandCheck(this, Response) + + // The ok getter steps are to return true if this’s response’s status is an + // ok status; otherwise false. + return this[kState].status >= 200 && this[kState].status <= 299 + } + + // Returns response’s status message. + get statusText () { + webidl.brandCheck(this, Response) + + // The statusText getter steps are to return this’s response’s status + // message. + return this[kState].statusText + } + + // Returns response’s headers as Headers. + get headers () { + webidl.brandCheck(this, Response) + + // The headers getter steps are to return this’s headers. 
+ return this[kHeaders] + } + + get body () { + webidl.brandCheck(this, Response) + + return this[kState].body ? this[kState].body.stream : null + } + + get bodyUsed () { + webidl.brandCheck(this, Response) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } + + // Returns a clone of response. + clone () { + webidl.brandCheck(this, Response) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || (this.body && this.body.locked)) { + throw webidl.errors.exception({ + header: 'Response.clone', + message: 'Body has already been consumed.' + }) + } + + // 2. Let clonedResponse be the result of cloning this’s response. + const clonedResponse = cloneResponse(this[kState]) + + // 3. Return the result of creating a Response object, given + // clonedResponse, this’s headers’s guard, and this’s relevant Realm. + const clonedResponseObject = new Response() + clonedResponseObject[kState] = clonedResponse + clonedResponseObject[kRealm] = this[kRealm] + clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList + clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + return clonedResponseObject + } +} + +mixinBody(Response) + +Object.defineProperties(Response.prototype, { + type: kEnumerableProperty, + url: kEnumerableProperty, + status: kEnumerableProperty, + ok: kEnumerableProperty, + redirected: kEnumerableProperty, + statusText: kEnumerableProperty, + headers: kEnumerableProperty, + clone: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Response', + configurable: true + } +}) + +Object.defineProperties(Response, { + json: kEnumerableProperty, + redirect: kEnumerableProperty, + error: kEnumerableProperty +}) + +// https://fetch.spec.whatwg.org/#concept-response-clone +function cloneResponse (response) { + // To clone a response response, run these steps: + + // 1. If response is a filtered response, then return a new identical + // filtered response whose internal response is a clone of response’s + // internal response. + if (response.internalResponse) { + return filterResponse( + cloneResponse(response.internalResponse), + response.type + ) + } + + // 2. Let newResponse be a copy of response, except for its body. + const newResponse = makeResponse({ ...response, body: null }) + + // 3. If response’s body is non-null, then set newResponse’s body to the + // result of cloning response’s body. + if (response.body != null) { + newResponse.body = cloneBody(response.body) + } + + // 4. Return newResponse. + return newResponse +} + +function makeResponse (init) { + return { + aborted: false, + rangeRequested: false, + timingAllowPassed: false, + requestIncludesCredentials: false, + type: 'default', + status: 200, + timingInfo: null, + cacheState: '', + statusText: '', + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList(), + urlList: init.urlList ? [...init.urlList] : [] + } +} + +function makeNetworkError (reason) { + const isError = isErrorLike(reason) + return makeResponse({ + type: 'error', + status: 0, + error: isError + ? reason + : new Error(reason ? String(reason) : reason), + aborted: reason && reason.name === 'AbortError' + }) +} + +function makeFilteredResponse (response, state) { + state = { + internalResponse: response, + ...state + } + + return new Proxy(response, { + get (target, p) { + return p in state ? 
state[p] : target[p] + }, + set (target, p, value) { + assert(!(p in state)) + target[p] = value + return true + } + }) +} + +// https://fetch.spec.whatwg.org/#concept-filtered-response +function filterResponse (response, type) { + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (type === 'basic') { + // A basic filtered response is a filtered response whose type is "basic" + // and header list excludes any headers in internal response’s header list + // whose name is a forbidden response-header name. + + // Note: undici does not implement forbidden response-header names + return makeFilteredResponse(response, { + type: 'basic', + headersList: response.headersList + }) + } else if (type === 'cors') { + // A CORS filtered response is a filtered response whose type is "cors" + // and header list excludes any headers in internal response’s header + // list whose name is not a CORS-safelisted response-header name, given + // internal response’s CORS-exposed header-name list. + + // Note: undici does not implement CORS-safelisted response-header names + return makeFilteredResponse(response, { + type: 'cors', + headersList: response.headersList + }) + } else if (type === 'opaque') { + // An opaque filtered response is a filtered response whose type is + // "opaque", URL list is the empty list, status is 0, status message + // is the empty byte sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaque', + urlList: Object.freeze([]), + status: 0, + statusText: '', + body: null + }) + } else if (type === 'opaqueredirect') { + // An opaque-redirect filtered response is a filtered response whose type + // is "opaqueredirect", status is 0, status message is the empty byte + // sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaqueredirect', + status: 0, + statusText: '', + headersList: [], + body: null + }) + } else { + assert(false) + } +} + +// https://fetch.spec.whatwg.org/#appropriate-network-error +function makeAppropriateNetworkError (fetchParams, err = null) { + // 1. Assert: fetchParams is canceled. + assert(isCancelled(fetchParams)) + + // 2. Return an aborted network error if fetchParams is aborted; + // otherwise return a network error. + return isAborted(fetchParams) + ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) + : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) +} + +// https://whatpr.org/fetch/1392.html#initialize-a-response +function initializeResponse (response, init, body) { + // 1. If init["status"] is not in the range 200 to 599, inclusive, then + // throw a RangeError. + if (init.status !== null && (init.status < 200 || init.status > 599)) { + throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') + } + + // 2. If init["statusText"] does not match the reason-phrase token production, + // then throw a TypeError. + if ('statusText' in init && init.statusText != null) { + // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: + // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) + if (!isValidReasonPhrase(String(init.statusText))) { + throw new TypeError('Invalid statusText') + } + } + + // 3. Set response’s response’s status to init["status"]. 
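+  // Illustrative usage sketch (not in the original source): the Response
+  // surface defined above behaves like its WHATWG counterpart; the URLs and
+  // values below are arbitrary examples:
+  //
+  //   Response.json({ ok: true }, { status: 201 })   // JSON body, content-type set
+  //   Response.redirect('https://example.com/', 307) // Location header, status 307
+  //   new Response('hello', { status: 200, statusText: 'OK' })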
+ if ('status' in init && init.status != null) { + response[kState].status = init.status + } + + // 4. Set response’s response’s status message to init["statusText"]. + if ('statusText' in init && init.statusText != null) { + response[kState].statusText = init.statusText + } + + // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. + if ('headers' in init && init.headers != null) { + fill(response[kHeaders], init.headers) + } + + // 6. If body was given, then: + if (body) { + // 1. If response's status is a null body status, then throw a TypeError. + if (nullBodyStatus.includes(response.status)) { + throw webidl.errors.exception({ + header: 'Response constructor', + message: 'Invalid response status code ' + response.status + }) + } + + // 2. Set response's body to body's body. + response[kState].body = body.body + + // 3. If body's type is non-null and response's header list does not contain + // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. + if (body.type != null && !response[kState].headersList.contains('Content-Type')) { + response[kState].headersList.append('content-type', body.type) + } + } +} + +webidl.converters.ReadableStream = webidl.interfaceConverter( + ReadableStream +) + +webidl.converters.FormData = webidl.interfaceConverter( + FormData +) + +webidl.converters.URLSearchParams = webidl.interfaceConverter( + URLSearchParams +) + +// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit +webidl.converters.XMLHttpRequestBodyInit = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) + } + + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + return webidl.converters.BufferSource(V) + } + + if (util.isFormDataLike(V)) { + return webidl.converters.FormData(V, { strict: false }) + } + + if (V instanceof URLSearchParams) { + return webidl.converters.URLSearchParams(V) + } + + return webidl.converters.DOMString(V) +} + +// https://fetch.spec.whatwg.org/#bodyinit +webidl.converters.BodyInit = function (V) { + if (V instanceof ReadableStream) { + return webidl.converters.ReadableStream(V) + } + + // Note: the spec doesn't include async iterables, + // this is an undici extension. 
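+  // Illustrative usage sketch (not in the original source): because BodyInit
+  // accepts async iterables here, a caller could stream a request body from an
+  // async generator. Assuming undici's exported fetch and a local test server:
+  //
+  //   async function * chunks () {
+  //     yield 'hello '
+  //     yield 'world'
+  //   }
+  //   await fetch('http://localhost:3000/', {
+  //     method: 'POST',
+  //     body: chunks(),
+  //     duplex: 'half' // undici expects duplex for streaming request bodies
+  //   })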
+ if (V?.[Symbol.asyncIterator]) { + return V + } + + return webidl.converters.XMLHttpRequestBodyInit(V) +} + +webidl.converters.ResponseInit = webidl.dictionaryConverter([ + { + key: 'status', + converter: webidl.converters['unsigned short'], + defaultValue: 200 + }, + { + key: 'statusText', + converter: webidl.converters.ByteString, + defaultValue: '' + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + } +]) + +module.exports = { + makeNetworkError, + makeResponse, + makeAppropriateNetworkError, + filterResponse, + Response, + cloneResponse +} diff --git a/lib/fetch/symbols.js b/lib/fetch/symbols.js new file mode 100644 index 0000000..0b947d5 --- /dev/null +++ b/lib/fetch/symbols.js @@ -0,0 +1,10 @@ +'use strict' + +module.exports = { + kUrl: Symbol('url'), + kHeaders: Symbol('headers'), + kSignal: Symbol('signal'), + kState: Symbol('state'), + kGuard: Symbol('guard'), + kRealm: Symbol('realm') +} diff --git a/lib/fetch/util.js b/lib/fetch/util.js new file mode 100644 index 0000000..b12142c --- /dev/null +++ b/lib/fetch/util.js @@ -0,0 +1,1071 @@ +'use strict' + +const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = require('./constants') +const { getGlobalOrigin } = require('./global') +const { performance } = require('perf_hooks') +const { isBlobLike, toUSVString, ReadableStreamFrom } = require('../core/util') +const assert = require('assert') +const { isUint8Array } = require('util/types') + +// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable +/** @type {import('crypto')|undefined} */ +let crypto + +try { + crypto = require('crypto') +} catch { + +} + +function responseURL (response) { + // https://fetch.spec.whatwg.org/#responses + // A response has an associated URL. It is a pointer to the last URL + // in response’s URL list and null if response’s URL list is empty. + const urlList = response.urlList + const length = urlList.length + return length === 0 ? null : urlList[length - 1].toString() +} + +// https://fetch.spec.whatwg.org/#concept-response-location-url +function responseLocationURL (response, requestFragment) { + // 1. If response’s status is not a redirect status, then return null. + if (!redirectStatusSet.has(response.status)) { + return null + } + + // 2. Let location be the result of extracting header list values given + // `Location` and response’s header list. + let location = response.headersList.get('location') + + // 3. If location is a header value, then set location to the result of + // parsing location with response’s URL. + if (location !== null && isValidHeaderValue(location)) { + location = new URL(location, responseURL(response)) + } + + // 4. If location is a URL whose fragment is null, then set location’s + // fragment to requestFragment. + if (location && !location.hash) { + location.hash = requestFragment + } + + // 5. Return location. + return location +} + +/** @returns {URL} */ +function requestCurrentURL (request) { + return request.urlList[request.urlList.length - 1] +} + +function requestBadPort (request) { + // 1. Let url be request’s current URL. + const url = requestCurrentURL(request) + + // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, + // then return blocked. + if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { + return 'blocked' + } + + // 3. Return allowed. 
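+  // Illustrative usage sketch (not in the original source): for an HTTP(S)
+  // request whose current URL uses a port on the spec's bad-ports list
+  // (e.g. 22), the check above yields 'blocked'; the request objects below
+  // are hypothetical:
+  //
+  //   requestBadPort({ urlList: [new URL('https://example.com:22/')] })   // => 'blocked'
+  //   requestBadPort({ urlList: [new URL('https://example.com:8080/')] }) // => 'allowed'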
+ return 'allowed' +} + +function isErrorLike (object) { + return object instanceof Error || ( + object?.constructor?.name === 'Error' || + object?.constructor?.name === 'DOMException' + ) +} + +// Check whether |statusText| is a ByteString and +// matches the Reason-Phrase token production. +// RFC 2616: https://tools.ietf.org/html/rfc2616 +// RFC 7230: https://tools.ietf.org/html/rfc7230 +// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" +// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 +function isValidReasonPhrase (statusText) { + for (let i = 0; i < statusText.length; ++i) { + const c = statusText.charCodeAt(i) + if ( + !( + ( + c === 0x09 || // HTAB + (c >= 0x20 && c <= 0x7e) || // SP / VCHAR + (c >= 0x80 && c <= 0xff) + ) // obs-text + ) + ) { + return false + } + } + return true +} + +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } +} + +/** + * @param {string} characters + */ +function isValidHTTPToken (characters) { + if (characters.length === 0) { + return false + } + for (let i = 0; i < characters.length; ++i) { + if (!isTokenCharCode(characters.charCodeAt(i))) { + return false + } + } + return true +} + +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ +function isValidHeaderName (potentialValue) { + return isValidHTTPToken(potentialValue) +} + +/** + * @see https://fetch.spec.whatwg.org/#header-value + * @param {string} potentialValue + */ +function isValidHeaderValue (potentialValue) { + // - Has no leading or trailing HTTP tab or space bytes. + // - Contains no 0x00 (NUL) or HTTP newline bytes. + if ( + potentialValue.startsWith('\t') || + potentialValue.startsWith(' ') || + potentialValue.endsWith('\t') || + potentialValue.endsWith(' ') + ) { + return false + } + + if ( + potentialValue.includes('\0') || + potentialValue.includes('\r') || + potentialValue.includes('\n') + ) { + return false + } + + return true +} + +// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect +function setRequestReferrerPolicyOnRedirect (request, actualResponse) { + // Given a request request and a response actualResponse, this algorithm + // updates request’s referrer policy according to the Referrer-Policy + // header (if any) in actualResponse. + + // 1. Let policy be the result of executing § 8.1 Parse a referrer policy + // from a Referrer-Policy header on actualResponse. + + // 8.1 Parse a referrer policy from a Referrer-Policy header + // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. + const { headersList } = actualResponse + // 2. Let policy be the empty string. + // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. + // 4. Return policy. + const policyHeader = (headersList.get('referrer-policy') ?? 
'').split(',') + + // Note: As the referrer-policy can contain multiple policies + // separated by comma, we need to loop through all of them + // and pick the first valid one. + // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy + let policy = '' + if (policyHeader.length > 0) { + // The right-most policy takes precedence. + // The left-most policy is the fallback. + for (let i = policyHeader.length; i !== 0; i--) { + const token = policyHeader[i - 1].trim() + if (referrerPolicyTokens.has(token)) { + policy = token + break + } + } + } + + // 2. If policy is not the empty string, then set request’s referrer policy to policy. + if (policy !== '') { + request.referrerPolicy = policy + } +} + +// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check +function crossOriginResourcePolicyCheck () { + // TODO + return 'allowed' +} + +// https://fetch.spec.whatwg.org/#concept-cors-check +function corsCheck () { + // TODO + return 'success' +} + +// https://fetch.spec.whatwg.org/#concept-tao-check +function TAOCheck () { + // TODO + return 'success' +} + +function appendFetchMetadata (httpRequest) { + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header + + // 1. Assert: r’s url is a potentially trustworthy URL. + // TODO + + // 2. Let header be a Structured Header whose value is a token. + let header = null + + // 3. Set header’s value to r’s mode. + header = httpRequest.mode + + // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. + httpRequest.headersList.set('sec-fetch-mode', header) + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header + // TODO +} + +// https://fetch.spec.whatwg.org/#append-a-request-origin-header +function appendRequestOriginHeader (request) { + // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. + let serializedOrigin = request.origin + + // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. + if (request.responseTainting === 'cors' || request.mode === 'websocket') { + if (serializedOrigin) { + request.headersList.append('origin', serializedOrigin) + } + + // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: + } else if (request.method !== 'GET' && request.method !== 'HEAD') { + // 1. Switch on request’s referrer policy: + switch (request.referrerPolicy) { + case 'no-referrer': + // Set serializedOrigin to `null`. + serializedOrigin = null + break + case 'no-referrer-when-downgrade': + case 'strict-origin': + case 'strict-origin-when-cross-origin': + // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. + if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { + serializedOrigin = null + } + break + case 'same-origin': + // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. + if (!sameOrigin(request, requestCurrentURL(request))) { + serializedOrigin = null + } + break + default: + // Do nothing. + } + + if (serializedOrigin) { + // 2. Append (`Origin`, serializedOrigin) to request’s header list. 
+ request.headersList.append('origin', serializedOrigin) + } + } +} + +function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { + // TODO + return performance.now() +} + +// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info +function createOpaqueTimingInfo (timingInfo) { + return { + startTime: timingInfo.startTime ?? 0, + redirectStartTime: 0, + redirectEndTime: 0, + postRedirectStartTime: timingInfo.startTime ?? 0, + finalServiceWorkerStartTime: 0, + finalNetworkResponseStartTime: 0, + finalNetworkRequestStartTime: 0, + endTime: 0, + encodedBodySize: 0, + decodedBodySize: 0, + finalConnectionTimingInfo: null + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#policy-container +function makePolicyContainer () { + // Note: the fetch spec doesn't make use of embedder policy or CSP list + return { + referrerPolicy: 'strict-origin-when-cross-origin' + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container +function clonePolicyContainer (policyContainer) { + return { + referrerPolicy: policyContainer.referrerPolicy + } +} + +// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer +function determineRequestsReferrer (request) { + // 1. Let policy be request's referrer policy. + const policy = request.referrerPolicy + + // Note: policy cannot (shouldn't) be null or an empty string. + assert(policy) + + // 2. Let environment be request’s client. + + let referrerSource = null + + // 3. Switch on request’s referrer: + if (request.referrer === 'client') { + // Note: node isn't a browser and doesn't implement document/iframes, + // so we bypass this step and replace it with our own. + + const globalOrigin = getGlobalOrigin() + + if (!globalOrigin || globalOrigin.origin === 'null') { + return 'no-referrer' + } + + // note: we need to clone it as it's mutated + referrerSource = new URL(globalOrigin) + } else if (request.referrer instanceof URL) { + // Let referrerSource be request’s referrer. + referrerSource = request.referrer + } + + // 4. Let request’s referrerURL be the result of stripping referrerSource for + // use as a referrer. + let referrerURL = stripURLForReferrer(referrerSource) + + // 5. Let referrerOrigin be the result of stripping referrerSource for use as + // a referrer, with the origin-only flag set to true. + const referrerOrigin = stripURLForReferrer(referrerSource, true) + + // 6. If the result of serializing referrerURL is a string whose length is + // greater than 4096, set referrerURL to referrerOrigin. + if (referrerURL.toString().length > 4096) { + referrerURL = referrerOrigin + } + + const areSameOrigin = sameOrigin(request, referrerURL) + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && + !isURLPotentiallyTrustworthy(request.url) + + // 8. Execute the switch statements corresponding to the value of policy: + switch (policy) { + case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) + case 'unsafe-url': return referrerURL + case 'same-origin': + return areSameOrigin ? referrerOrigin : 'no-referrer' + case 'origin-when-cross-origin': + return areSameOrigin ? referrerURL : referrerOrigin + case 'strict-origin-when-cross-origin': { + const currentURL = requestCurrentURL(request) + + // 1. If the origin of referrerURL and the origin of request’s current + // URL are the same, then return referrerURL. + if (sameOrigin(referrerURL, currentURL)) { + return referrerURL + } + + // 2. 
If referrerURL is a potentially trustworthy URL and request’s + // current URL is not a potentially trustworthy URL, then return no + // referrer. + if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { + return 'no-referrer' + } + + // 3. Return referrerOrigin. + return referrerOrigin + } + case 'strict-origin': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + case 'no-referrer-when-downgrade': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + + default: // eslint-disable-line + return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin + } +} + +/** + * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url + * @param {URL} url + * @param {boolean|undefined} originOnly + */ +function stripURLForReferrer (url, originOnly) { + // 1. Assert: url is a URL. + assert(url instanceof URL) + + // 2. If url’s scheme is a local scheme, then return no referrer. + if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { + return 'no-referrer' + } + + // 3. Set url’s username to the empty string. + url.username = '' + + // 4. Set url’s password to the empty string. + url.password = '' + + // 5. Set url’s fragment to null. + url.hash = '' + + // 6. If the origin-only flag is true, then: + if (originOnly) { + // 1. Set url’s path to « the empty string ». + url.pathname = '' + + // 2. Set url’s query to null. + url.search = '' + } + + // 7. Return url. + return url +} + +function isURLPotentiallyTrustworthy (url) { + if (!(url instanceof URL)) { + return false + } + + // If child of about, return true + if (url.href === 'about:blank' || url.href === 'about:srcdoc') { + return true + } + + // If scheme is data, return true + if (url.protocol === 'data:') return true + + // If file, return true + if (url.protocol === 'file:') return true + + return isOriginPotentiallyTrustworthy(url.origin) + + function isOriginPotentiallyTrustworthy (origin) { + // If origin is explicitly null, return false + if (origin == null || origin === 'null') return false + + const originAsURL = new URL(origin) + + // If secure, return true + if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { + return true + } + + // If localhost or variants, return true + if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || + (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || + (originAsURL.hostname.endsWith('.localhost'))) { + return true + } + + // If any other, return false + return false + } +} + +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist + * @param {Uint8Array} bytes + * @param {string} metadataList + */ +function bytesMatch (bytes, metadataList) { + // If node is not built with OpenSSL support, we cannot check + // a request's integrity, so allow it by default (the spec will + // allow requests if an invalid hash is given, as precedence). + /* istanbul ignore if: only if node is built with --without-ssl */ + if (crypto === undefined) { + return true + } + + // 1. Let parsedMetadata be the result of parsing metadataList. 
+  const parsedMetadata = parseMetadata(metadataList)
+
+  // 2. If parsedMetadata is no metadata, return true.
+  if (parsedMetadata === 'no metadata') {
+    return true
+  }
+
+  // 3. If parsedMetadata is the empty set, return true.
+  if (parsedMetadata.length === 0) {
+    return true
+  }
+
+  // 4. Let metadata be the result of getting the strongest
+  // metadata from parsedMetadata.
+  const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
+  // get the strongest algorithm
+  const strongest = list[0].algo
+  // get all entries that use the strongest algorithm; ignore weaker
+  const metadata = list.filter((item) => item.algo === strongest)
+
+  // 5. For each item in metadata:
+  for (const item of metadata) {
+    // 1. Let algorithm be the alg component of item.
+    const algorithm = item.algo
+
+    // 2. Let expectedValue be the val component of item.
+    let expectedValue = item.hash
+
+    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
+    // "be liberal with padding". This is annoying, and it's not even in the spec.
+
+    if (expectedValue.endsWith('==')) {
+      expectedValue = expectedValue.slice(0, -2)
+    }
+
+    // 3. Let actualValue be the result of applying algorithm to bytes.
+    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+    if (actualValue.endsWith('==')) {
+      actualValue = actualValue.slice(0, -2)
+    }
+
+    // 4. If actualValue is a case-sensitive match for expectedValue,
+    // return true.
+    if (actualValue === expectedValue) {
+      return true
+    }
+
+    let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
+
+    if (actualBase64URL.endsWith('==')) {
+      actualBase64URL = actualBase64URL.slice(0, -2)
+    }
+
+    if (actualBase64URL === expectedValue) {
+      return true
+    }
+  }
+
+  // 6. Return false.
+  return false
+}
+
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// https://www.w3.org/TR/CSP2/#source-list-syntax
+// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
+const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+  // 1. Let result be the empty set.
+  /** @type {{ algo: string, hash: string }[]} */
+  const result = []
+
+  // 2. Let empty be equal to true.
+  let empty = true
+
+  const supportedHashes = crypto.getHashes()
+
+  // 3. For each token returned by splitting metadata on spaces:
+  for (const token of metadata.split(' ')) {
+    // 1. Set empty to false.
+    empty = false
+
+    // 2. Parse token as a hash-with-options.
+    const parsedToken = parseHashWithOptions.exec(token)
+
+    // 3. If token does not parse, continue to the next token.
+    if (parsedToken === null || parsedToken.groups === undefined) {
+      // Note: Chromium blocks the request at this point, but Firefox
+      // gives a warning that an invalid integrity was given. The
+      // correct behavior is to ignore these, and subsequently not
+      // check the integrity of the resource.
+      continue
+    }
+
+    // 4. Let algorithm be the hash-algo component of token.
+    const algorithm = parsedToken.groups.algo
+
+    // 5. If algorithm is a hash function recognized by the user
+    // agent, add the parsed token to result.
+    if (supportedHashes.includes(algorithm.toLowerCase())) {
+      result.push(parsedToken.groups)
+    }
+  }
+
+  // 4. Return no metadata if empty is true, otherwise return result.
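+  // Illustrative sketch (not in the original source): an `integrity` string
+  // such as 'sha384-... sha256-...' yields one { algo, hash } record per
+  // recognized token (digest values abbreviated here for illustration):
+  //
+  //   parseMetadata('sha384-AAAA sha256-BBBB')
+  //   // => [ { algo: 'sha384', hash: 'AAAA' }, { algo: 'sha256', hash: 'BBBB' } ]
+  //
+  // bytesMatch() above then keeps only the strongest algorithm present and
+  // compares base64/base64url digests of the body bytes against each hash.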
+ if (empty === true) { + return 'no metadata' + } + + return result +} + +// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request +function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { + // TODO +} + +/** + * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} + * @param {URL} A + * @param {URL} B + */ +function sameOrigin (A, B) { + // 1. If A and B are the same opaque origin, then return true. + if (A.origin === B.origin && A.origin === 'null') { + return true + } + + // 2. If A and B are both tuple origins and their schemes, + // hosts, and port are identical, then return true. + if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { + return true + } + + // 3. Return false. + return false +} + +function createDeferredPromise () { + let res + let rej + const promise = new Promise((resolve, reject) => { + res = resolve + rej = reject + }) + + return { promise, resolve: res, reject: rej } +} + +function isAborted (fetchParams) { + return fetchParams.controller.state === 'aborted' +} + +function isCancelled (fetchParams) { + return fetchParams.controller.state === 'aborted' || + fetchParams.controller.state === 'terminated' +} + +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(normalizeMethodRecord, null) + +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ +function normalizeMethod (method) { + return normalizeMethodRecord[method.toLowerCase()] ?? method +} + +// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string +function serializeJavascriptValueToJSONString (value) { + // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). + const result = JSON.stringify(value) + + // 2. If result is undefined, then throw a TypeError. + if (result === undefined) { + throw new TypeError('Value is not JSON serializable') + } + + // 3. Assert: result is a string. + assert(typeof result === 'string') + + // 4. Return result. + return result +} + +// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object +const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + +/** + * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + * @param {() => unknown[]} iterator + * @param {string} name name of the instance + * @param {'key'|'value'|'key+value'} kind + */ +function makeIterator (iterator, name, kind) { + const object = { + index: 0, + kind, + target: iterator + } + + const i = { + next () { + // 1. Let interface be the interface for which the iterator prototype object exists. + + // 2. Let thisValue be the this value. + + // 3. Let object be ? ToObject(thisValue). + + // 4. If object is a platform object, then perform a security + // check, passing: + + // 5. If object is not a default iterator object for interface, + // then throw a TypeError. + if (Object.getPrototypeOf(this) !== i) { + throw new TypeError( + `'next' called on an object that does not implement interface ${name} Iterator.` + ) + } + + // 6. Let index be object’s index. + // 7. Let kind be object’s kind. + // 8. Let values be object’s target's value pairs to iterate over. 
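+      // Illustrative sketch (not in the original source): interfaces such as
+      // Headers can expose entries()/keys()/values() through this helper,
+      // roughly like:
+      //
+      //   entries () {
+      //     return makeIterator(() => [['accept', '*/*']], 'Headers', 'key+value')
+      //   }
+      //
+      // The callback is re-invoked on every next() call, so the pairs always
+      // reflect the current state of the underlying list.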
+ const { index, kind, target } = object + const values = target() + + // 9. Let len be the length of values. + const len = values.length + + // 10. If index is greater than or equal to len, then return + // CreateIterResultObject(undefined, true). + if (index >= len) { + return { value: undefined, done: true } + } + + // 11. Let pair be the entry in values at index index. + const pair = values[index] + + // 12. Set object’s index to index + 1. + object.index = index + 1 + + // 13. Return the iterator result for pair and kind. + return iteratorResult(pair, kind) + }, + // The class string of an iterator prototype object for a given interface is the + // result of concatenating the identifier of the interface and the string " Iterator". + [Symbol.toStringTag]: `${name} Iterator` + } + + // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. + Object.setPrototypeOf(i, esIteratorPrototype) + // esIteratorPrototype needs to be the prototype of i + // which is the prototype of an empty object. Yes, it's confusing. + return Object.setPrototypeOf({}, i) +} + +// https://webidl.spec.whatwg.org/#iterator-result +function iteratorResult (pair, kind) { + let result + + // 1. Let result be a value determined by the value of kind: + switch (kind) { + case 'key': { + // 1. Let idlKey be pair’s key. + // 2. Let key be the result of converting idlKey to an + // ECMAScript value. + // 3. result is key. + result = pair[0] + break + } + case 'value': { + // 1. Let idlValue be pair’s value. + // 2. Let value be the result of converting idlValue to + // an ECMAScript value. + // 3. result is value. + result = pair[1] + break + } + case 'key+value': { + // 1. Let idlKey be pair’s key. + // 2. Let idlValue be pair’s value. + // 3. Let key be the result of converting idlKey to an + // ECMAScript value. + // 4. Let value be the result of converting idlValue to + // an ECMAScript value. + // 5. Let array be ! ArrayCreate(2). + // 6. Call ! CreateDataProperty(array, "0", key). + // 7. Call ! CreateDataProperty(array, "1", value). + // 8. result is array. + result = pair + break + } + } + + // 2. Return CreateIterResultObject(result, false). + return { value: result, done: false } +} + +/** + * @see https://fetch.spec.whatwg.org/#body-fully-read + */ +async function fullyReadBody (body, processBody, processBodyError) { + // 1. If taskDestination is null, then set taskDestination to + // the result of starting a new parallel queue. + + // 2. Let successSteps given a byte sequence bytes be to queue a + // fetch task to run processBody given bytes, with taskDestination. + const successSteps = processBody + + // 3. Let errorSteps be to queue a fetch task to run processBodyError, + // with taskDestination. + const errorSteps = processBodyError + + // 4. Let reader be the result of getting a reader for body’s stream. + // If that threw an exception, then run errorSteps with that + // exception and return. + let reader + + try { + reader = body.stream.getReader() + } catch (e) { + errorSteps(e) + return + } + + // 5. Read all bytes from reader, given successSteps and errorSteps. 
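+  // Illustrative sketch (not in the original source): callers hand in an
+  // internal body ({ stream: ReadableStream, ... }) plus two callbacks; the
+  // names below are hypothetical:
+  //
+  //   await fullyReadBody(
+  //     internalResponse.body,
+  //     (bytes) => { /* bytes is a Buffer holding the whole payload */ },
+  //     (err) => { /* reading failed or the stream errored */ }
+  //   )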
+ try { + const result = await readAllBytes(reader) + successSteps(result) + } catch (e) { + errorSteps(e) + } +} + +/** @type {ReadableStream} */ +let ReadableStream = globalThis.ReadableStream + +function isReadableStreamLike (stream) { + if (!ReadableStream) { + ReadableStream = require('stream/web').ReadableStream + } + + return stream instanceof ReadableStream || ( + stream[Symbol.toStringTag] === 'ReadableStream' && + typeof stream.tee === 'function' + ) +} + +const MAXIMUM_ARGUMENT_LENGTH = 65535 + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-decode + * @param {number[]|Uint8Array} input + */ +function isomorphicDecode (input) { + // 1. To isomorphic decode a byte sequence input, return a string whose code point + // length is equal to input’s length and whose code points have the same values + // as the values of input’s bytes, in the same order. + + if (input.length < MAXIMUM_ARGUMENT_LENGTH) { + return String.fromCharCode(...input) + } + + return input.reduce((previous, current) => previous + String.fromCharCode(current), '') +} + +/** + * @param {ReadableStreamController} controller + */ +function readableStreamClose (controller) { + try { + controller.close() + } catch (err) { + // TODO: add comment explaining why this error occurs. + if (!err.message.includes('Controller is already closed')) { + throw err + } + } +} + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-encode + * @param {string} input + */ +function isomorphicEncode (input) { + // 1. Assert: input contains no code points greater than U+00FF. + for (let i = 0; i < input.length; i++) { + assert(input.charCodeAt(i) <= 0xFF) + } + + // 2. Return a byte sequence whose length is equal to input’s code + // point length and whose bytes have the same values as the + // values of input’s code points, in the same order + return input +} + +/** + * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes + * @see https://streams.spec.whatwg.org/#read-loop + * @param {ReadableStreamDefaultReader} reader + */ +async function readAllBytes (reader) { + const bytes = [] + let byteLength = 0 + + while (true) { + const { done, value: chunk } = await reader.read() + + if (done) { + // 1. Call successSteps with bytes. + return Buffer.concat(bytes, byteLength) + } + + // 1. If chunk is not a Uint8Array object, call failureSteps + // with a TypeError and abort these steps. + if (!isUint8Array(chunk)) { + throw new TypeError('Received non-Uint8Array chunk') + } + + // 2. Append the bytes represented by chunk to bytes. + bytes.push(chunk) + byteLength += chunk.length + + // 3. Read-loop given reader, bytes, successSteps, and failureSteps. + } +} + +/** + * @see https://fetch.spec.whatwg.org/#is-local + * @param {URL} url + */ +function urlIsLocal (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' +} + +/** + * @param {string|URL} url + */ +function urlHasHttpsScheme (url) { + if (typeof url === 'string') { + return url.startsWith('https:') + } + + return url.protocol === 'https:' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-scheme + * @param {URL} url + */ +function urlIsHttpHttpsScheme (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'http:' || protocol === 'https:' +} + +/** + * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. 
+ */ +const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) + +module.exports = { + isAborted, + isCancelled, + createDeferredPromise, + ReadableStreamFrom, + toUSVString, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + coarsenedSharedCurrentTime, + determineRequestsReferrer, + makePolicyContainer, + clonePolicyContainer, + appendFetchMetadata, + appendRequestOriginHeader, + TAOCheck, + corsCheck, + crossOriginResourcePolicyCheck, + createOpaqueTimingInfo, + setRequestReferrerPolicyOnRedirect, + isValidHTTPToken, + requestBadPort, + requestCurrentURL, + responseURL, + responseLocationURL, + isBlobLike, + isURLPotentiallyTrustworthy, + isValidReasonPhrase, + sameOrigin, + normalizeMethod, + serializeJavascriptValueToJSONString, + makeIterator, + isValidHeaderName, + isValidHeaderValue, + hasOwn, + isErrorLike, + fullyReadBody, + bytesMatch, + isReadableStreamLike, + readableStreamClose, + isomorphicEncode, + isomorphicDecode, + urlIsLocal, + urlHasHttpsScheme, + urlIsHttpHttpsScheme, + readAllBytes, + normalizeMethodRecord +} diff --git a/lib/fetch/webidl.js b/lib/fetch/webidl.js new file mode 100644 index 0000000..6fcf2ab --- /dev/null +++ b/lib/fetch/webidl.js @@ -0,0 +1,646 @@ +'use strict' + +const { types } = require('util') +const { hasOwn, toUSVString } = require('./util') + +/** @type {import('../../types/webidl').Webidl} */ +const webidl = {} +webidl.converters = {} +webidl.util = {} +webidl.errors = {} + +webidl.errors.exception = function (message) { + return new TypeError(`${message.header}: ${message.message}`) +} + +webidl.errors.conversionFailed = function (context) { + const plural = context.types.length === 1 ? '' : ' one of' + const message = + `${context.argument} could not be converted to` + + `${plural}: ${context.types.join(', ')}.` + + return webidl.errors.exception({ + header: context.prefix, + message + }) +} + +webidl.errors.invalidArgument = function (context) { + return webidl.errors.exception({ + header: context.prefix, + message: `"${context.value}" is an invalid ${context.type}.` + }) +} + +// https://webidl.spec.whatwg.org/#implements +webidl.brandCheck = function (V, I, opts = undefined) { + if (opts?.strict !== false && !(V instanceof I)) { + throw new TypeError('Illegal invocation') + } else { + return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] + } +} + +webidl.argumentLengthCheck = function ({ length }, min, ctx) { + if (length < min) { + throw webidl.errors.exception({ + message: `${min} argument${min !== 1 ? 's' : ''} required, ` + + `but${length ? ' only' : ''} ${length} found.`, + ...ctx + }) + } +} + +webidl.illegalConstructor = function () { + throw webidl.errors.exception({ + header: 'TypeError', + message: 'Illegal constructor' + }) +} + +// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values +webidl.util.Type = function (V) { + switch (typeof V) { + case 'undefined': return 'Undefined' + case 'boolean': return 'Boolean' + case 'string': return 'String' + case 'symbol': return 'Symbol' + case 'number': return 'Number' + case 'bigint': return 'BigInt' + case 'function': + case 'object': { + if (V === null) { + return 'Null' + } + + return 'Object' + } + } +} + +// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint +webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { + let upperBound + let lowerBound + + // 1. If bitLength is 64, then: + if (bitLength === 64) { + // 1. Let upperBound be 2^53 − 1. + upperBound = Math.pow(2, 53) - 1 + + // 2. 
If signedness is "unsigned", then let lowerBound be 0. + if (signedness === 'unsigned') { + lowerBound = 0 + } else { + // 3. Otherwise let lowerBound be −2^53 + 1. + lowerBound = Math.pow(-2, 53) + 1 + } + } else if (signedness === 'unsigned') { + // 2. Otherwise, if signedness is "unsigned", then: + + // 1. Let lowerBound be 0. + lowerBound = 0 + + // 2. Let upperBound be 2^bitLength − 1. + upperBound = Math.pow(2, bitLength) - 1 + } else { + // 3. Otherwise: + + // 1. Let lowerBound be -2^bitLength − 1. + lowerBound = Math.pow(-2, bitLength) - 1 + + // 2. Let upperBound be 2^bitLength − 1 − 1. + upperBound = Math.pow(2, bitLength - 1) - 1 + } + + // 4. Let x be ? ToNumber(V). + let x = Number(V) + + // 5. If x is −0, then set x to +0. + if (x === 0) { + x = 0 + } + + // 6. If the conversion is to an IDL type associated + // with the [EnforceRange] extended attribute, then: + if (opts.enforceRange === true) { + // 1. If x is NaN, +∞, or −∞, then throw a TypeError. + if ( + Number.isNaN(x) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Could not convert ${V} to an integer.` + }) + } + + // 2. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 3. If x < lowerBound or x > upperBound, then + // throw a TypeError. + if (x < lowerBound || x > upperBound) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` + }) + } + + // 4. Return x. + return x + } + + // 7. If x is not NaN and the conversion is to an IDL + // type associated with the [Clamp] extended + // attribute, then: + if (!Number.isNaN(x) && opts.clamp === true) { + // 1. Set x to min(max(x, lowerBound), upperBound). + x = Math.min(Math.max(x, lowerBound), upperBound) + + // 2. Round x to the nearest integer, choosing the + // even integer if it lies halfway between two, + // and choosing +0 rather than −0. + if (Math.floor(x) % 2 === 0) { + x = Math.floor(x) + } else { + x = Math.ceil(x) + } + + // 3. Return x. + return x + } + + // 8. If x is NaN, +0, +∞, or −∞, then return +0. + if ( + Number.isNaN(x) || + (x === 0 && Object.is(0, x)) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + return 0 + } + + // 9. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 10. Set x to x modulo 2^bitLength. + x = x % Math.pow(2, bitLength) + + // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, + // then return x − 2^bitLength. + if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { + return x - Math.pow(2, bitLength) + } + + // 12. Otherwise, return x. + return x +} + +// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart +webidl.util.IntegerPart = function (n) { + // 1. Let r be floor(abs(n)). + const r = Math.floor(Math.abs(n)) + + // 2. If n < 0, then return -1 × r. + if (n < 0) { + return -1 * r + } + + // 3. Otherwise, return r. + return r +} + +// https://webidl.spec.whatwg.org/#es-sequence +webidl.sequenceConverter = function (converter) { + return (V) => { + // 1. If Type(V) is not Object, throw a TypeError. + if (webidl.util.Type(V) !== 'Object') { + throw webidl.errors.exception({ + header: 'Sequence', + message: `Value of type ${webidl.util.Type(V)} is not an Object.` + }) + } + + // 2. Let method be ? GetMethod(V, @@iterator). + /** @type {Generator} */ + const method = V?.[Symbol.iterator]?.() + const seq = [] + + // 3. 
If method is undefined, throw a TypeError. + if ( + method === undefined || + typeof method.next !== 'function' + ) { + throw webidl.errors.exception({ + header: 'Sequence', + message: 'Object is not an iterator.' + }) + } + + // https://webidl.spec.whatwg.org/#create-sequence-from-iterable + while (true) { + const { done, value } = method.next() + + if (done) { + break + } + + seq.push(converter(value)) + } + + return seq + } +} + +// https://webidl.spec.whatwg.org/#es-to-record +webidl.recordConverter = function (keyConverter, valueConverter) { + return (O) => { + // 1. If Type(O) is not Object, throw a TypeError. + if (webidl.util.Type(O) !== 'Object') { + throw webidl.errors.exception({ + header: 'Record', + message: `Value of type ${webidl.util.Type(O)} is not an Object.` + }) + } + + // 2. Let result be a new empty instance of record. + const result = {} + + if (!types.isProxy(O)) { + // Object.keys only returns enumerable properties + const keys = Object.keys(O) + + for (const key of keys) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + + // 5. Return result. + return result + } + + // 3. Let keys be ? O.[[OwnPropertyKeys]](). + const keys = Reflect.ownKeys(O) + + // 4. For each key of keys. + for (const key of keys) { + // 1. Let desc be ? O.[[GetOwnProperty]](key). + const desc = Reflect.getOwnPropertyDescriptor(O, key) + + // 2. If desc is not undefined and desc.[[Enumerable]] is true: + if (desc?.enumerable) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + } + + // 5. Return result. + return result + } +} + +webidl.interfaceConverter = function (i) { + return (V, opts = {}) => { + if (opts.strict !== false && !(V instanceof i)) { + throw webidl.errors.exception({ + header: i.name, + message: `Expected ${V} to be an instance of ${i.name}.` + }) + } + + return V + } +} + +webidl.dictionaryConverter = function (converters) { + return (dictionary) => { + const type = webidl.util.Type(dictionary) + const dict = {} + + if (type === 'Null' || type === 'Undefined') { + return dict + } else if (type !== 'Object') { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` + }) + } + + for (const options of converters) { + const { key, defaultValue, required, converter } = options + + if (required === true) { + if (!hasOwn(dictionary, key)) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Missing required key "${key}".` + }) + } + } + + let value = dictionary[key] + const hasDefault = hasOwn(options, 'defaultValue') + + // Only use defaultValue if value is undefined and + // a defaultValue options was provided. + if (hasDefault && value !== null) { + value = value ?? defaultValue + } + + // A key can be optional and have no default value. + // When this happens, do not perform a conversion, + // and do not assign the key a value. 
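+      // Illustrative sketch (not in the original source): this is the factory
+      // behind dictionaries such as RequestInit/ResponseInit earlier in this
+      // patch. The 'reason' key below is hypothetical:
+      //
+      //   const convert = webidl.dictionaryConverter([
+      //     { key: 'status', converter: webidl.converters['unsigned short'], defaultValue: 200 },
+      //     { key: 'reason', converter: webidl.converters.ByteString, required: true }
+      //   ])
+      //   convert({ reason: 'OK' }) // => { status: 200, reason: 'OK' }
+      //   convert({})               // throws: Missing required key "reason".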
+ if (required || hasDefault || value !== undefined) { + value = converter(value) + + if ( + options.allowedValues && + !options.allowedValues.includes(value) + ) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` + }) + } + + dict[key] = value + } + } + + return dict + } +} + +webidl.nullableConverter = function (converter) { + return (V) => { + if (V === null) { + return V + } + + return converter(V) + } +} + +// https://webidl.spec.whatwg.org/#es-DOMString +webidl.converters.DOMString = function (V, opts = {}) { + // 1. If V is null and the conversion is to an IDL type + // associated with the [LegacyNullToEmptyString] + // extended attribute, then return the DOMString value + // that represents the empty string. + if (V === null && opts.legacyNullToEmptyString) { + return '' + } + + // 2. Let x be ? ToString(V). + if (typeof V === 'symbol') { + throw new TypeError('Could not convert argument of type symbol to string.') + } + + // 3. Return the IDL DOMString value that represents the + // same sequence of code units as the one the + // ECMAScript String value x represents. + return String(V) +} + +// https://webidl.spec.whatwg.org/#es-ByteString +webidl.converters.ByteString = function (V) { + // 1. Let x be ? ToString(V). + // Note: DOMString converter perform ? ToString(V) + const x = webidl.converters.DOMString(V) + + // 2. If the value of any element of x is greater than + // 255, then throw a TypeError. + for (let index = 0; index < x.length; index++) { + if (x.charCodeAt(index) > 255) { + throw new TypeError( + 'Cannot convert argument to a ByteString because the character at ' + + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` + ) + } + } + + // 3. Return an IDL ByteString value whose length is the + // length of x, and where the value of each element is + // the value of the corresponding element of x. + return x +} + +// https://webidl.spec.whatwg.org/#es-USVString +webidl.converters.USVString = toUSVString + +// https://webidl.spec.whatwg.org/#es-boolean +webidl.converters.boolean = function (V) { + // 1. Let x be the result of computing ToBoolean(V). + const x = Boolean(V) + + // 2. Return the IDL boolean value that is the one that represents + // the same truth value as the ECMAScript Boolean value x. + return x +} + +// https://webidl.spec.whatwg.org/#es-any +webidl.converters.any = function (V) { + return V +} + +// https://webidl.spec.whatwg.org/#es-long-long +webidl.converters['long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "signed"). + const x = webidl.util.ConvertToInt(V, 64, 'signed') + + // 2. Return the IDL long long value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long-long +webidl.converters['unsigned long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). + const x = webidl.util.ConvertToInt(V, 64, 'unsigned') + + // 2. Return the IDL unsigned long long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long +webidl.converters['unsigned long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). + const x = webidl.util.ConvertToInt(V, 32, 'unsigned') + + // 2. Return the IDL unsigned long value that + // represents the same numeric value as x. 
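+  // Illustrative sketch (not in the original source): these integer converters
+  // all defer to ConvertToInt above, e.g.:
+  //
+  //   webidl.converters['unsigned long'](4294967296) // => 0 (wraps modulo 2^32)
+  //   webidl.converters['unsigned long']('42')       // => 42 (ToNumber is applied first)
+  //   webidl.converters['long long'](-1.9)           // => -1 (truncated toward zero)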
+ return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-short +webidl.converters['unsigned short'] = function (V, opts) { + // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). + const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) + + // 2. Return the IDL unsigned short value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#idl-ArrayBuffer +webidl.converters.ArrayBuffer = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have an + // [[ArrayBufferData]] internal slot, then throw a + // TypeError. + // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances + // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances + if ( + webidl.util.Type(V) !== 'Object' || + !types.isAnyArrayBuffer(V) + ) { + throw webidl.errors.conversionFailed({ + prefix: `${V}`, + argument: `${V}`, + types: ['ArrayBuffer'] + }) + } + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V) is true, then throw a + // TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V) is true, then throw a + // TypeError. + // Note: resizable ArrayBuffers are currently a proposal. + + // 4. Return the IDL ArrayBuffer value that is a + // reference to the same object as V. + return V +} + +webidl.converters.TypedArray = function (V, T, opts = {}) { + // 1. Let T be the IDL type V is being converted to. + + // 2. If Type(V) is not Object, or V does not have a + // [[TypedArrayName]] internal slot with a value + // equal to T’s name, then throw a TypeError. + if ( + webidl.util.Type(V) !== 'Object' || + !types.isTypedArray(V) || + V.constructor.name !== T.name + ) { + throw webidl.errors.conversionFailed({ + prefix: `${T.name}`, + argument: `${V}`, + types: [T.name] + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 4. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable array buffers are currently a proposal + + // 5. Return the IDL value of type T that is a reference + // to the same object as V. + return V +} + +webidl.converters.DataView = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have a + // [[DataView]] internal slot, then throw a TypeError. + if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { + throw webidl.errors.exception({ + header: 'DataView', + message: 'Object is not a DataView.' + }) + } + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, + // then throw a TypeError. 
+ if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is + // true, then throw a TypeError. + // Note: resizable ArrayBuffers are currently a proposal + + // 4. Return the IDL DataView value that is a reference + // to the same object as V. + return V +} + +// https://webidl.spec.whatwg.org/#BufferSource +webidl.converters.BufferSource = function (V, opts = {}) { + if (types.isAnyArrayBuffer(V)) { + return webidl.converters.ArrayBuffer(V, opts) + } + + if (types.isTypedArray(V)) { + return webidl.converters.TypedArray(V, V.constructor) + } + + if (types.isDataView(V)) { + return webidl.converters.DataView(V, opts) + } + + throw new TypeError(`Could not convert ${V} to a BufferSource.`) +} + +webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter( + webidl.converters.ByteString +) + +webidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter( + webidl.converters['sequence<ByteString>'] +) + +webidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter( + webidl.converters.ByteString, + webidl.converters.ByteString +) + +module.exports = { + webidl +} diff --git a/lib/fileapi/encoding.js b/lib/fileapi/encoding.js new file mode 100644 index 0000000..1d1d2b6 --- /dev/null +++ b/lib/fileapi/encoding.js @@ -0,0 +1,290 @@ +'use strict' + +/** + * @see https://encoding.spec.whatwg.org/#concept-encoding-get + * @param {string|undefined} label + */ +function getEncoding (label) { + if (!label) { + return 'failure' + } + + // 1. Remove any leading and trailing ASCII whitespace from label. + // 2. If label is an ASCII case-insensitive match for any of the + // labels listed in the table below, then return the + // corresponding encoding; otherwise return failure. 
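  // Editor's note (annotation, not part of the upstream patch): behaviour sketch based
  // on the label table below:
  //   getEncoding(' UTF8 ')  // => 'UTF-8'         (trimmed, matched case-insensitively)
  //   getEncoding('latin1')  // => 'windows-1252'
  //   getEncoding('bogus')   // => 'failure'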
+ switch (label.trim().toLowerCase()) { + case 'unicode-1-1-utf-8': + case 'unicode11utf8': + case 'unicode20utf8': + case 'utf-8': + case 'utf8': + case 'x-unicode20utf8': + return 'UTF-8' + case '866': + case 'cp866': + case 'csibm866': + case 'ibm866': + return 'IBM866' + case 'csisolatin2': + case 'iso-8859-2': + case 'iso-ir-101': + case 'iso8859-2': + case 'iso88592': + case 'iso_8859-2': + case 'iso_8859-2:1987': + case 'l2': + case 'latin2': + return 'ISO-8859-2' + case 'csisolatin3': + case 'iso-8859-3': + case 'iso-ir-109': + case 'iso8859-3': + case 'iso88593': + case 'iso_8859-3': + case 'iso_8859-3:1988': + case 'l3': + case 'latin3': + return 'ISO-8859-3' + case 'csisolatin4': + case 'iso-8859-4': + case 'iso-ir-110': + case 'iso8859-4': + case 'iso88594': + case 'iso_8859-4': + case 'iso_8859-4:1988': + case 'l4': + case 'latin4': + return 'ISO-8859-4' + case 'csisolatincyrillic': + case 'cyrillic': + case 'iso-8859-5': + case 'iso-ir-144': + case 'iso8859-5': + case 'iso88595': + case 'iso_8859-5': + case 'iso_8859-5:1988': + return 'ISO-8859-5' + case 'arabic': + case 'asmo-708': + case 'csiso88596e': + case 'csiso88596i': + case 'csisolatinarabic': + case 'ecma-114': + case 'iso-8859-6': + case 'iso-8859-6-e': + case 'iso-8859-6-i': + case 'iso-ir-127': + case 'iso8859-6': + case 'iso88596': + case 'iso_8859-6': + case 'iso_8859-6:1987': + return 'ISO-8859-6' + case 'csisolatingreek': + case 'ecma-118': + case 'elot_928': + case 'greek': + case 'greek8': + case 'iso-8859-7': + case 'iso-ir-126': + case 'iso8859-7': + case 'iso88597': + case 'iso_8859-7': + case 'iso_8859-7:1987': + case 'sun_eu_greek': + return 'ISO-8859-7' + case 'csiso88598e': + case 'csisolatinhebrew': + case 'hebrew': + case 'iso-8859-8': + case 'iso-8859-8-e': + case 'iso-ir-138': + case 'iso8859-8': + case 'iso88598': + case 'iso_8859-8': + case 'iso_8859-8:1988': + case 'visual': + return 'ISO-8859-8' + case 'csiso88598i': + case 'iso-8859-8-i': + case 'logical': + return 'ISO-8859-8-I' + case 'csisolatin6': + case 'iso-8859-10': + case 'iso-ir-157': + case 'iso8859-10': + case 'iso885910': + case 'l6': + case 'latin6': + return 'ISO-8859-10' + case 'iso-8859-13': + case 'iso8859-13': + case 'iso885913': + return 'ISO-8859-13' + case 'iso-8859-14': + case 'iso8859-14': + case 'iso885914': + return 'ISO-8859-14' + case 'csisolatin9': + case 'iso-8859-15': + case 'iso8859-15': + case 'iso885915': + case 'iso_8859-15': + case 'l9': + return 'ISO-8859-15' + case 'iso-8859-16': + return 'ISO-8859-16' + case 'cskoi8r': + case 'koi': + case 'koi8': + case 'koi8-r': + case 'koi8_r': + return 'KOI8-R' + case 'koi8-ru': + case 'koi8-u': + return 'KOI8-U' + case 'csmacintosh': + case 'mac': + case 'macintosh': + case 'x-mac-roman': + return 'macintosh' + case 'iso-8859-11': + case 'iso8859-11': + case 'iso885911': + case 'tis-620': + case 'windows-874': + return 'windows-874' + case 'cp1250': + case 'windows-1250': + case 'x-cp1250': + return 'windows-1250' + case 'cp1251': + case 'windows-1251': + case 'x-cp1251': + return 'windows-1251' + case 'ansi_x3.4-1968': + case 'ascii': + case 'cp1252': + case 'cp819': + case 'csisolatin1': + case 'ibm819': + case 'iso-8859-1': + case 'iso-ir-100': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'iso_8859-1:1987': + case 'l1': + case 'latin1': + case 'us-ascii': + case 'windows-1252': + case 'x-cp1252': + return 'windows-1252' + case 'cp1253': + case 'windows-1253': + case 'x-cp1253': + return 'windows-1253' + case 'cp1254': + case 'csisolatin5': + 
case 'iso-8859-9': + case 'iso-ir-148': + case 'iso8859-9': + case 'iso88599': + case 'iso_8859-9': + case 'iso_8859-9:1989': + case 'l5': + case 'latin5': + case 'windows-1254': + case 'x-cp1254': + return 'windows-1254' + case 'cp1255': + case 'windows-1255': + case 'x-cp1255': + return 'windows-1255' + case 'cp1256': + case 'windows-1256': + case 'x-cp1256': + return 'windows-1256' + case 'cp1257': + case 'windows-1257': + case 'x-cp1257': + return 'windows-1257' + case 'cp1258': + case 'windows-1258': + case 'x-cp1258': + return 'windows-1258' + case 'x-mac-cyrillic': + case 'x-mac-ukrainian': + return 'x-mac-cyrillic' + case 'chinese': + case 'csgb2312': + case 'csiso58gb231280': + case 'gb2312': + case 'gb_2312': + case 'gb_2312-80': + case 'gbk': + case 'iso-ir-58': + case 'x-gbk': + return 'GBK' + case 'gb18030': + return 'gb18030' + case 'big5': + case 'big5-hkscs': + case 'cn-big5': + case 'csbig5': + case 'x-x-big5': + return 'Big5' + case 'cseucpkdfmtjapanese': + case 'euc-jp': + case 'x-euc-jp': + return 'EUC-JP' + case 'csiso2022jp': + case 'iso-2022-jp': + return 'ISO-2022-JP' + case 'csshiftjis': + case 'ms932': + case 'ms_kanji': + case 'shift-jis': + case 'shift_jis': + case 'sjis': + case 'windows-31j': + case 'x-sjis': + return 'Shift_JIS' + case 'cseuckr': + case 'csksc56011987': + case 'euc-kr': + case 'iso-ir-149': + case 'korean': + case 'ks_c_5601-1987': + case 'ks_c_5601-1989': + case 'ksc5601': + case 'ksc_5601': + case 'windows-949': + return 'EUC-KR' + case 'csiso2022kr': + case 'hz-gb-2312': + case 'iso-2022-cn': + case 'iso-2022-cn-ext': + case 'iso-2022-kr': + case 'replacement': + return 'replacement' + case 'unicodefffe': + case 'utf-16be': + return 'UTF-16BE' + case 'csunicode': + case 'iso-10646-ucs-2': + case 'ucs-2': + case 'unicode': + case 'unicodefeff': + case 'utf-16': + case 'utf-16le': + return 'UTF-16LE' + case 'x-user-defined': + return 'x-user-defined' + default: return 'failure' + } +} + +module.exports = { + getEncoding +} diff --git a/lib/fileapi/filereader.js b/lib/fileapi/filereader.js new file mode 100644 index 0000000..cd36a22 --- /dev/null +++ b/lib/fileapi/filereader.js @@ -0,0 +1,344 @@ +'use strict' + +const { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} = require('./util') +const { + kState, + kError, + kResult, + kEvents, + kAborted +} = require('./symbols') +const { webidl } = require('../fetch/webidl') +const { kEnumerableProperty } = require('../core/util') + +class FileReader extends EventTarget { + constructor () { + super() + + this[kState] = 'empty' + this[kResult] = null + this[kError] = null + this[kEvents] = { + loadend: null, + error: null, + abort: null, + load: null, + progress: null, + loadstart: null + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer + * @param {import('buffer').Blob} blob + */ + readAsArrayBuffer (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsArrayBuffer(blob) method, when invoked, + // must initiate a read operation for blob with ArrayBuffer. 
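    // Editor's note (annotation, not part of the upstream patch): consumer-side sketch,
    // assuming a global Blob (available in recent Node.js versions):
    //   const fr = new FileReader()
    //   fr.onload = () => { console.log(fr.result instanceof ArrayBuffer) } // true
    //   fr.readAsArrayBuffer(new Blob(['hello']))
    // On success the events fire as loadstart -> (progress) -> load -> loadend.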
+ readOperation(this, blob, 'ArrayBuffer') + } + + /** + * @see https://w3c.github.io/FileAPI/#readAsBinaryString + * @param {import('buffer').Blob} blob + */ + readAsBinaryString (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsBinaryString(blob) method, when invoked, + // must initiate a read operation for blob with BinaryString. + readOperation(this, blob, 'BinaryString') + } + + /** + * @see https://w3c.github.io/FileAPI/#readAsDataText + * @param {import('buffer').Blob} blob + * @param {string?} encoding + */ + readAsText (blob, encoding = undefined) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + if (encoding !== undefined) { + encoding = webidl.converters.DOMString(encoding) + } + + // The readAsText(blob, encoding) method, when invoked, + // must initiate a read operation for blob with Text and encoding. + readOperation(this, blob, 'Text', encoding) + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL + * @param {import('buffer').Blob} blob + */ + readAsDataURL (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsDataURL(blob) method, when invoked, must + // initiate a read operation for blob with DataURL. + readOperation(this, blob, 'DataURL') + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-abort + */ + abort () { + // 1. If this's state is "empty" or if this's state is + // "done" set this's result to null and terminate + // this algorithm. + if (this[kState] === 'empty' || this[kState] === 'done') { + this[kResult] = null + return + } + + // 2. If this's state is "loading" set this's state to + // "done" and set this's result to null. + if (this[kState] === 'loading') { + this[kState] = 'done' + this[kResult] = null + } + + // 3. If there are any tasks from this on the file reading + // task source in an affiliated task queue, then remove + // those tasks from that task queue. + this[kAborted] = true + + // 4. Terminate the algorithm for the read method being processed. + // TODO + + // 5. Fire a progress event called abort at this. + fireAProgressEvent('abort', this) + + // 6. If this's state is not "loading", fire a progress + // event called loadend at this. + if (this[kState] !== 'loading') { + fireAProgressEvent('loadend', this) + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate + */ + get readyState () { + webidl.brandCheck(this, FileReader) + + switch (this[kState]) { + case 'empty': return this.EMPTY + case 'loading': return this.LOADING + case 'done': return this.DONE + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-result + */ + get result () { + webidl.brandCheck(this, FileReader) + + // The result attribute’s getter, when invoked, must return + // this's result. + return this[kResult] + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-error + */ + get error () { + webidl.brandCheck(this, FileReader) + + // The error attribute’s getter, when invoked, must return + // this's error. 
+ return this[kError] + } + + get onloadend () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadend + } + + set onloadend (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadend) { + this.removeEventListener('loadend', this[kEvents].loadend) + } + + if (typeof fn === 'function') { + this[kEvents].loadend = fn + this.addEventListener('loadend', fn) + } else { + this[kEvents].loadend = null + } + } + + get onerror () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].error + } + + set onerror (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].error) { + this.removeEventListener('error', this[kEvents].error) + } + + if (typeof fn === 'function') { + this[kEvents].error = fn + this.addEventListener('error', fn) + } else { + this[kEvents].error = null + } + } + + get onloadstart () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadstart + } + + set onloadstart (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadstart) { + this.removeEventListener('loadstart', this[kEvents].loadstart) + } + + if (typeof fn === 'function') { + this[kEvents].loadstart = fn + this.addEventListener('loadstart', fn) + } else { + this[kEvents].loadstart = null + } + } + + get onprogress () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].progress + } + + set onprogress (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].progress) { + this.removeEventListener('progress', this[kEvents].progress) + } + + if (typeof fn === 'function') { + this[kEvents].progress = fn + this.addEventListener('progress', fn) + } else { + this[kEvents].progress = null + } + } + + get onload () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].load + } + + set onload (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].load) { + this.removeEventListener('load', this[kEvents].load) + } + + if (typeof fn === 'function') { + this[kEvents].load = fn + this.addEventListener('load', fn) + } else { + this[kEvents].load = null + } + } + + get onabort () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].abort + } + + set onabort (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].abort) { + this.removeEventListener('abort', this[kEvents].abort) + } + + if (typeof fn === 'function') { + this[kEvents].abort = fn + this.addEventListener('abort', fn) + } else { + this[kEvents].abort = null + } + } +} + +// https://w3c.github.io/FileAPI/#dom-filereader-empty +FileReader.EMPTY = FileReader.prototype.EMPTY = 0 +// https://w3c.github.io/FileAPI/#dom-filereader-loading +FileReader.LOADING = FileReader.prototype.LOADING = 1 +// https://w3c.github.io/FileAPI/#dom-filereader-done +FileReader.DONE = FileReader.prototype.DONE = 2 + +Object.defineProperties(FileReader.prototype, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors, + readAsArrayBuffer: kEnumerableProperty, + readAsBinaryString: kEnumerableProperty, + readAsText: kEnumerableProperty, + readAsDataURL: kEnumerableProperty, + abort: kEnumerableProperty, + readyState: kEnumerableProperty, + result: kEnumerableProperty, + error: kEnumerableProperty, + onloadstart: kEnumerableProperty, + onprogress: kEnumerableProperty, + onload: kEnumerableProperty, + onabort: kEnumerableProperty, + onerror: kEnumerableProperty, + onloadend: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'FileReader', + writable: false, + enumerable: false, + configurable: true + } 
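  // Editor's note (annotation, not part of the upstream patch): kEnumerableProperty marks the
  // listed members enumerable, and the Symbol.toStringTag descriptor above yields the class string:
  //   Object.prototype.toString.call(new FileReader()) // => '[object FileReader]'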
+}) + +Object.defineProperties(FileReader, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors +}) + +module.exports = { + FileReader +} diff --git a/lib/fileapi/progressevent.js b/lib/fileapi/progressevent.js new file mode 100644 index 0000000..778cf22 --- /dev/null +++ b/lib/fileapi/progressevent.js @@ -0,0 +1,78 @@ +'use strict' + +const { webidl } = require('../fetch/webidl') + +const kState = Symbol('ProgressEvent state') + +/** + * @see https://xhr.spec.whatwg.org/#progressevent + */ +class ProgressEvent extends Event { + constructor (type, eventInitDict = {}) { + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) + + super(type, eventInitDict) + + this[kState] = { + lengthComputable: eventInitDict.lengthComputable, + loaded: eventInitDict.loaded, + total: eventInitDict.total + } + } + + get lengthComputable () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].lengthComputable + } + + get loaded () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].loaded + } + + get total () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].total + } +} + +webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ + { + key: 'lengthComputable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'loaded', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'total', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +]) + +module.exports = { + ProgressEvent +} diff --git a/lib/fileapi/symbols.js b/lib/fileapi/symbols.js new file mode 100644 index 0000000..dd11746 --- /dev/null +++ b/lib/fileapi/symbols.js @@ -0,0 +1,10 @@ +'use strict' + +module.exports = { + kState: Symbol('FileReader state'), + kResult: Symbol('FileReader result'), + kError: Symbol('FileReader error'), + kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), + kEvents: Symbol('FileReader events'), + kAborted: Symbol('FileReader aborted') +} diff --git a/lib/fileapi/util.js b/lib/fileapi/util.js new file mode 100644 index 0000000..1d10899 --- /dev/null +++ b/lib/fileapi/util.js @@ -0,0 +1,392 @@ +'use strict' + +const { + kState, + kError, + kResult, + kAborted, + kLastProgressEventFired +} = require('./symbols') +const { ProgressEvent } = require('./progressevent') +const { getEncoding } = require('./encoding') +const { DOMException } = require('../fetch/constants') +const { serializeAMimeType, parseMIMEType } = require('../fetch/dataURL') +const { types } = require('util') +const { StringDecoder } = require('string_decoder') +const { btoa } = require('buffer') + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +/** + * @see https://w3c.github.io/FileAPI/#readOperation + * @param {import('./filereader').FileReader} fr + * @param {import('buffer').Blob} blob + * @param {string} type + * @param {string?} encodingName + */ +function readOperation (fr, blob, type, encodingName) { + // 1. If fr’s state is "loading", throw an InvalidStateError + // DOMException. 
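  // Editor's note (annotation, not part of the upstream patch): observable effect (sketch) --
  //   fr.readAsText(blob)
  //   fr.readAsText(blob) // throws DOMException('Invalid state', 'InvalidStateError')
  // because the first call leaves the reader in the 'loading' state until its read settles.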
+ if (fr[kState] === 'loading') { + throw new DOMException('Invalid state', 'InvalidStateError') + } + + // 2. Set fr’s state to "loading". + fr[kState] = 'loading' + + // 3. Set fr’s result to null. + fr[kResult] = null + + // 4. Set fr’s error to null. + fr[kError] = null + + // 5. Let stream be the result of calling get stream on blob. + /** @type {import('stream/web').ReadableStream} */ + const stream = blob.stream() + + // 6. Let reader be the result of getting a reader from stream. + const reader = stream.getReader() + + // 7. Let bytes be an empty byte sequence. + /** @type {Uint8Array[]} */ + const bytes = [] + + // 8. Let chunkPromise be the result of reading a chunk from + // stream with reader. + let chunkPromise = reader.read() + + // 9. Let isFirstChunk be true. + let isFirstChunk = true + + // 10. In parallel, while true: + // Note: "In parallel" just means non-blocking + // Note 2: readOperation itself cannot be async as double + // reading the body would then reject the promise, instead + // of throwing an error. + ;(async () => { + while (!fr[kAborted]) { + // 1. Wait for chunkPromise to be fulfilled or rejected. + try { + const { done, value } = await chunkPromise + + // 2. If chunkPromise is fulfilled, and isFirstChunk is + // true, queue a task to fire a progress event called + // loadstart at fr. + if (isFirstChunk && !fr[kAborted]) { + queueMicrotask(() => { + fireAProgressEvent('loadstart', fr) + }) + } + + // 3. Set isFirstChunk to false. + isFirstChunk = false + + // 4. If chunkPromise is fulfilled with an object whose + // done property is false and whose value property is + // a Uint8Array object, run these steps: + if (!done && types.isUint8Array(value)) { + // 1. Let bs be the byte sequence represented by the + // Uint8Array object. + + // 2. Append bs to bytes. + bytes.push(value) + + // 3. If roughly 50ms have passed since these steps + // were last invoked, queue a task to fire a + // progress event called progress at fr. + if ( + ( + fr[kLastProgressEventFired] === undefined || + Date.now() - fr[kLastProgressEventFired] >= 50 + ) && + !fr[kAborted] + ) { + fr[kLastProgressEventFired] = Date.now() + queueMicrotask(() => { + fireAProgressEvent('progress', fr) + }) + } + + // 4. Set chunkPromise to the result of reading a + // chunk from stream with reader. + chunkPromise = reader.read() + } else if (done) { + // 5. Otherwise, if chunkPromise is fulfilled with an + // object whose done property is true, queue a task + // to run the following steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Let result be the result of package data given + // bytes, type, blob’s type, and encodingName. + try { + const result = packageData(bytes, type, blob.type, encodingName) + + // 4. Else: + + if (fr[kAborted]) { + return + } + + // 1. Set fr’s result to result. + fr[kResult] = result + + // 2. Fire a progress event called load at the fr. + fireAProgressEvent('load', fr) + } catch (error) { + // 3. If package data threw an exception error: + + // 1. Set fr’s error to error. + fr[kError] = error + + // 2. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + } + + // 5. If fr’s state is not "loading", fire a progress + // event called loadend at the fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break + } + } catch (error) { + if (fr[kAborted]) { + return + } + + // 6. 
Otherwise, if chunkPromise is rejected with an + // error error, queue a task to run the following + // steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Set fr’s error to error. + fr[kError] = error + + // 3. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + + // 4. If fr’s state is not "loading", fire a progress + // event called loadend at fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break + } + } + })() +} + +/** + * @see https://w3c.github.io/FileAPI/#fire-a-progress-event + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e The name of the event + * @param {import('./filereader').FileReader} reader + */ +function fireAProgressEvent (e, reader) { + // The progress event e does not bubble. e.bubbles must be false + // The progress event e is NOT cancelable. e.cancelable must be false + const event = new ProgressEvent(e, { + bubbles: false, + cancelable: false + }) + + reader.dispatchEvent(event) +} + +/** + * @see https://w3c.github.io/FileAPI/#blob-package-data + * @param {Uint8Array[]} bytes + * @param {string} type + * @param {string?} mimeType + * @param {string?} encodingName + */ +function packageData (bytes, type, mimeType, encodingName) { + // 1. A Blob has an associated package data algorithm, given + // bytes, a type, a optional mimeType, and a optional + // encodingName, which switches on type and runs the + // associated steps: + + switch (type) { + case 'DataURL': { + // 1. Return bytes as a DataURL [RFC2397] subject to + // the considerations below: + // * Use mimeType as part of the Data URL if it is + // available in keeping with the Data URL + // specification [RFC2397]. + // * If mimeType is not available return a Data URL + // without a media-type. [RFC2397]. + + // https://datatracker.ietf.org/doc/html/rfc2397#section-3 + // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + // mediatype := [ type "/" subtype ] *( ";" parameter ) + // data := *urlchar + // parameter := attribute "=" value + let dataURL = 'data:' + + const parsed = parseMIMEType(mimeType || 'application/octet-stream') + + if (parsed !== 'failure') { + dataURL += serializeAMimeType(parsed) + } + + dataURL += ';base64,' + + const decoder = new StringDecoder('latin1') + + for (const chunk of bytes) { + dataURL += btoa(decoder.write(chunk)) + } + + dataURL += btoa(decoder.end()) + + return dataURL + } + case 'Text': { + // 1. Let encoding be failure + let encoding = 'failure' + + // 2. If the encodingName is present, set encoding to the + // result of getting an encoding from encodingName. + if (encodingName) { + encoding = getEncoding(encodingName) + } + + // 3. If encoding is failure, and mimeType is present: + if (encoding === 'failure' && mimeType) { + // 1. Let type be the result of parse a MIME type + // given mimeType. + const type = parseMIMEType(mimeType) + + // 2. If type is not failure, set encoding to the result + // of getting an encoding from type’s parameters["charset"]. + if (type !== 'failure') { + encoding = getEncoding(type.parameters.get('charset')) + } + } + + // 4. If encoding is failure, then set encoding to UTF-8. + if (encoding === 'failure') { + encoding = 'UTF-8' + } + + // 5. Decode bytes using fallback encoding encoding, and + // return the result. + return decode(bytes, encoding) + } + case 'ArrayBuffer': { + // Return a new ArrayBuffer whose contents are bytes. 
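      // Editor's note (annotation, not part of the upstream patch): combineByteSequences,
      // defined further down in this file, concatenates the collected chunks, e.g.
      //   combineByteSequences([Uint8Array.of(1, 2), Uint8Array.of(3)])
      //   // => Uint8Array(3) [ 1, 2, 3 ]; its .buffer is the ArrayBuffer returned below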
+ const sequence = combineByteSequences(bytes) + + return sequence.buffer + } + case 'BinaryString': { + // Return bytes as a binary string, in which every byte + // is represented by a code unit of equal value [0..255]. + let binaryString = '' + + const decoder = new StringDecoder('latin1') + + for (const chunk of bytes) { + binaryString += decoder.write(chunk) + } + + binaryString += decoder.end() + + return binaryString + } + } +} + +/** + * @see https://encoding.spec.whatwg.org/#decode + * @param {Uint8Array[]} ioQueue + * @param {string} encoding + */ +function decode (ioQueue, encoding) { + const bytes = combineByteSequences(ioQueue) + + // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. + const BOMEncoding = BOMSniffing(bytes) + + let slice = 0 + + // 2. If BOMEncoding is non-null: + if (BOMEncoding !== null) { + // 1. Set encoding to BOMEncoding. + encoding = BOMEncoding + + // 2. Read three bytes from ioQueue, if BOMEncoding is + // UTF-8; otherwise read two bytes. + // (Do nothing with those bytes.) + slice = BOMEncoding === 'UTF-8' ? 3 : 2 + } + + // 3. Process a queue with an instance of encoding’s + // decoder, ioQueue, output, and "replacement". + + // 4. Return output. + + const sliced = bytes.slice(slice) + return new TextDecoder(encoding).decode(sliced) +} + +/** + * @see https://encoding.spec.whatwg.org/#bom-sniff + * @param {Uint8Array} ioQueue + */ +function BOMSniffing (ioQueue) { + // 1. Let BOM be the result of peeking 3 bytes from ioQueue, + // converted to a byte sequence. + const [a, b, c] = ioQueue + + // 2. For each of the rows in the table below, starting with + // the first one and going down, if BOM starts with the + // bytes given in the first column, then return the + // encoding given in the cell in the second column of that + // row. Otherwise, return null. + if (a === 0xEF && b === 0xBB && c === 0xBF) { + return 'UTF-8' + } else if (a === 0xFE && b === 0xFF) { + return 'UTF-16BE' + } else if (a === 0xFF && b === 0xFE) { + return 'UTF-16LE' + } + + return null +} + +/** + * @param {Uint8Array[]} sequences + */ +function combineByteSequences (sequences) { + const size = sequences.reduce((a, b) => { + return a + b.byteLength + }, 0) + + let offset = 0 + + return sequences.reduce((a, b) => { + a.set(b, offset) + offset += b.byteLength + return a + }, new Uint8Array(size)) +} + +module.exports = { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} diff --git a/lib/global.js b/lib/global.js new file mode 100644 index 0000000..18bfd73 --- /dev/null +++ b/lib/global.js @@ -0,0 +1,32 @@ +'use strict' + +// We include a version number for the Dispatcher API. In case of breaking changes, +// this version number must be increased to avoid conflicts. 
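// Editor's note (annotation, not part of the upstream patch): usage sketch through the
// public undici exports, assuming they re-export these helpers:
//   const { setGlobalDispatcher, getGlobalDispatcher, Agent } = require('undici')
//   setGlobalDispatcher(new Agent({ connections: 10 }))
//   getGlobalDispatcher() // the Agent above; used by request/fetch when no dispatcher is passed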
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1') +const { InvalidArgumentError } = require('./core/errors') +const Agent = require('./agent') + +if (getGlobalDispatcher() === undefined) { + setGlobalDispatcher(new Agent()) +} + +function setGlobalDispatcher (agent) { + if (!agent || typeof agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument agent must implement Agent') + } + Object.defineProperty(globalThis, globalDispatcher, { + value: agent, + writable: true, + enumerable: false, + configurable: false + }) +} + +function getGlobalDispatcher () { + return globalThis[globalDispatcher] +} + +module.exports = { + setGlobalDispatcher, + getGlobalDispatcher +} diff --git a/lib/handler/DecoratorHandler.js b/lib/handler/DecoratorHandler.js new file mode 100644 index 0000000..9d70a76 --- /dev/null +++ b/lib/handler/DecoratorHandler.js @@ -0,0 +1,35 @@ +'use strict' + +module.exports = class DecoratorHandler { + constructor (handler) { + this.handler = handler + } + + onConnect (...args) { + return this.handler.onConnect(...args) + } + + onError (...args) { + return this.handler.onError(...args) + } + + onUpgrade (...args) { + return this.handler.onUpgrade(...args) + } + + onHeaders (...args) { + return this.handler.onHeaders(...args) + } + + onData (...args) { + return this.handler.onData(...args) + } + + onComplete (...args) { + return this.handler.onComplete(...args) + } + + onBodySent (...args) { + return this.handler.onBodySent(...args) + } +} diff --git a/lib/handler/RedirectHandler.js b/lib/handler/RedirectHandler.js new file mode 100644 index 0000000..baca27e --- /dev/null +++ b/lib/handler/RedirectHandler.js @@ -0,0 +1,216 @@ +'use strict' + +const util = require('../core/util') +const { kBodyUsed } = require('../core/symbols') +const assert = require('assert') +const { InvalidArgumentError } = require('../core/errors') +const EE = require('events') + +const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] + +const kBody = Symbol('body') + +class BodyAsyncIterable { + constructor (body) { + this[kBody] = body + this[kBodyUsed] = false + } + + async * [Symbol.asyncIterator] () { + assert(!this[kBodyUsed], 'disturbed') + this[kBodyUsed] = true + yield * this[kBody] + } +} + +class RedirectHandler { + constructor (dispatch, maxRedirections, opts, handler) { + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + util.validateHandler(handler, opts.method, opts.upgrade) + + this.dispatch = dispatch + this.location = null + this.abort = null + this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy + this.maxRedirections = maxRedirections + this.handler = handler + this.history = [] + + if (util.isStream(this.opts.body)) { + // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp + // so that it can be dispatched again? + // TODO (fix): Do we need 100-expect support to provide a way to do this properly? 
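    // Editor's note (annotation, not part of the upstream patch): a one-shot stream body
    // cannot be replayed once a redirect arrives, so requests that may redirect generally
    // need a replayable body, e.g. (sketch via undici's request()):
    //   await request('http://example.invalid/old', { method: 'POST', body: 'payload', maxRedirections: 1 })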
+ if (util.bodyLength(this.opts.body) === 0) { + this.opts.body + .on('data', function () { + assert(false) + }) + } + + if (typeof this.opts.body.readableDidRead !== 'boolean') { + this.opts.body[kBodyUsed] = false + EE.prototype.on.call(this.opts.body, 'data', function () { + this[kBodyUsed] = true + }) + } + } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { + // TODO (fix): We can't access ReadableStream internal state + // to determine whether or not it has been disturbed. This is just + // a workaround. + this.opts.body = new BodyAsyncIterable(this.opts.body) + } else if ( + this.opts.body && + typeof this.opts.body !== 'string' && + !ArrayBuffer.isView(this.opts.body) && + util.isIterable(this.opts.body) + ) { + // TODO: Should we allow re-using iterable if !this.opts.idempotent + // or through some other flag? + this.opts.body = new BodyAsyncIterable(this.opts.body) + } + } + + onConnect (abort) { + this.abort = abort + this.handler.onConnect(abort, { history: this.history }) + } + + onUpgrade (statusCode, headers, socket) { + this.handler.onUpgrade(statusCode, headers, socket) + } + + onError (error) { + this.handler.onError(error) + } + + onHeaders (statusCode, headers, resume, statusText) { + this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) + ? null + : parseLocation(statusCode, headers) + + if (this.opts.origin) { + this.history.push(new URL(this.opts.path, this.opts.origin)) + } + + if (!this.location) { + return this.handler.onHeaders(statusCode, headers, resume, statusText) + } + + const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) + const path = search ? `${pathname}${search}` : pathname + + // Remove headers referring to the original URL. + // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. + // https://tools.ietf.org/html/rfc7231#section-6.4 + this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) + this.opts.path = path + this.opts.origin = origin + this.opts.maxRedirections = 0 + this.opts.query = null + + // https://tools.ietf.org/html/rfc7231#section-6.4.4 + // In case of HTTP 303, always replace method to be either HEAD or GET + if (statusCode === 303 && this.opts.method !== 'HEAD') { + this.opts.method = 'GET' + this.opts.body = null + } + } + + onData (chunk) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response bodies. + + Redirection is used to serve the requested resource from another URL, so it is assumes that + no body is generated (and thus can be ignored). Even though generating a body is not prohibited. + + For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually + (which means it's optional and not mandated) contain just an hyperlink to the value of + the Location response header, so the body can be ignored safely. + + For status 300, which is "Multiple Choices", the spec mentions both generating a Location + response header AND a response body with the other possible location to follow. + Since the spec explicitily chooses not to specify a format for such body and leave it to + servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. 
+ */ + } else { + return this.handler.onData(chunk) + } + } + + onComplete (trailers) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections + and neither are useful if present. + + See comment on onData method above for more detailed informations. + */ + + this.location = null + this.abort = null + + this.dispatch(this.opts, this) + } else { + this.handler.onComplete(trailers) + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) { + this.handler.onBodySent(chunk) + } + } +} + +function parseLocation (statusCode, headers) { + if (redirectableStatusCodes.indexOf(statusCode) === -1) { + return null + } + + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toString().toLowerCase() === 'location') { + return headers[i + 1] + } + } +} + +// https://tools.ietf.org/html/rfc7231#section-6.4.4 +function shouldRemoveHeader (header, removeContent, unknownOrigin) { + return ( + (header.length === 4 && header.toString().toLowerCase() === 'host') || + (removeContent && header.toString().toLowerCase().indexOf('content-') === 0) || + (unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') || + (unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie') + ) +} + +// https://tools.ietf.org/html/rfc7231#section-6.4 +function cleanRequestHeaders (headers, removeContent, unknownOrigin) { + const ret = [] + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { + ret.push(headers[i], headers[i + 1]) + } + } + } else if (headers && typeof headers === 'object') { + for (const key of Object.keys(headers)) { + if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { + ret.push(key, headers[key]) + } + } + } else { + assert(headers == null, 'headers must be an object or an array') + } + return ret +} + +module.exports = RedirectHandler diff --git a/lib/handler/RetryHandler.js b/lib/handler/RetryHandler.js new file mode 100644 index 0000000..3710447 --- /dev/null +++ b/lib/handler/RetryHandler.js @@ -0,0 +1,336 @@ +const assert = require('assert') + +const { kRetryHandlerDefaultRetry } = require('../core/symbols') +const { RequestRetryError } = require('../core/errors') +const { isDisturbed, parseHeaders, parseRangeHeader } = require('../core/util') + +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? {} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? 
[500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? [ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] + } + + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) + } + + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } + + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } + + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } + + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state + + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout + + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } + + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return + } + + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } + + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } + + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } + + const retryTimeout = + retryAfterHeader > 0 + ? 
Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + + state.currentTimeout = retryTimeout + + setTimeout(() => cb(null), retryTimeout) + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) + + this.retryCount += 1 + + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null + + if (statusCode !== 206) { + return true + } + + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + const { start, size, end = size } = contentRange + + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') + + this.resume = resume + return true + } + + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) + + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const { start, size, end = size } = range + + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) + + this.start = start + this.end = end + } + + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } + + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) + + this.resume = resume + this.etag = headers.etag != null ? headers.etag : null + + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + + this.abort(err) + + return false + } + + onData (chunk) { + this.start += chunk.length + + return this.handler.onData(chunk) + } + + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) + } + + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) + + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? 
''}` + } + } + } + + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) + } + } + } +} + +module.exports = RetryHandler diff --git a/lib/interceptor/redirectInterceptor.js b/lib/interceptor/redirectInterceptor.js new file mode 100644 index 0000000..7cc035e --- /dev/null +++ b/lib/interceptor/redirectInterceptor.js @@ -0,0 +1,21 @@ +'use strict' + +const RedirectHandler = require('../handler/RedirectHandler') + +function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { + return (dispatch) => { + return function Intercept (opts, handler) { + const { maxRedirections = defaultMaxRedirections } = opts + + if (!maxRedirections) { + return dispatch(opts, handler) + } + + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) + opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. + return dispatch(opts, redirectHandler) + } + } +} + +module.exports = createRedirectInterceptor diff --git a/lib/llhttp/constants.d.ts b/lib/llhttp/constants.d.ts new file mode 100644 index 0000000..b75ab1b --- /dev/null +++ b/lib/llhttp/constants.d.ts @@ -0,0 +1,199 @@ +import { IEnumMap } from './utils'; +export declare type HTTPMode = 'loose' | 'strict'; +export declare enum ERROR { + OK = 0, + INTERNAL = 1, + STRICT = 2, + LF_EXPECTED = 3, + UNEXPECTED_CONTENT_LENGTH = 4, + CLOSED_CONNECTION = 5, + INVALID_METHOD = 6, + INVALID_URL = 7, + INVALID_CONSTANT = 8, + INVALID_VERSION = 9, + INVALID_HEADER_TOKEN = 10, + INVALID_CONTENT_LENGTH = 11, + INVALID_CHUNK_SIZE = 12, + INVALID_STATUS = 13, + INVALID_EOF_STATE = 14, + INVALID_TRANSFER_ENCODING = 15, + CB_MESSAGE_BEGIN = 16, + CB_HEADERS_COMPLETE = 17, + CB_MESSAGE_COMPLETE = 18, + CB_CHUNK_HEADER = 19, + CB_CHUNK_COMPLETE = 20, + PAUSED = 21, + PAUSED_UPGRADE = 22, + PAUSED_H2_UPGRADE = 23, + USER = 24 +} +export declare enum TYPE { + BOTH = 0, + REQUEST = 1, + RESPONSE = 2 +} +export declare enum FLAGS { + CONNECTION_KEEP_ALIVE = 1, + CONNECTION_CLOSE = 2, + CONNECTION_UPGRADE = 4, + CHUNKED = 8, + UPGRADE = 16, + CONTENT_LENGTH = 32, + SKIPBODY = 64, + TRAILING = 128, + TRANSFER_ENCODING = 512 +} +export declare enum LENIENT_FLAGS { + HEADERS = 1, + CHUNKED_LENGTH = 2, + KEEP_ALIVE = 4 +} +export declare enum METHODS { + DELETE = 0, + GET = 1, + HEAD = 2, + POST = 3, + PUT = 4, + CONNECT = 5, + OPTIONS = 6, + TRACE = 7, + COPY = 8, + LOCK = 9, + MKCOL = 10, + MOVE = 11, + PROPFIND = 12, + PROPPATCH = 13, + SEARCH = 14, + UNLOCK = 15, + BIND = 16, + REBIND = 17, + UNBIND = 18, + ACL = 19, + REPORT = 20, + MKACTIVITY = 21, + CHECKOUT = 22, + MERGE = 23, + 'M-SEARCH' = 24, + NOTIFY = 25, + SUBSCRIBE = 26, + UNSUBSCRIBE = 27, + PATCH = 28, + PURGE = 29, + MKCALENDAR = 30, + LINK = 31, + UNLINK = 32, + SOURCE = 33, + PRI = 34, + DESCRIBE = 35, + ANNOUNCE = 36, + SETUP = 37, + PLAY = 38, + PAUSE = 39, + TEARDOWN = 40, + GET_PARAMETER = 41, + SET_PARAMETER = 42, + REDIRECT = 43, + RECORD = 44, + FLUSH = 45 +} +export declare const METHODS_HTTP: METHODS[]; +export declare const METHODS_ICE: METHODS[]; +export declare const METHODS_RTSP: METHODS[]; +export declare const METHOD_MAP: IEnumMap; +export declare const H_METHOD_MAP: IEnumMap; +export declare enum FINISH { + SAFE = 0, + SAFE_WITH_CB = 1, + UNSAFE = 2 +} +export declare type CharList = Array; +export declare const ALPHA: CharList; +export declare const NUM_MAP: { + 0: number; + 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; +}; 
+export declare const HEX_MAP: { + 0: number; + 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; + A: number; + B: number; + C: number; + D: number; + E: number; + F: number; + a: number; + b: number; + c: number; + d: number; + e: number; + f: number; +}; +export declare const NUM: CharList; +export declare const ALPHANUM: CharList; +export declare const MARK: CharList; +export declare const USERINFO_CHARS: CharList; +export declare const STRICT_URL_CHAR: CharList; +export declare const URL_CHAR: CharList; +export declare const HEX: CharList; +export declare const STRICT_TOKEN: CharList; +export declare const TOKEN: CharList; +export declare const HEADER_CHARS: CharList; +export declare const CONNECTION_TOKEN_CHARS: CharList; +export declare const MAJOR: { + 0: number; + 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; +}; +export declare const MINOR: { + 0: number; + 1: number; + 2: number; + 3: number; + 4: number; + 5: number; + 6: number; + 7: number; + 8: number; + 9: number; +}; +export declare enum HEADER_STATE { + GENERAL = 0, + CONNECTION = 1, + CONTENT_LENGTH = 2, + TRANSFER_ENCODING = 3, + UPGRADE = 4, + CONNECTION_KEEP_ALIVE = 5, + CONNECTION_CLOSE = 6, + CONNECTION_UPGRADE = 7, + TRANSFER_ENCODING_CHUNKED = 8 +} +export declare const SPECIAL_HEADERS: { + connection: HEADER_STATE; + 'content-length': HEADER_STATE; + 'proxy-connection': HEADER_STATE; + 'transfer-encoding': HEADER_STATE; + upgrade: HEADER_STATE; +}; diff --git a/lib/llhttp/constants.js b/lib/llhttp/constants.js new file mode 100644 index 0000000..fb0b5a2 --- /dev/null +++ b/lib/llhttp/constants.js @@ -0,0 +1,278 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; +const utils_1 = require("./utils"); +// C headers +var ERROR; +(function (ERROR) { + ERROR[ERROR["OK"] = 0] = "OK"; + ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; + ERROR[ERROR["STRICT"] = 2] = "STRICT"; + ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; + ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; + ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; + ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; + ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; + ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; + ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; + ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; + ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; + ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; + ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; + ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; + ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; + ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; + ERROR[ERROR["CB_HEADERS_COMPLETE"] = 
17] = "CB_HEADERS_COMPLETE"; + ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; + ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; + ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; + ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; + ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; + ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; + ERROR[ERROR["USER"] = 24] = "USER"; +})(ERROR = exports.ERROR || (exports.ERROR = {})); +var TYPE; +(function (TYPE) { + TYPE[TYPE["BOTH"] = 0] = "BOTH"; + TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; + TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; +})(TYPE = exports.TYPE || (exports.TYPE = {})); +var FLAGS; +(function (FLAGS) { + FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE"; + FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; + FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; + FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; + FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; + FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; + FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; + FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; + // 1 << 8 is unused + FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; +})(FLAGS = exports.FLAGS || (exports.FLAGS = {})); +var LENIENT_FLAGS; +(function (LENIENT_FLAGS) { + LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; + LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; + LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; +})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); +var METHODS; +(function (METHODS) { + METHODS[METHODS["DELETE"] = 0] = "DELETE"; + METHODS[METHODS["GET"] = 1] = "GET"; + METHODS[METHODS["HEAD"] = 2] = "HEAD"; + METHODS[METHODS["POST"] = 3] = "POST"; + METHODS[METHODS["PUT"] = 4] = "PUT"; + /* pathological */ + METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; + METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; + METHODS[METHODS["TRACE"] = 7] = "TRACE"; + /* WebDAV */ + METHODS[METHODS["COPY"] = 8] = "COPY"; + METHODS[METHODS["LOCK"] = 9] = "LOCK"; + METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; + METHODS[METHODS["MOVE"] = 11] = "MOVE"; + METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; + METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; + METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; + METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; + METHODS[METHODS["BIND"] = 16] = "BIND"; + METHODS[METHODS["REBIND"] = 17] = "REBIND"; + METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; + METHODS[METHODS["ACL"] = 19] = "ACL"; + /* subversion */ + METHODS[METHODS["REPORT"] = 20] = "REPORT"; + METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; + METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; + METHODS[METHODS["MERGE"] = 23] = "MERGE"; + /* upnp */ + METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; + METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; + METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; + METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; + /* RFC-5789 */ + METHODS[METHODS["PATCH"] = 28] = "PATCH"; + METHODS[METHODS["PURGE"] = 29] = "PURGE"; + /* CalDAV */ + METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; + /* RFC-2068, section 19.6.1.2 */ + METHODS[METHODS["LINK"] = 31] = "LINK"; + METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; + /* icecast */ + METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; + /* RFC-7540, section 11.6 */ + METHODS[METHODS["PRI"] = 34] = "PRI"; + /* RFC-2326 RTSP */ + METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; + METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; + 
METHODS[METHODS["SETUP"] = 37] = "SETUP"; + METHODS[METHODS["PLAY"] = 38] = "PLAY"; + METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; + METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; + METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; + METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; + METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; + METHODS[METHODS["RECORD"] = 44] = "RECORD"; + /* RAOP */ + METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; +})(METHODS = exports.METHODS || (exports.METHODS = {})); +exports.METHODS_HTTP = [ + METHODS.DELETE, + METHODS.GET, + METHODS.HEAD, + METHODS.POST, + METHODS.PUT, + METHODS.CONNECT, + METHODS.OPTIONS, + METHODS.TRACE, + METHODS.COPY, + METHODS.LOCK, + METHODS.MKCOL, + METHODS.MOVE, + METHODS.PROPFIND, + METHODS.PROPPATCH, + METHODS.SEARCH, + METHODS.UNLOCK, + METHODS.BIND, + METHODS.REBIND, + METHODS.UNBIND, + METHODS.ACL, + METHODS.REPORT, + METHODS.MKACTIVITY, + METHODS.CHECKOUT, + METHODS.MERGE, + METHODS['M-SEARCH'], + METHODS.NOTIFY, + METHODS.SUBSCRIBE, + METHODS.UNSUBSCRIBE, + METHODS.PATCH, + METHODS.PURGE, + METHODS.MKCALENDAR, + METHODS.LINK, + METHODS.UNLINK, + METHODS.PRI, + // TODO(indutny): should we allow it with HTTP? + METHODS.SOURCE, +]; +exports.METHODS_ICE = [ + METHODS.SOURCE, +]; +exports.METHODS_RTSP = [ + METHODS.OPTIONS, + METHODS.DESCRIBE, + METHODS.ANNOUNCE, + METHODS.SETUP, + METHODS.PLAY, + METHODS.PAUSE, + METHODS.TEARDOWN, + METHODS.GET_PARAMETER, + METHODS.SET_PARAMETER, + METHODS.REDIRECT, + METHODS.RECORD, + METHODS.FLUSH, + // For AirPlay + METHODS.GET, + METHODS.POST, +]; +exports.METHOD_MAP = utils_1.enumToMap(METHODS); +exports.H_METHOD_MAP = {}; +Object.keys(exports.METHOD_MAP).forEach((key) => { + if (/^H/.test(key)) { + exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key]; + } +}); +var FINISH; +(function (FINISH) { + FINISH[FINISH["SAFE"] = 0] = "SAFE"; + FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB"; + FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE"; +})(FINISH = exports.FINISH || (exports.FINISH = {})); +exports.ALPHA = []; +for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { + // Upper case + exports.ALPHA.push(String.fromCharCode(i)); + // Lower case + exports.ALPHA.push(String.fromCharCode(i + 0x20)); +} +exports.NUM_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, +}; +exports.HEX_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, + A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, + a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, +}; +exports.NUM = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', +]; +exports.ALPHANUM = exports.ALPHA.concat(exports.NUM); +exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')']; +exports.USERINFO_CHARS = exports.ALPHANUM + .concat(exports.MARK) + .concat(['%', ';', ':', '&', '=', '+', '$', ',']); +// TODO(indutny): use RFC +exports.STRICT_URL_CHAR = [ + '!', '"', '$', '%', '&', '\'', + '(', ')', '*', '+', ',', '-', '.', '/', + ':', ';', '<', '=', '>', + '@', '[', '\\', ']', '^', '_', + '`', + '{', '|', '}', '~', +].concat(exports.ALPHANUM); +exports.URL_CHAR = exports.STRICT_URL_CHAR + .concat(['\t', '\f']); +// All characters with 0x80 bit set to 1 +for (let i = 0x80; i <= 0xff; i++) { + exports.URL_CHAR.push(i); +} +exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']); +/* Tokens as defined by rfc 2616. Also lowercases them. + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" 
| "=" + * | "{" | "}" | SP | HT + */ +exports.STRICT_TOKEN = [ + '!', '#', '$', '%', '&', '\'', + '*', '+', '-', '.', + '^', '_', '`', + '|', '~', +].concat(exports.ALPHANUM); +exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); +/* + * Verify that a char is a valid visible (printable) US-ASCII + * character or %x80-FF + */ +exports.HEADER_CHARS = ['\t']; +for (let i = 32; i <= 255; i++) { + if (i !== 127) { + exports.HEADER_CHARS.push(i); + } +} +// ',' = \x44 +exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); +exports.MAJOR = exports.NUM_MAP; +exports.MINOR = exports.MAJOR; +var HEADER_STATE; +(function (HEADER_STATE) { + HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; + HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; + HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; + HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; + HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; + HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; + HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; +})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); +exports.SPECIAL_HEADERS = { + 'connection': HEADER_STATE.CONNECTION, + 'content-length': HEADER_STATE.CONTENT_LENGTH, + 'proxy-connection': HEADER_STATE.CONNECTION, + 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, + 'upgrade': HEADER_STATE.UPGRADE, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/lib/llhttp/utils.d.ts b/lib/llhttp/utils.d.ts new file mode 100644 index 0000000..15497f3 --- /dev/null +++ b/lib/llhttp/utils.d.ts @@ -0,0 +1,4 @@ +export interface IEnumMap { + [key: string]: number; +} +export declare function enumToMap(obj: any): IEnumMap; diff --git a/lib/llhttp/utils.js b/lib/llhttp/utils.js new file mode 100644 index 0000000..8a32e56 --- /dev/null +++ b/lib/llhttp/utils.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.enumToMap = void 0; +function enumToMap(obj) { + const res = {}; + Object.keys(obj).forEach((key) => { + const value = obj[key]; + if (typeof value === 'number') { + res[key] = value; + } + }); + return res; +} +exports.enumToMap = enumToMap; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/lib/llhttp/wasm_build_env.txt b/lib/llhttp/wasm_build_env.txt new file mode 100644 index 0000000..5f478b5 --- /dev/null +++ b/lib/llhttp/wasm_build_env.txt @@ -0,0 +1,32 @@ +alpine-baselayout-data-3.4.0-r0 +musl-1.2.3-r4 +busybox-1.35.0-r29 +busybox-binsh-1.35.0-r29 +alpine-baselayout-3.4.0-r0 +alpine-keys-2.4-r1 +ca-certificates-bundle-20220614-r4 +libcrypto3-3.0.8-r3 +libssl3-3.0.8-r3 +ssl_client-1.35.0-r29 +zlib-1.2.13-r0 +apk-tools-2.12.10-r1 +scanelf-1.3.5-r1 +musl-utils-1.2.3-r4 +libc-utils-0.7.2-r3 +libgcc-12.2.1_git20220924-r4 +libstdc++-12.2.1_git20220924-r4 +libffi-3.4.4-r0 +xz-libs-5.2.9-r0 +libxml2-2.10.4-r0 +zstd-libs-1.5.5-r0 +llvm15-libs-15.0.7-r0 +clang15-libs-15.0.7-r0 +libstdc++-dev-12.2.1_git20220924-r4 +clang15-15.0.7-r0 +lld-libs-15.0.7-r0 +lld-15.0.7-r0 +wasi-libc-0.20220525-r1 +wasi-libcxx-15.0.7-r0 +wasi-libcxxabi-15.0.7-r0 +wasi-compiler-rt-15.0.7-r0 +wasi-sdk-16-r0 diff --git a/lib/mock/mock-agent.js b/lib/mock/mock-agent.js new file mode 100644 index 
0000000..828e8af --- /dev/null +++ b/lib/mock/mock-agent.js @@ -0,0 +1,171 @@ +'use strict' + +const { kClients } = require('../core/symbols') +const Agent = require('../agent') +const { + kAgent, + kMockAgentSet, + kMockAgentGet, + kDispatches, + kIsMockActive, + kNetConnect, + kGetNetConnect, + kOptions, + kFactory +} = require('./mock-symbols') +const MockClient = require('./mock-client') +const MockPool = require('./mock-pool') +const { matchValue, buildMockOptions } = require('./mock-utils') +const { InvalidArgumentError, UndiciError } = require('../core/errors') +const Dispatcher = require('../dispatcher') +const Pluralizer = require('./pluralizer') +const PendingInterceptorsFormatter = require('./pending-interceptors-formatter') + +class FakeWeakRef { + constructor (value) { + this.value = value + } + + deref () { + return this.value + } +} + +class MockAgent extends Dispatcher { + constructor (opts) { + super(opts) + + this[kNetConnect] = true + this[kIsMockActive] = true + + // Instantiate Agent and encapsulate + if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + const agent = opts && opts.agent ? opts.agent : new Agent(opts) + this[kAgent] = agent + + this[kClients] = agent[kClients] + this[kOptions] = buildMockOptions(opts) + } + + get (origin) { + let dispatcher = this[kMockAgentGet](origin) + + if (!dispatcher) { + dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + } + return dispatcher + } + + dispatch (opts, handler) { + // Call MockAgent.get to perform additional setup before dispatching as normal + this.get(opts.origin) + return this[kAgent].dispatch(opts, handler) + } + + async close () { + await this[kAgent].close() + this[kClients].clear() + } + + deactivate () { + this[kIsMockActive] = false + } + + activate () { + this[kIsMockActive] = true + } + + enableNetConnect (matcher) { + if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { + if (Array.isArray(this[kNetConnect])) { + this[kNetConnect].push(matcher) + } else { + this[kNetConnect] = [matcher] + } + } else if (typeof matcher === 'undefined') { + this[kNetConnect] = true + } else { + throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.') + } + } + + disableNetConnect () { + this[kNetConnect] = false + } + + // This is required to bypass issues caused by using global symbols - see: + // https://github.com/nodejs/undici/issues/1447 + get isMockActive () { + return this[kIsMockActive] + } + + [kMockAgentSet] (origin, dispatcher) { + this[kClients].set(origin, new FakeWeakRef(dispatcher)) + } + + [kFactory] (origin) { + const mockOptions = Object.assign({ agent: this }, this[kOptions]) + return this[kOptions] && this[kOptions].connections === 1 + ? 
new MockClient(origin, mockOptions) + : new MockPool(origin, mockOptions) + } + + [kMockAgentGet] (origin) { + // First check if we can immediately find it + const ref = this[kClients].get(origin) + if (ref) { + return ref.deref() + } + + // If the origin is not a string create a dummy parent pool and return to user + if (typeof origin !== 'string') { + const dispatcher = this[kFactory]('http://localhost:9999') + this[kMockAgentSet](origin, dispatcher) + return dispatcher + } + + // If we match, create a pool and assign the same dispatches + for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { + const nonExplicitDispatcher = nonExplicitRef.deref() + if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { + const dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] + return dispatcher + } + } + } + + [kGetNetConnect] () { + return this[kNetConnect] + } + + pendingInterceptors () { + const mockAgentClients = this[kClients] + + return Array.from(mockAgentClients.entries()) + .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) + .filter(({ pending }) => pending) + } + + assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { + const pending = this.pendingInterceptors() + + if (pending.length === 0) { + return + } + + const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) + + throw new UndiciError(` +${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: + +${pendingInterceptorsFormatter.format(pending)} +`.trim()) + } +} + +module.exports = MockAgent diff --git a/lib/mock/mock-client.js b/lib/mock/mock-client.js new file mode 100644 index 0000000..5f31215 --- /dev/null +++ b/lib/mock/mock-client.js @@ -0,0 +1,59 @@ +'use strict' + +const { promisify } = require('util') +const Client = require('../client') +const { buildMockDispatch } = require('./mock-utils') +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = require('./mock-symbols') +const { MockInterceptor } = require('./mock-interceptor') +const Symbols = require('../core/symbols') +const { InvalidArgumentError } = require('../core/errors') + +/** + * MockClient provides an API that extends the Client to influence the mockDispatches. + */ +class MockClient extends Client { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } + + get [Symbols.kConnected] () { + return this[kConnected] + } + + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } +} + +module.exports = MockClient diff --git a/lib/mock/mock-errors.js b/lib/mock/mock-errors.js new file mode 100644 index 0000000..5442c0e --- /dev/null +++ b/lib/mock/mock-errors.js @@ -0,0 +1,17 @@ +'use strict' + +const { UndiciError } = require('../core/errors') + +class MockNotMatchedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, MockNotMatchedError) + this.name = 'MockNotMatchedError' + this.message = message || 'The request does not match any registered mock dispatches' + this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' + } +} + +module.exports = { + MockNotMatchedError +} diff --git a/lib/mock/mock-interceptor.js b/lib/mock/mock-interceptor.js new file mode 100644 index 0000000..781e477 --- /dev/null +++ b/lib/mock/mock-interceptor.js @@ -0,0 +1,206 @@ +'use strict' + +const { getResponseData, buildKey, addMockDispatch } = require('./mock-utils') +const { + kDispatches, + kDispatchKey, + kDefaultHeaders, + kDefaultTrailers, + kContentLength, + kMockDispatch +} = require('./mock-symbols') +const { InvalidArgumentError } = require('../core/errors') +const { buildURL } = require('../core/util') + +/** + * Defines the scope API for an interceptor reply + */ +class MockScope { + constructor (mockDispatch) { + this[kMockDispatch] = mockDispatch + } + + /** + * Delay a reply by a set amount in ms. + */ + delay (waitInMs) { + if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { + throw new InvalidArgumentError('waitInMs must be a valid integer > 0') + } + + this[kMockDispatch].delay = waitInMs + return this + } + + /** + * For a defined reply, never mark as consumed. + */ + persist () { + this[kMockDispatch].persist = true + return this + } + + /** + * Allow one to define a reply for a set amount of matching requests. 
+ */ + times (repeatTimes) { + if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { + throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') + } + + this[kMockDispatch].times = repeatTimes + return this + } +} + +/** + * Defines an interceptor for a Mock + */ +class MockInterceptor { + constructor (opts, mockDispatches) { + if (typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object') + } + if (typeof opts.path === 'undefined') { + throw new InvalidArgumentError('opts.path must be defined') + } + if (typeof opts.method === 'undefined') { + opts.method = 'GET' + } + // See https://github.com/nodejs/undici/issues/1245 + // As per RFC 3986, clients are not supposed to send URI + // fragments to servers when they retrieve a document, + if (typeof opts.path === 'string') { + if (opts.query) { + opts.path = buildURL(opts.path, opts.query) + } else { + // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 + const parsedURL = new URL(opts.path, 'data://') + opts.path = parsedURL.pathname + parsedURL.search + } + } + if (typeof opts.method === 'string') { + opts.method = opts.method.toUpperCase() + } + + this[kDispatchKey] = buildKey(opts) + this[kDispatches] = mockDispatches + this[kDefaultHeaders] = {} + this[kDefaultTrailers] = {} + this[kContentLength] = false + } + + createMockScopeDispatchData (statusCode, data, responseOptions = {}) { + const responseData = getResponseData(data) + const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {} + const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } + const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } + + return { statusCode, data, headers, trailers } + } + + validateReplyParameters (statusCode, data, responseOptions) { + if (typeof statusCode === 'undefined') { + throw new InvalidArgumentError('statusCode must be defined') + } + if (typeof data === 'undefined') { + throw new InvalidArgumentError('data must be defined') + } + if (typeof responseOptions !== 'object') { + throw new InvalidArgumentError('responseOptions must be an object') + } + } + + /** + * Mock an undici request with a defined reply. + */ + reply (replyData) { + // Values of reply aren't available right now as they + // can only be available when the reply callback is invoked. + if (typeof replyData === 'function') { + // We'll first wrap the provided callback in another function, + // this function will properly resolve the data from the callback + // when invoked. + const wrappedDefaultsCallback = (opts) => { + // Our reply options callback contains the parameter for statusCode, data and options. + const resolvedData = replyData(opts) + + // Check if it is in the right format + if (typeof resolvedData !== 'object') { + throw new InvalidArgumentError('reply options callback must return an object') + } + + const { statusCode, data = '', responseOptions = {} } = resolvedData + this.validateReplyParameters(statusCode, data, responseOptions) + // Since the values can be obtained immediately we return them + // from this higher order function that will be resolved later. + return { + ...this.createMockScopeDispatchData(statusCode, data, responseOptions) + } + } + + // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. 
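+      // Example usage (a sketch, assuming a mock pool obtained from MockAgent.get):
+      //   mockPool.intercept({ path: '/echo', method: 'POST' })
+      //     .reply((opts) => ({ statusCode: 200, data: opts.body }))
+      // The callback is kept unresolved here and is only invoked by mockDispatch
+      // once a matching request is dispatched.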
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) + return new MockScope(newMockDispatch) + } + + // We can have either one or three parameters, if we get here, + // we should have 1-3 parameters. So we spread the arguments of + // this function to obtain the parameters, since replyData will always + // just be the statusCode. + const [statusCode, data = '', responseOptions = {}] = [...arguments] + this.validateReplyParameters(statusCode, data, responseOptions) + + // Send in-already provided data like usual + const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) + return new MockScope(newMockDispatch) + } + + /** + * Mock an undici request with a defined error. + */ + replyWithError (error) { + if (typeof error === 'undefined') { + throw new InvalidArgumentError('error must be defined') + } + + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) + return new MockScope(newMockDispatch) + } + + /** + * Set default reply headers on the interceptor for subsequent replies + */ + defaultReplyHeaders (headers) { + if (typeof headers === 'undefined') { + throw new InvalidArgumentError('headers must be defined') + } + + this[kDefaultHeaders] = headers + return this + } + + /** + * Set default reply trailers on the interceptor for subsequent replies + */ + defaultReplyTrailers (trailers) { + if (typeof trailers === 'undefined') { + throw new InvalidArgumentError('trailers must be defined') + } + + this[kDefaultTrailers] = trailers + return this + } + + /** + * Set reply content length header for replies on the interceptor + */ + replyContentLength () { + this[kContentLength] = true + return this + } +} + +module.exports.MockInterceptor = MockInterceptor +module.exports.MockScope = MockScope diff --git a/lib/mock/mock-pool.js b/lib/mock/mock-pool.js new file mode 100644 index 0000000..0a3a7cd --- /dev/null +++ b/lib/mock/mock-pool.js @@ -0,0 +1,59 @@ +'use strict' + +const { promisify } = require('util') +const Pool = require('../pool') +const { buildMockDispatch } = require('./mock-utils') +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = require('./mock-symbols') +const { MockInterceptor } = require('./mock-interceptor') +const Symbols = require('../core/symbols') +const { InvalidArgumentError } = require('../core/errors') + +/** + * MockPool provides an API that extends the Pool to influence the mockDispatches. + */ +class MockPool extends Pool { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } + + get [Symbols.kConnected] () { + return this[kConnected] + } + + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } +} + +module.exports = MockPool diff --git a/lib/mock/mock-symbols.js b/lib/mock/mock-symbols.js new file mode 100644 index 0000000..8c4cbb6 --- /dev/null +++ b/lib/mock/mock-symbols.js @@ -0,0 +1,23 @@ +'use strict' + +module.exports = { + kAgent: Symbol('agent'), + kOptions: Symbol('options'), + kFactory: Symbol('factory'), + kDispatches: Symbol('dispatches'), + kDispatchKey: Symbol('dispatch key'), + kDefaultHeaders: Symbol('default headers'), + kDefaultTrailers: Symbol('default trailers'), + kContentLength: Symbol('content length'), + kMockAgent: Symbol('mock agent'), + kMockAgentSet: Symbol('mock agent set'), + kMockAgentGet: Symbol('mock agent get'), + kMockDispatch: Symbol('mock dispatch'), + kClose: Symbol('close'), + kOriginalClose: Symbol('original agent close'), + kOrigin: Symbol('origin'), + kIsMockActive: Symbol('is mock active'), + kNetConnect: Symbol('net connect'), + kGetNetConnect: Symbol('get net connect'), + kConnected: Symbol('connected') +} diff --git a/lib/mock/mock-utils.js b/lib/mock/mock-utils.js new file mode 100644 index 0000000..42ea185 --- /dev/null +++ b/lib/mock/mock-utils.js @@ -0,0 +1,351 @@ +'use strict' + +const { MockNotMatchedError } = require('./mock-errors') +const { + kDispatches, + kMockAgent, + kOriginalDispatch, + kOrigin, + kGetNetConnect +} = require('./mock-symbols') +const { buildURL, nop } = require('../core/util') +const { STATUS_CODES } = require('http') +const { + types: { + isPromise + } +} = require('util') + +function matchValue (match, value) { + if (typeof match === 'string') { + return match === value + } + if (match instanceof RegExp) { + return match.test(value) + } + if (typeof match === 'function') { + return match(value) === true + } + return false +} + +function lowerCaseEntries (headers) { + return Object.fromEntries( + Object.entries(headers).map(([headerName, headerValue]) => { + return [headerName.toLocaleLowerCase(), headerValue] + }) + ) +} + +/** + * @param {import('../../index').Headers|string[]|Record} headers + * @param {string} key + */ +function getHeaderByName (headers, key) { + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { + return headers[i + 1] + } + } + + return undefined + } else if (typeof headers.get === 'function') { + return headers.get(key) + } else { + return lowerCaseEntries(headers)[key.toLocaleLowerCase()] + } +} + +/** @param {string[]} headers */ +function buildHeadersFromArray (headers) { // fetch HeadersList + const clone = headers.slice() + const entries = [] + for (let index = 0; index < clone.length; index += 2) { + entries.push([clone[index], clone[index + 1]]) + } + return Object.fromEntries(entries) +} + +function matchHeaders (mockDispatch, headers) { + if (typeof mockDispatch.headers === 'function') { + if (Array.isArray(headers)) { // fetch HeadersList + headers = buildHeadersFromArray(headers) + } + return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) + } + if (typeof mockDispatch.headers === 'undefined') { + return true + } + if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { + return false + } + + for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { + const headerValue = getHeaderByName(headers, matchHeaderName) + + if (!matchValue(matchHeaderValue, headerValue)) { + return false + } + } + return true +} + +function safeUrl (path) { + if (typeof path !== 'string') { + return path + } + + const pathSegments = path.split('?') + + if (pathSegments.length !== 2) { + return path + } + + const qp = new URLSearchParams(pathSegments.pop()) + qp.sort() + return [...pathSegments, qp.toString()].join('?') +} + +function matchKey (mockDispatch, { path, method, body, headers }) { + const pathMatch = matchValue(mockDispatch.path, path) + const methodMatch = matchValue(mockDispatch.method, method) + const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true + const headersMatch = matchHeaders(mockDispatch, headers) + return pathMatch && methodMatch && bodyMatch && headersMatch +} + +function getResponseData (data) { + if (Buffer.isBuffer(data)) { + return data + } else if (typeof data === 'object') { + return JSON.stringify(data) + } else { + return data.toString() + } +} + +function getMockDispatch (mockDispatches, key) { + const basePath = key.query ? buildURL(key.path, key.query) : key.path + const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath + + // Match path + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) + } + + // Match method + matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) + } + + // Match body + matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) + } + + // Match headers + matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`) + } + + return matchedMockDispatches[0] +} + +function addMockDispatch (mockDispatches, key, data) { + const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } + const replyData = typeof data === 'function' ? 
{ callback: data } : { ...data } + const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } + mockDispatches.push(newMockDispatch) + return newMockDispatch +} + +function deleteMockDispatch (mockDispatches, key) { + const index = mockDispatches.findIndex(dispatch => { + if (!dispatch.consumed) { + return false + } + return matchKey(dispatch, key) + }) + if (index !== -1) { + mockDispatches.splice(index, 1) + } +} + +function buildKey (opts) { + const { path, method, body, headers, query } = opts + return { + path, + method, + body, + headers, + query + } +} + +function generateKeyValues (data) { + return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ + ...keyValuePairs, + Buffer.from(`${key}`), + Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) + ], []) +} + +/** + * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status + * @param {number} statusCode + */ +function getStatusText (statusCode) { + return STATUS_CODES[statusCode] || 'unknown' +} + +async function getResponse (body) { + const buffers = [] + for await (const data of body) { + buffers.push(data) + } + return Buffer.concat(buffers).toString('utf8') +} + +/** + * Mock dispatch function used to simulate undici dispatches + */ +function mockDispatch (opts, handler) { + // Get mock dispatch from built key + const key = buildKey(opts) + const mockDispatch = getMockDispatch(this[kDispatches], key) + + mockDispatch.timesInvoked++ + + // Here's where we resolve a callback if a callback is present for the dispatch data. + if (mockDispatch.data.callback) { + mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } + } + + // Parse mockDispatch data + const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch + const { timesInvoked, times } = mockDispatch + + // If it's used up and not persistent, mark as consumed + mockDispatch.consumed = !persist && timesInvoked >= times + mockDispatch.pending = timesInvoked < times + + // If specified, trigger dispatch error + if (error !== null) { + deleteMockDispatch(this[kDispatches], key) + handler.onError(error) + return true + } + + // Handle the request with a delay if necessary + if (typeof delay === 'number' && delay > 0) { + setTimeout(() => { + handleReply(this[kDispatches]) + }, delay) + } else { + handleReply(this[kDispatches]) + } + + function handleReply (mockDispatches, _data = data) { + // fetch's HeadersList is a 1D string array + const optsHeaders = Array.isArray(opts.headers) + ? buildHeadersFromArray(opts.headers) + : opts.headers + const body = typeof _data === 'function' + ? _data({ ...opts, headers: optsHeaders }) + : _data + + // util.types.isPromise is likely needed for jest. + if (isPromise(body)) { + // If handleReply is asynchronous, throwing an error + // in the callback will reject the promise, rather than + // synchronously throw the error, which breaks some tests. + // Rather, we wait for the callback to resolve if it is a + // promise, and then re-run handleReply with the new body. 
+ body.then((newData) => handleReply(mockDispatches, newData)) + return + } + + const responseData = getResponseData(body) + const responseHeaders = generateKeyValues(headers) + const responseTrailers = generateKeyValues(trailers) + + handler.abort = nop + handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) + handler.onData(Buffer.from(responseData)) + handler.onComplete(responseTrailers) + deleteMockDispatch(mockDispatches, key) + } + + function resume () {} + + return true +} + +function buildMockDispatch () { + const agent = this[kMockAgent] + const origin = this[kOrigin] + const originalDispatch = this[kOriginalDispatch] + + return function dispatch (opts, handler) { + if (agent.isMockActive) { + try { + mockDispatch.call(this, opts, handler) + } catch (error) { + if (error instanceof MockNotMatchedError) { + const netConnect = agent[kGetNetConnect]() + if (netConnect === false) { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) + } + if (checkNetConnect(netConnect, origin)) { + originalDispatch.call(this, opts, handler) + } else { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) + } + } else { + throw error + } + } + } else { + originalDispatch.call(this, opts, handler) + } + } +} + +function checkNetConnect (netConnect, origin) { + const url = new URL(origin) + if (netConnect === true) { + return true + } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { + return true + } + return false +} + +function buildMockOptions (opts) { + if (opts) { + const { agent, ...mockOptions } = opts + return mockOptions + } +} + +module.exports = { + getResponseData, + getMockDispatch, + addMockDispatch, + deleteMockDispatch, + buildKey, + generateKeyValues, + matchValue, + getResponse, + getStatusText, + mockDispatch, + buildMockDispatch, + checkNetConnect, + buildMockOptions, + getHeaderByName +} diff --git a/lib/mock/pending-interceptors-formatter.js b/lib/mock/pending-interceptors-formatter.js new file mode 100644 index 0000000..1bc7539 --- /dev/null +++ b/lib/mock/pending-interceptors-formatter.js @@ -0,0 +1,40 @@ +'use strict' + +const { Transform } = require('stream') +const { Console } = require('console') + +/** + * Gets the output of `console.table(…)` as a string. + */ +module.exports = class PendingInterceptorsFormatter { + constructor ({ disableColors } = {}) { + this.transform = new Transform({ + transform (chunk, _enc, cb) { + cb(null, chunk) + } + }) + + this.logger = new Console({ + stdout: this.transform, + inspectOptions: { + colors: !disableColors && !process.env.CI + } + }) + } + + format (pendingInterceptors) { + const withPrettyHeaders = pendingInterceptors.map( + ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + Method: method, + Origin: origin, + Path: path, + 'Status code': statusCode, + Persistent: persist ? '✅' : 'âŒ', + Invocations: timesInvoked, + Remaining: persist ? 
Infinity : times - timesInvoked + })) + + this.logger.table(withPrettyHeaders) + return this.transform.read().toString() + } +} diff --git a/lib/mock/pluralizer.js b/lib/mock/pluralizer.js new file mode 100644 index 0000000..47f150b --- /dev/null +++ b/lib/mock/pluralizer.js @@ -0,0 +1,29 @@ +'use strict' + +const singulars = { + pronoun: 'it', + is: 'is', + was: 'was', + this: 'this' +} + +const plurals = { + pronoun: 'they', + is: 'are', + was: 'were', + this: 'these' +} + +module.exports = class Pluralizer { + constructor (singular, plural) { + this.singular = singular + this.plural = plural + } + + pluralize (count) { + const one = count === 1 + const keys = one ? singulars : plurals + const noun = one ? this.singular : this.plural + return { ...keys, count, noun } + } +} diff --git a/lib/node/fixed-queue.js b/lib/node/fixed-queue.js new file mode 100644 index 0000000..3572681 --- /dev/null +++ b/lib/node/fixed-queue.js @@ -0,0 +1,117 @@ +/* eslint-disable */ + +'use strict' + +// Extracted from node/lib/internal/fixed_queue.js + +// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. +const kSize = 2048; +const kMask = kSize - 1; + +// The FixedQueue is implemented as a singly-linked list of fixed-size +// circular buffers. It looks something like this: +// +// head tail +// | | +// v v +// +-----------+ <-----\ +-----------+ <------\ +-----------+ +// | [null] | \----- | next | \------- | next | +// +-----------+ +-----------+ +-----------+ +// | item | <-- bottom | item | <-- bottom | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | bottom --> | item | +// | item | | item | | item | +// | ... | | ... | | ... | +// | item | | item | | item | +// | item | | item | | item | +// | [empty] | <-- top | item | | item | +// | [empty] | | item | | item | +// | [empty] | | [empty] | <-- top top --> | [empty] | +// +-----------+ +-----------+ +-----------+ +// +// Or, if there is only one circular buffer, it looks something +// like either of these: +// +// head tail head tail +// | | | | +// v v v v +// +-----------+ +-----------+ +// | [null] | | [null] | +// +-----------+ +-----------+ +// | [empty] | | item | +// | [empty] | | item | +// | item | <-- bottom top --> | [empty] | +// | item | | [empty] | +// | [empty] | <-- top bottom --> | item | +// | [empty] | | item | +// +-----------+ +-----------+ +// +// Adding a value means moving `top` forward by one, removing means +// moving `bottom` forward by one. After reaching the end, the queue +// wraps around. +// +// When `top === bottom` the current queue is empty and when +// `top + 1 === bottom` it's full. This wastes a single space of storage +// but allows much quicker checks. 
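+// Usage sketch (illustration only): FixedQueue exposes the same minimal
+// push/shift surface as a plain array-backed queue, e.g.
+//   const queue = new FixedQueue()
+//   queue.push({ opts, handler })
+//   queue.shift() // -> { opts, handler }, or null once the queue is empty
+// PoolBase below relies on exactly this pattern to buffer pending dispatches.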
+ +class FixedCircularBuffer { + constructor() { + this.bottom = 0; + this.top = 0; + this.list = new Array(kSize); + this.next = null; + } + + isEmpty() { + return this.top === this.bottom; + } + + isFull() { + return ((this.top + 1) & kMask) === this.bottom; + } + + push(data) { + this.list[this.top] = data; + this.top = (this.top + 1) & kMask; + } + + shift() { + const nextItem = this.list[this.bottom]; + if (nextItem === undefined) + return null; + this.list[this.bottom] = undefined; + this.bottom = (this.bottom + 1) & kMask; + return nextItem; + } +} + +module.exports = class FixedQueue { + constructor() { + this.head = this.tail = new FixedCircularBuffer(); + } + + isEmpty() { + return this.head.isEmpty(); + } + + push(data) { + if (this.head.isFull()) { + // Head is full: Creates a new queue, sets the old queue's `.next` to it, + // and sets it as the new main queue. + this.head = this.head.next = new FixedCircularBuffer(); + } + this.head.push(data); + } + + shift() { + const tail = this.tail; + const next = tail.shift(); + if (tail.isEmpty() && tail.next !== null) { + // If there is another queue, it forms the new tail. + this.tail = tail.next; + } + return next; + } +}; diff --git a/lib/pool-base.js b/lib/pool-base.js new file mode 100644 index 0000000..2a909ee --- /dev/null +++ b/lib/pool-base.js @@ -0,0 +1,194 @@ +'use strict' + +const DispatcherBase = require('./dispatcher-base') +const FixedQueue = require('./node/fixed-queue') +const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = require('./core/symbols') +const PoolStats = require('./pool-stats') + +const kClients = Symbol('clients') +const kNeedDrain = Symbol('needDrain') +const kQueue = Symbol('queue') +const kClosedResolve = Symbol('closed resolve') +const kOnDrain = Symbol('onDrain') +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kGetDispatcher = Symbol('get dispatcher') +const kAddClient = Symbol('add client') +const kRemoveClient = Symbol('remove client') +const kStats = Symbol('stats') + +class PoolBase extends DispatcherBase { + constructor () { + super() + + this[kQueue] = new FixedQueue() + this[kClients] = [] + this[kQueued] = 0 + + const pool = this + + this[kOnDrain] = function onDrain (origin, targets) { + const queue = pool[kQueue] + + let needDrain = false + + while (!needDrain) { + const item = queue.shift() + if (!item) { + break + } + pool[kQueued]-- + needDrain = !this.dispatch(item.opts, item.handler) + } + + this[kNeedDrain] = needDrain + + if (!this[kNeedDrain] && pool[kNeedDrain]) { + pool[kNeedDrain] = false + pool.emit('drain', origin, [pool, ...targets]) + } + + if (pool[kClosedResolve] && queue.isEmpty()) { + Promise + .all(pool[kClients].map(c => c.close())) + .then(pool[kClosedResolve]) + } + } + + this[kOnConnect] = (origin, targets) => { + pool.emit('connect', origin, [pool, ...targets]) + } + + this[kOnDisconnect] = (origin, targets, err) => { + pool.emit('disconnect', origin, [pool, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + pool.emit('connectionError', origin, [pool, ...targets], err) + } + + this[kStats] = new PoolStats(this) + } + + get [kBusy] () { + return this[kNeedDrain] + } + + get [kConnected] () { + return this[kClients].filter(client => client[kConnected]).length + } + + get [kFree] () { + return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length + } + + 
get [kPending] () { + let ret = this[kQueued] + for (const { [kPending]: pending } of this[kClients]) { + ret += pending + } + return ret + } + + get [kRunning] () { + let ret = 0 + for (const { [kRunning]: running } of this[kClients]) { + ret += running + } + return ret + } + + get [kSize] () { + let ret = this[kQueued] + for (const { [kSize]: size } of this[kClients]) { + ret += size + } + return ret + } + + get stats () { + return this[kStats] + } + + async [kClose] () { + if (this[kQueue].isEmpty()) { + return Promise.all(this[kClients].map(c => c.close())) + } else { + return new Promise((resolve) => { + this[kClosedResolve] = resolve + }) + } + } + + async [kDestroy] (err) { + while (true) { + const item = this[kQueue].shift() + if (!item) { + break + } + item.handler.onError(err) + } + + return Promise.all(this[kClients].map(c => c.destroy(err))) + } + + [kDispatch] (opts, handler) { + const dispatcher = this[kGetDispatcher]() + + if (!dispatcher) { + this[kNeedDrain] = true + this[kQueue].push({ opts, handler }) + this[kQueued]++ + } else if (!dispatcher.dispatch(opts, handler)) { + dispatcher[kNeedDrain] = true + this[kNeedDrain] = !this[kGetDispatcher]() + } + + return !this[kNeedDrain] + } + + [kAddClient] (client) { + client + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].push(client) + + if (this[kNeedDrain]) { + process.nextTick(() => { + if (this[kNeedDrain]) { + this[kOnDrain](client[kUrl], [this, client]) + } + }) + } + + return this + } + + [kRemoveClient] (client) { + client.close(() => { + const idx = this[kClients].indexOf(client) + if (idx !== -1) { + this[kClients].splice(idx, 1) + } + }) + + this[kNeedDrain] = this[kClients].some(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + } +} + +module.exports = { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} diff --git a/lib/pool-stats.js b/lib/pool-stats.js new file mode 100644 index 0000000..b4af8ae --- /dev/null +++ b/lib/pool-stats.js @@ -0,0 +1,34 @@ +const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = require('./core/symbols') +const kPool = Symbol('pool') + +class PoolStats { + constructor (pool) { + this[kPool] = pool + } + + get connected () { + return this[kPool][kConnected] + } + + get free () { + return this[kPool][kFree] + } + + get pending () { + return this[kPool][kPending] + } + + get queued () { + return this[kPool][kQueued] + } + + get running () { + return this[kPool][kRunning] + } + + get size () { + return this[kPool][kSize] + } +} + +module.exports = PoolStats diff --git a/lib/pool.js b/lib/pool.js new file mode 100644 index 0000000..e3cd339 --- /dev/null +++ b/lib/pool.js @@ -0,0 +1,94 @@ +'use strict' + +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kGetDispatcher +} = require('./pool-base') +const Client = require('./client') +const { + InvalidArgumentError +} = require('./core/errors') +const util = require('./core/util') +const { kUrl, kInterceptors } = require('./core/symbols') +const buildConnector = require('./core/connect') + +const kOptions = Symbol('options') +const kConnections = Symbol('connections') +const kFactory = Symbol('factory') + +function defaultFactory (origin, opts) { + return new Client(origin, opts) +} + +class Pool extends PoolBase { + constructor (origin, { + connections, + factory = defaultFactory, + connect, + 
connectTimeout, + tls, + maxCachedSessions, + socketPath, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + allowH2, + ...options + } = {}) { + super() + + if (connections != null && (!Number.isFinite(connections) || connections < 0)) { + throw new InvalidArgumentError('invalid connections') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) + ? options.interceptors.Pool + : [] + this[kConnections] = connections || null + this[kUrl] = util.parseOrigin(origin) + this[kOptions] = { ...util.deepClone(options), connect, allowH2 } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kFactory] = factory + } + + [kGetDispatcher] () { + let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) + + if (dispatcher) { + return dispatcher + } + + if (!this[kConnections] || this[kClients].length < this[kConnections]) { + dispatcher = this[kFactory](this[kUrl], this[kOptions]) + this[kAddClient](dispatcher) + } + + return dispatcher + } +} + +module.exports = Pool diff --git a/lib/proxy-agent.js b/lib/proxy-agent.js new file mode 100644 index 0000000..e3c0f6f --- /dev/null +++ b/lib/proxy-agent.js @@ -0,0 +1,189 @@ +'use strict' + +const { kProxy, kClose, kDestroy, kInterceptors } = require('./core/symbols') +const { URL } = require('url') +const Agent = require('./agent') +const Pool = require('./pool') +const DispatcherBase = require('./dispatcher-base') +const { InvalidArgumentError, RequestAbortedError } = require('./core/errors') +const buildConnector = require('./core/connect') + +const kAgent = Symbol('proxy agent') +const kClient = Symbol('proxy client') +const kProxyHeaders = Symbol('proxy headers') +const kRequestTls = Symbol('request tls settings') +const kProxyTls = Symbol('proxy tls settings') +const kConnectEndpoint = Symbol('connect endpoint function') + +function defaultProtocolPort (protocol) { + return protocol === 'https:' ? 443 : 80 +} + +function buildProxyOptions (opts) { + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + return { + uri: opts.uri, + protocol: opts.protocol || 'https' + } +} + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class ProxyAgent extends DispatcherBase { + constructor (opts) { + super(opts) + this[kProxy] = buildProxyOptions(opts) + this[kAgent] = new Agent(opts) + this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) + ? 
opts.interceptors.ProxyAgent + : [] + + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + const { clientFactory = defaultFactory } = opts + + if (typeof clientFactory !== 'function') { + throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') + } + + this[kRequestTls] = opts.requestTls + this[kProxyTls] = opts.proxyTls + this[kProxyHeaders] = opts.headers || {} + + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl + + if (opts.auth && opts.token) { + throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') + } else if (opts.auth) { + /* @deprecated in favour of opts.token */ + this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` + } else if (opts.token) { + this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` + } + + const connect = buildConnector({ ...opts.proxyTls }) + this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) + this[kClient] = clientFactory(resolvedUrl, { connect }) + this[kAgent] = new Agent({ + ...opts, + connect: async (opts, callback) => { + let requestedHost = opts.host + if (!opts.port) { + requestedHost += `:${defaultProtocolPort(opts.protocol)}` + } + try { + const { socket, statusCode } = await this[kClient].connect({ + origin, + port, + path: requestedHost, + signal: opts.signal, + headers: { + ...this[kProxyHeaders], + host + } + }) + if (statusCode !== 200) { + socket.on('error', () => {}).destroy() + callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) + } + if (opts.protocol !== 'https:') { + callback(null, socket) + return + } + let servername + if (this[kRequestTls]) { + servername = this[kRequestTls].servername + } else { + servername = opts.servername + } + this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) + } catch (err) { + callback(err) + } + } + }) + } + + dispatch (opts, handler) { + const { host } = new URL(opts.origin) + const headers = buildHeaders(opts.headers) + throwIfProxyAuthIsSent(headers) + return this[kAgent].dispatch( + { + ...opts, + headers: { + ...headers, + host + } + }, + handler + ) + } + + async [kClose] () { + await this[kAgent].close() + await this[kClient].close() + } + + async [kDestroy] () { + await this[kAgent].destroy() + await this[kClient].destroy() + } +} + +/** + * @param {string[] | Record} headers + * @returns {Record} + */ +function buildHeaders (headers) { + // When using undici.fetch, the headers list is stored + // as an array. + if (Array.isArray(headers)) { + /** @type {Record} */ + const headersPair = {} + + for (let i = 0; i < headers.length; i += 2) { + headersPair[headers[i]] = headers[i + 1] + } + + return headersPair + } + + return headers +} + +/** + * @param {Record} headers + * + * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers + * Nevertheless, it was changed and to avoid a security vulnerability by end users + * this check was created. 
+ * It should be removed in the next major version for performance reasons + */ +function throwIfProxyAuthIsSent (headers) { + const existProxyAuth = headers && Object.keys(headers) + .find((key) => key.toLowerCase() === 'proxy-authorization') + if (existProxyAuth) { + throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') + } +} + +module.exports = ProxyAgent diff --git a/lib/timers.js b/lib/timers.js new file mode 100644 index 0000000..5782217 --- /dev/null +++ b/lib/timers.js @@ -0,0 +1,97 @@ +'use strict' + +let fastNow = Date.now() +let fastNowTimeout + +const fastTimers = [] + +function onTimeout () { + fastNow = Date.now() + + let len = fastTimers.length + let idx = 0 + while (idx < len) { + const timer = fastTimers[idx] + + if (timer.state === 0) { + timer.state = fastNow + timer.delay + } else if (timer.state > 0 && fastNow >= timer.state) { + timer.state = -1 + timer.callback(timer.opaque) + } + + if (timer.state === -1) { + timer.state = -2 + if (idx !== len - 1) { + fastTimers[idx] = fastTimers.pop() + } else { + fastTimers.pop() + } + len -= 1 + } else { + idx += 1 + } + } + + if (fastTimers.length > 0) { + refreshTimeout() + } +} + +function refreshTimeout () { + if (fastNowTimeout && fastNowTimeout.refresh) { + fastNowTimeout.refresh() + } else { + clearTimeout(fastNowTimeout) + fastNowTimeout = setTimeout(onTimeout, 1e3) + if (fastNowTimeout.unref) { + fastNowTimeout.unref() + } + } +} + +class Timeout { + constructor (callback, delay, opaque) { + this.callback = callback + this.delay = delay + this.opaque = opaque + + // -2 not in timer list + // -1 in timer list but inactive + // 0 in timer list waiting for time + // > 0 in timer list waiting for time to expire + this.state = -2 + + this.refresh() + } + + refresh () { + if (this.state === -2) { + fastTimers.push(this) + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() + } + } + + this.state = 0 + } + + clear () { + this.state = -1 + } +} + +module.exports = { + setTimeout (callback, delay, opaque) { + return delay < 1e3 + ? 
setTimeout(callback, delay, opaque) + : new Timeout(callback, delay, opaque) + }, + clearTimeout (timeout) { + if (timeout instanceof Timeout) { + timeout.clear() + } else { + clearTimeout(timeout) + } + } +} diff --git a/lib/websocket/connection.js b/lib/websocket/connection.js new file mode 100644 index 0000000..e0fa697 --- /dev/null +++ b/lib/websocket/connection.js @@ -0,0 +1,291 @@ +'use strict' + +const diagnosticsChannel = require('diagnostics_channel') +const { uid, states } = require('./constants') +const { + kReadyState, + kSentClose, + kByteParser, + kReceivedClose +} = require('./symbols') +const { fireEvent, failWebsocketConnection } = require('./util') +const { CloseEvent } = require('./events') +const { makeRequest } = require('../fetch/request') +const { fetching } = require('../fetch/index') +const { Headers } = require('../fetch/headers') +const { getGlobalDispatcher } = require('../global') +const { kHeadersList } = require('../core/symbols') + +const channels = {} +channels.open = diagnosticsChannel.channel('undici:websocket:open') +channels.close = diagnosticsChannel.channel('undici:websocket:close') +channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') + +/** @type {import('crypto')} */ +let crypto +try { + crypto = require('crypto') +} catch { + +} + +/** + * @see https://websockets.spec.whatwg.org/#concept-websocket-establish + * @param {URL} url + * @param {string|string[]} protocols + * @param {import('./websocket').WebSocket} ws + * @param {(response: any) => void} onEstablish + * @param {Partial} options + */ +function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { + // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s + // scheme is "ws", and to "https" otherwise. + const requestURL = url + + requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:' + + // 2. Let request be a new request, whose URL is requestURL, client is client, + // service-workers mode is "none", referrer is "no-referrer", mode is + // "websocket", credentials mode is "include", cache mode is "no-store" , + // and redirect mode is "error". + const request = makeRequest({ + urlList: [requestURL], + serviceWorkers: 'none', + referrer: 'no-referrer', + mode: 'websocket', + credentials: 'include', + cache: 'no-store', + redirect: 'error' + }) + + // Note: undici extension, allow setting custom headers. + if (options.headers) { + const headersList = new Headers(options.headers)[kHeadersList] + + request.headersList = headersList + } + + // 3. Append (`Upgrade`, `websocket`) to request’s header list. + // 4. Append (`Connection`, `Upgrade`) to request’s header list. + // Note: both of these are handled by undici currently. + // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 + + // 5. Let keyValue be a nonce consisting of a randomly selected + // 16-byte value that has been forgiving-base64-encoded and + // isomorphic encoded. + const keyValue = crypto.randomBytes(16).toString('base64') + + // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s + // header list. + request.headersList.append('sec-websocket-key', keyValue) + + // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s + // header list. + request.headersList.append('sec-websocket-version', '13') + + // 8. For each protocol in protocols, combine + // (`Sec-WebSocket-Protocol`, protocol) in request’s header + // list. 
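+  // Illustration (assuming the caller passed an array of subprotocols, e.g.
+  // new WebSocket(url, ['chat', 'superchat'])): each entry is appended as a
+  // separate `sec-websocket-protocol` header value by the loop below.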
+ for (const protocol of protocols) { + request.headersList.append('sec-websocket-protocol', protocol) + } + + // 9. Let permessageDeflate be a user-agent defined + // "permessage-deflate" extension header value. + // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 + // TODO: enable once permessage-deflate is supported + const permessageDeflate = '' // 'permessage-deflate; 15' + + // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to + // request’s header list. + // request.headersList.append('sec-websocket-extensions', permessageDeflate) + + // 11. Fetch request with useParallelQueue set to true, and + // processResponse given response being these steps: + const controller = fetching({ + request, + useParallelQueue: true, + dispatcher: options.dispatcher ?? getGlobalDispatcher(), + processResponse (response) { + // 1. If response is a network error or its status is not 101, + // fail the WebSocket connection. + if (response.type === 'error' || response.status !== 101) { + failWebsocketConnection(ws, 'Received network error or non-101 status code.') + return + } + + // 2. If protocols is not the empty list and extracting header + // list values given `Sec-WebSocket-Protocol` and response’s + // header list results in null, failure, or the empty byte + // sequence, then fail the WebSocket connection. + if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Server did not respond with sent protocols.') + return + } + + // 3. Follow the requirements stated step 2 to step 6, inclusive, + // of the last set of steps in section 4.1 of The WebSocket + // Protocol to validate response. This either results in fail + // the WebSocket connection or the WebSocket connection is + // established. + + // 2. If the response lacks an |Upgrade| header field or the |Upgrade| + // header field contains a value that is not an ASCII case- + // insensitive match for the value "websocket", the client MUST + // _Fail the WebSocket Connection_. + if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { + failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') + return + } + + // 3. If the response lacks a |Connection| header field or the + // |Connection| header field doesn't contain a token that is an + // ASCII case-insensitive match for the value "Upgrade", the client + // MUST _Fail the WebSocket Connection_. + if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { + failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') + return + } + + // 4. If the response lacks a |Sec-WebSocket-Accept| header field or + // the |Sec-WebSocket-Accept| contains a value other than the + // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- + // Key| (as a string, not base64-decoded) with the string "258EAFA5- + // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and + // trailing whitespace, the client MUST _Fail the WebSocket + // Connection_. + const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') + const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') + if (secWSAccept !== digest) { + failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') + return + } + + // 5. 
If the response includes a |Sec-WebSocket-Extensions| header + // field and this header field indicates the use of an extension + // that was not present in the client's handshake (the server has + // indicated an extension not requested by the client), the client + // MUST _Fail the WebSocket Connection_. (The parsing of this + // header field to determine which extensions are requested is + // discussed in Section 9.1.) + const secExtension = response.headersList.get('Sec-WebSocket-Extensions') + + if (secExtension !== null && secExtension !== permessageDeflate) { + failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') + return + } + + // 6. If the response includes a |Sec-WebSocket-Protocol| header field + // and this header field indicates the use of a subprotocol that was + // not present in the client's handshake (the server has indicated a + // subprotocol not requested by the client), the client MUST _Fail + // the WebSocket Connection_. + const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') + + if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') + return + } + + response.socket.on('data', onSocketData) + response.socket.on('close', onSocketClose) + response.socket.on('error', onSocketError) + + if (channels.open.hasSubscribers) { + channels.open.publish({ + address: response.socket.address(), + protocol: secProtocol, + extensions: secExtension + }) + } + + onEstablish(response) + } + }) + + return controller +} + +/** + * @param {Buffer} chunk + */ +function onSocketData (chunk) { + if (!this.ws[kByteParser].write(chunk)) { + this.pause() + } +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 + */ +function onSocketClose () { + const { ws } = this + + // If the TCP connection was closed after the + // WebSocket closing handshake was completed, the WebSocket connection + // is said to have been closed _cleanly_. + const wasClean = ws[kSentClose] && ws[kReceivedClose] + + let code = 1005 + let reason = '' + + const result = ws[kByteParser].closingInfo + + if (result) { + code = result.code ?? 1005 + reason = result.reason + } else if (!ws[kSentClose]) { + // If _The WebSocket + // Connection is Closed_ and no Close control frame was received by the + // endpoint (such as could occur if the underlying transport connection + // is lost), _The WebSocket Connection Close Code_ is considered to be + // 1006. + code = 1006 + } + + // 1. Change the ready state to CLOSED (3). + ws[kReadyState] = states.CLOSED + + // 2. If the user agent was required to fail the WebSocket + // connection, or if the WebSocket connection was closed + // after being flagged as full, fire an event named error + // at the WebSocket object. + // TODO + + // 3. Fire an event named close at the WebSocket object, + // using CloseEvent, with the wasClean attribute + // initialized to true if the connection closed cleanly + // and false otherwise, the code attribute initialized to + // the WebSocket connection close code, and the reason + // attribute initialized to the result of applying UTF-8 + // decode without BOM to the WebSocket connection close + // reason. 
+ fireEvent('close', ws, CloseEvent, { + wasClean, code, reason + }) + + if (channels.close.hasSubscribers) { + channels.close.publish({ + websocket: ws, + code, + reason + }) + } +} + +function onSocketError (error) { + const { ws } = this + + ws[kReadyState] = states.CLOSING + + if (channels.socketError.hasSubscribers) { + channels.socketError.publish(error) + } + + this.destroy() +} + +module.exports = { + establishWebSocketConnection +} diff --git a/lib/websocket/constants.js b/lib/websocket/constants.js new file mode 100644 index 0000000..406b8e3 --- /dev/null +++ b/lib/websocket/constants.js @@ -0,0 +1,51 @@ +'use strict' + +// This is a Globally Unique Identifier unique used +// to validate that the endpoint accepts websocket +// connections. +// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 +const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +const states = { + CONNECTING: 0, + OPEN: 1, + CLOSING: 2, + CLOSED: 3 +} + +const opcodes = { + CONTINUATION: 0x0, + TEXT: 0x1, + BINARY: 0x2, + CLOSE: 0x8, + PING: 0x9, + PONG: 0xA +} + +const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 + +const parserStates = { + INFO: 0, + PAYLOADLENGTH_16: 2, + PAYLOADLENGTH_64: 3, + READ_DATA: 4 +} + +const emptyBuffer = Buffer.allocUnsafe(0) + +module.exports = { + uid, + staticPropertyDescriptors, + states, + opcodes, + maxUnsigned16Bit, + parserStates, + emptyBuffer +} diff --git a/lib/websocket/events.js b/lib/websocket/events.js new file mode 100644 index 0000000..621a226 --- /dev/null +++ b/lib/websocket/events.js @@ -0,0 +1,303 @@ +'use strict' + +const { webidl } = require('../fetch/webidl') +const { kEnumerableProperty } = require('../core/util') +const { MessagePort } = require('worker_threads') + +/** + * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent + */ +class MessageEvent extends Event { + #eventInit + + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.MessageEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict + } + + get data () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.data + } + + get origin () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.origin + } + + get lastEventId () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.lastEventId + } + + get source () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.source + } + + get ports () { + webidl.brandCheck(this, MessageEvent) + + if (!Object.isFrozen(this.#eventInit.ports)) { + Object.freeze(this.#eventInit.ports) + } + + return this.#eventInit.ports + } + + initMessageEvent ( + type, + bubbles = false, + cancelable = false, + data = null, + origin = '', + lastEventId = '', + source = null, + ports = [] + ) { + webidl.brandCheck(this, MessageEvent) + + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) + + return new MessageEvent(type, { + bubbles, cancelable, data, origin, lastEventId, source, ports + }) + } +} + +/** + * @see https://websockets.spec.whatwg.org/#the-closeevent-interface + */ +class CloseEvent extends Event { + #eventInit + + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' 
}) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.CloseEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict + } + + get wasClean () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.wasClean + } + + get code () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.code + } + + get reason () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.reason + } +} + +// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface +class ErrorEvent extends Event { + #eventInit + + constructor (type, eventInitDict) { + webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) + + super(type, eventInitDict) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {}) + + this.#eventInit = eventInitDict + } + + get message () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.message + } + + get filename () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.filename + } + + get lineno () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.lineno + } + + get colno () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.colno + } + + get error () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.error + } +} + +Object.defineProperties(MessageEvent.prototype, { + [Symbol.toStringTag]: { + value: 'MessageEvent', + configurable: true + }, + data: kEnumerableProperty, + origin: kEnumerableProperty, + lastEventId: kEnumerableProperty, + source: kEnumerableProperty, + ports: kEnumerableProperty, + initMessageEvent: kEnumerableProperty +}) + +Object.defineProperties(CloseEvent.prototype, { + [Symbol.toStringTag]: { + value: 'CloseEvent', + configurable: true + }, + reason: kEnumerableProperty, + code: kEnumerableProperty, + wasClean: kEnumerableProperty +}) + +Object.defineProperties(ErrorEvent.prototype, { + [Symbol.toStringTag]: { + value: 'ErrorEvent', + configurable: true + }, + message: kEnumerableProperty, + filename: kEnumerableProperty, + lineno: kEnumerableProperty, + colno: kEnumerableProperty, + error: kEnumerableProperty +}) + +webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.MessagePort +) + +const eventInit = [ + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +] + +webidl.converters.MessageEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'data', + converter: webidl.converters.any, + defaultValue: null + }, + { + key: 'origin', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lastEventId', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'source', + // Node doesn't implement WindowProxy or ServiceWorker, so the only + // valid value for source is a MessagePort. 
+ converter: webidl.nullableConverter(webidl.converters.MessagePort), + defaultValue: null + }, + { + key: 'ports', + converter: webidl.converters['sequence'], + get defaultValue () { + return [] + } + } +]) + +webidl.converters.CloseEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'wasClean', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'code', + converter: webidl.converters['unsigned short'], + defaultValue: 0 + }, + { + key: 'reason', + converter: webidl.converters.USVString, + defaultValue: '' + } +]) + +webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'message', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'filename', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lineno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'colno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'error', + converter: webidl.converters.any + } +]) + +module.exports = { + MessageEvent, + CloseEvent, + ErrorEvent +} diff --git a/lib/websocket/frame.js b/lib/websocket/frame.js new file mode 100644 index 0000000..d867ad1 --- /dev/null +++ b/lib/websocket/frame.js @@ -0,0 +1,73 @@ +'use strict' + +const { maxUnsigned16Bit } = require('./constants') + +/** @type {import('crypto')} */ +let crypto +try { + crypto = require('crypto') +} catch { + +} + +class WebsocketFrameSend { + /** + * @param {Buffer|undefined} data + */ + constructor (data) { + this.frameData = data + this.maskKey = crypto.randomBytes(4) + } + + createFrame (opcode) { + const bodyLength = this.frameData?.byteLength ?? 0 + + /** @type {number} */ + let payloadLength = bodyLength // 0-125 + let offset = 6 + + if (bodyLength > maxUnsigned16Bit) { + offset += 8 // payload length is next 8 bytes + payloadLength = 127 + } else if (bodyLength > 125) { + offset += 2 // payload length is next 2 bytes + payloadLength = 126 + } + + const buffer = Buffer.allocUnsafe(bodyLength + offset) + + // Clear first 2 bytes, everything else is overwritten + buffer[0] = buffer[1] = 0 + buffer[0] |= 0x80 // FIN + buffer[0] = (buffer[0] & 0xF0) + opcode // opcode + + /*! ws. MIT License. 
Einar Otto Stangvik */ + buffer[offset - 4] = this.maskKey[0] + buffer[offset - 3] = this.maskKey[1] + buffer[offset - 2] = this.maskKey[2] + buffer[offset - 1] = this.maskKey[3] + + buffer[1] = payloadLength + + if (payloadLength === 126) { + buffer.writeUInt16BE(bodyLength, 2) + } else if (payloadLength === 127) { + // Clear extended payload length + buffer[2] = buffer[3] = 0 + buffer.writeUIntBE(bodyLength, 4, 6) + } + + buffer[1] |= 0x80 // MASK + + // mask body + for (let i = 0; i < bodyLength; i++) { + buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] + } + + return buffer + } +} + +module.exports = { + WebsocketFrameSend +} diff --git a/lib/websocket/receiver.js b/lib/websocket/receiver.js new file mode 100644 index 0000000..bdd2031 --- /dev/null +++ b/lib/websocket/receiver.js @@ -0,0 +1,344 @@ +'use strict' + +const { Writable } = require('stream') +const diagnosticsChannel = require('diagnostics_channel') +const { parserStates, opcodes, states, emptyBuffer } = require('./constants') +const { kReadyState, kSentClose, kResponse, kReceivedClose } = require('./symbols') +const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require('./util') +const { WebsocketFrameSend } = require('./frame') + +// This code was influenced by ws released under the MIT license. +// Copyright (c) 2011 Einar Otto Stangvik +// Copyright (c) 2013 Arnout Kazemier and contributors +// Copyright (c) 2016 Luigi Pinca and contributors + +const channels = {} +channels.ping = diagnosticsChannel.channel('undici:websocket:ping') +channels.pong = diagnosticsChannel.channel('undici:websocket:pong') + +class ByteParser extends Writable { + #buffers = [] + #byteOffset = 0 + + #state = parserStates.INFO + + #info = {} + #fragments = [] + + constructor (ws) { + super() + + this.ws = ws + } + + /** + * @param {Buffer} chunk + * @param {() => void} callback + */ + _write (chunk, _, callback) { + this.#buffers.push(chunk) + this.#byteOffset += chunk.length + + this.run(callback) + } + + /** + * Runs whenever a new chunk is received. + * Callback is called whenever there are no more chunks buffering, + * or not enough bytes are buffered to parse. + */ + run (callback) { + while (true) { + if (this.#state === parserStates.INFO) { + // If there aren't enough bytes to parse the payload length, etc. 
+ if (this.#byteOffset < 2) { + return callback() + } + + const buffer = this.consume(2) + + this.#info.fin = (buffer[0] & 0x80) !== 0 + this.#info.opcode = buffer[0] & 0x0F + + // If we receive a fragmented message, we use the type of the first + // frame to parse the full message as binary/text, when it's terminated + this.#info.originalOpcode ??= this.#info.opcode + + this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION + + if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { + // Only text and binary frames can be fragmented + failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') + return + } + + const payloadLength = buffer[1] & 0x7F + + if (payloadLength <= 125) { + this.#info.payloadLength = payloadLength + this.#state = parserStates.READ_DATA + } else if (payloadLength === 126) { + this.#state = parserStates.PAYLOADLENGTH_16 + } else if (payloadLength === 127) { + this.#state = parserStates.PAYLOADLENGTH_64 + } + + if (this.#info.fragmented && payloadLength > 125) { + // A fragmented frame can't be fragmented itself + failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') + return + } else if ( + (this.#info.opcode === opcodes.PING || + this.#info.opcode === opcodes.PONG || + this.#info.opcode === opcodes.CLOSE) && + payloadLength > 125 + ) { + // Control frames can have a payload length of 125 bytes MAX + failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') + return + } else if (this.#info.opcode === opcodes.CLOSE) { + if (payloadLength === 1) { + failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') + return + } + + const body = this.consume(payloadLength) + + this.#info.closeInfo = this.parseCloseBody(false, body) + + if (!this.ws[kSentClose]) { + // If an endpoint receives a Close frame and did not previously send a + // Close frame, the endpoint MUST send a Close frame in response. (When + // sending a Close frame in response, the endpoint typically echos the + // status code it received.) + const body = Buffer.allocUnsafe(2) + body.writeUInt16BE(this.#info.closeInfo.code, 0) + const closeFrame = new WebsocketFrameSend(body) + + this.ws[kResponse].socket.write( + closeFrame.createFrame(opcodes.CLOSE), + (err) => { + if (!err) { + this.ws[kSentClose] = true + } + } + ) + } + + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this.ws[kReadyState] = states.CLOSING + this.ws[kReceivedClose] = true + + this.end() + + return + } else if (this.#info.opcode === opcodes.PING) { + // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in + // response, unless it already received a Close frame. + // A Pong frame sent in response to a Ping frame must have identical + // "Application data" + + const body = this.consume(payloadLength) + + if (!this.ws[kReceivedClose]) { + const frame = new WebsocketFrameSend(body) + + this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) + + if (channels.ping.hasSubscribers) { + channels.ping.publish({ + payload: body + }) + } + } + + this.#state = parserStates.INFO + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } else if (this.#info.opcode === opcodes.PONG) { + // A Pong frame MAY be sent unsolicited. This serves as a + // unidirectional heartbeat. 
A response to an unsolicited Pong frame is + // not expected. + + const body = this.consume(payloadLength) + + if (channels.pong.hasSubscribers) { + channels.pong.publish({ + payload: body + }) + } + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } + } else if (this.#state === parserStates.PAYLOADLENGTH_16) { + if (this.#byteOffset < 2) { + return callback() + } + + const buffer = this.consume(2) + + this.#info.payloadLength = buffer.readUInt16BE(0) + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.PAYLOADLENGTH_64) { + if (this.#byteOffset < 8) { + return callback() + } + + const buffer = this.consume(8) + const upper = buffer.readUInt32BE(0) + + // 2^31 is the maxinimum bytes an arraybuffer can contain + // on 32-bit systems. Although, on 64-bit systems, this is + // 2^53-1 bytes. + // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e + if (upper > 2 ** 31 - 1) { + failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') + return + } + + const lower = buffer.readUInt32BE(4) + + this.#info.payloadLength = (upper << 8) + lower + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.READ_DATA) { + if (this.#byteOffset < this.#info.payloadLength) { + // If there is still more data in this chunk that needs to be read + return callback() + } else if (this.#byteOffset >= this.#info.payloadLength) { + // If the server sent multiple frames in a single chunk + + const body = this.consume(this.#info.payloadLength) + + this.#fragments.push(body) + + // If the frame is unfragmented, or a fragmented frame was terminated, + // a message was received + if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { + const fullMessage = Buffer.concat(this.#fragments) + + websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) + + this.#info = {} + this.#fragments.length = 0 + } + + this.#state = parserStates.INFO + } + } + + if (this.#byteOffset > 0) { + continue + } else { + callback() + break + } + } + } + + /** + * Take n bytes from the buffered Buffers + * @param {number} n + * @returns {Buffer|null} + */ + consume (n) { + if (n > this.#byteOffset) { + return null + } else if (n === 0) { + return emptyBuffer + } + + if (this.#buffers[0].length === n) { + this.#byteOffset -= this.#buffers[0].length + return this.#buffers.shift() + } + + const buffer = Buffer.allocUnsafe(n) + let offset = 0 + + while (offset !== n) { + const next = this.#buffers[0] + const { length } = next + + if (length + offset === n) { + buffer.set(this.#buffers.shift(), offset) + break + } else if (length + offset > n) { + buffer.set(next.subarray(0, n - offset), offset) + this.#buffers[0] = next.subarray(n - offset) + break + } else { + buffer.set(this.#buffers.shift(), offset) + offset += next.length + } + } + + this.#byteOffset -= n + + return buffer + } + + parseCloseBody (onlyCode, data) { + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 + /** @type {number|undefined} */ + let code + + if (data.length >= 2) { + // _The WebSocket Connection Close Code_ is + // defined as the status code (Section 7.4) contained in the first 
Close + // control frame received by the application + code = data.readUInt16BE(0) + } + + if (onlyCode) { + if (!isValidStatusCode(code)) { + return null + } + + return { code } + } + + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 + /** @type {Buffer} */ + let reason = data.subarray(2) + + // Remove BOM + if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { + reason = reason.subarray(3) + } + + if (code !== undefined && !isValidStatusCode(code)) { + return null + } + + try { + // TODO: optimize this + reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) + } catch { + return null + } + + return { code, reason } + } + + get closingInfo () { + return this.#info.closeInfo + } +} + +module.exports = { + ByteParser +} diff --git a/lib/websocket/symbols.js b/lib/websocket/symbols.js new file mode 100644 index 0000000..11d03e3 --- /dev/null +++ b/lib/websocket/symbols.js @@ -0,0 +1,12 @@ +'use strict' + +module.exports = { + kWebSocketURL: Symbol('url'), + kReadyState: Symbol('ready state'), + kController: Symbol('controller'), + kResponse: Symbol('response'), + kBinaryType: Symbol('binary type'), + kSentClose: Symbol('sent close'), + kReceivedClose: Symbol('received close'), + kByteParser: Symbol('byte parser') +} diff --git a/lib/websocket/util.js b/lib/websocket/util.js new file mode 100644 index 0000000..6c59b2c --- /dev/null +++ b/lib/websocket/util.js @@ -0,0 +1,200 @@ +'use strict' + +const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = require('./symbols') +const { states, opcodes } = require('./constants') +const { MessageEvent, ErrorEvent } = require('./events') + +/* globals Blob */ + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isEstablished (ws) { + // If the server's response is validated as provided for above, it is + // said that _The WebSocket Connection is Established_ and that the + // WebSocket Connection is in the OPEN state. + return ws[kReadyState] === states.OPEN +} + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosing (ws) { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + return ws[kReadyState] === states.CLOSING +} + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosed (ws) { + return ws[kReadyState] === states.CLOSED +} + +/** + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e + * @param {EventTarget} target + * @param {EventInit | undefined} eventInitDict + */ +function fireEvent (e, target, eventConstructor = Event, eventInitDict) { + // 1. If eventConstructor is not given, then let eventConstructor be Event. + + // 2. Let event be the result of creating an event given eventConstructor, + // in the relevant realm of target. + // 3. Initialize event’s type attribute to e. + const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap + + // 4. Initialize any other IDL attributes of event as described in the + // invocation of this algorithm. + + // 5. Return the result of dispatching event at target, with legacy target + // override flag set if set. 
+ target.dispatchEvent(event) +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @param {import('./websocket').WebSocket} ws + * @param {number} type Opcode + * @param {Buffer} data application data + */ +function websocketMessageReceived (ws, type, data) { + // 1. If ready state is not OPEN (1), then return. + if (ws[kReadyState] !== states.OPEN) { + return + } + + // 2. Let dataForEvent be determined by switching on type and binary type: + let dataForEvent + + if (type === opcodes.TEXT) { + // -> type indicates that the data is Text + // a new DOMString containing data + try { + dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) + } catch { + failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') + return + } + } else if (type === opcodes.BINARY) { + if (ws[kBinaryType] === 'blob') { + // -> type indicates that the data is Binary and binary type is "blob" + // a new Blob object, created in the relevant Realm of the WebSocket + // object, that represents data as its raw data + dataForEvent = new Blob([data]) + } else { + // -> type indicates that the data is Binary and binary type is "arraybuffer" + // a new ArrayBuffer object, created in the relevant Realm of the + // WebSocket object, whose contents are data + dataForEvent = new Uint8Array(data).buffer + } + } + + // 3. Fire an event named message at the WebSocket object, using MessageEvent, + // with the origin attribute initialized to the serialization of the WebSocket + // object’s url's origin, and the data attribute initialized to dataForEvent. + fireEvent('message', ws, MessageEvent, { + origin: ws[kWebSocketURL].origin, + data: dataForEvent + }) +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455 + * @see https://datatracker.ietf.org/doc/html/rfc2616 + * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 + * @param {string} protocol + */ +function isValidSubprotocol (protocol) { + // If present, this value indicates one + // or more comma-separated subprotocol the client wishes to speak, + // ordered by preference. The elements that comprise this value + // MUST be non-empty strings with characters in the range U+0021 to + // U+007E not including separator characters as defined in + // [RFC2616] and MUST all be unique strings. + if (protocol.length === 0) { + return false + } + + for (const char of protocol) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || + code > 0x7E || + char === '(' || + char === ')' || + char === '<' || + char === '>' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' 
|| + char === '=' || + char === '{' || + char === '}' || + code === 32 || // SP + code === 9 // HT + ) { + return false + } + } + + return true +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 + * @param {number} code + */ +function isValidStatusCode (code) { + if (code >= 1000 && code < 1015) { + return ( + code !== 1004 && // reserved + code !== 1005 && // "MUST NOT be set as a status code" + code !== 1006 // "MUST NOT be set as a status code" + ) + } + + return code >= 3000 && code <= 4999 +} + +/** + * @param {import('./websocket').WebSocket} ws + * @param {string|undefined} reason + */ +function failWebsocketConnection (ws, reason) { + const { [kController]: controller, [kResponse]: response } = ws + + controller.abort() + + if (response?.socket && !response.socket.destroyed) { + response.socket.destroy() + } + + if (reason) { + fireEvent('error', ws, ErrorEvent, { + error: new Error(reason) + }) + } +} + +module.exports = { + isEstablished, + isClosing, + isClosed, + fireEvent, + isValidSubprotocol, + isValidStatusCode, + failWebsocketConnection, + websocketMessageReceived +} diff --git a/lib/websocket/websocket.js b/lib/websocket/websocket.js new file mode 100644 index 0000000..e4aa58f --- /dev/null +++ b/lib/websocket/websocket.js @@ -0,0 +1,641 @@ +'use strict' + +const { webidl } = require('../fetch/webidl') +const { DOMException } = require('../fetch/constants') +const { URLSerializer } = require('../fetch/dataURL') +const { getGlobalOrigin } = require('../fetch/global') +const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = require('./constants') +const { + kWebSocketURL, + kReadyState, + kController, + kBinaryType, + kResponse, + kSentClose, + kByteParser +} = require('./symbols') +const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = require('./util') +const { establishWebSocketConnection } = require('./connection') +const { WebsocketFrameSend } = require('./frame') +const { ByteParser } = require('./receiver') +const { kEnumerableProperty, isBlobLike } = require('../core/util') +const { getGlobalDispatcher } = require('../global') +const { types } = require('util') + +let experimentalWarned = false + +// https://websockets.spec.whatwg.org/#interface-definition +class WebSocket extends EventTarget { + #events = { + open: null, + error: null, + close: null, + message: null + } + + #bufferedAmount = 0 + #protocol = '' + #extensions = '' + + /** + * @param {string} url + * @param {string|string[]} protocols + */ + constructor (url, protocols = []) { + super() + + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) + + if (!experimentalWarned) { + experimentalWarned = true + process.emitWarning('WebSockets are experimental, expect them to change at any time.', { + code: 'UNDICI-WS' + }) + } + + const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) + + url = webidl.converters.USVString(url) + protocols = options.protocols + + // 1. Let baseURL be this's relevant settings object's API base URL. + const baseURL = getGlobalOrigin() + + // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. + let urlRecord + + try { + urlRecord = new URL(url, baseURL) + } catch (e) { + // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. + throw new DOMException(e, 'SyntaxError') + } + + // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". 
+ if (urlRecord.protocol === 'http:') { + urlRecord.protocol = 'ws:' + } else if (urlRecord.protocol === 'https:') { + // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". + urlRecord.protocol = 'wss:' + } + + // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. + if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { + throw new DOMException( + `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, + 'SyntaxError' + ) + } + + // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" + // DOMException. + if (urlRecord.hash || urlRecord.href.endsWith('#')) { + throw new DOMException('Got fragment', 'SyntaxError') + } + + // 8. If protocols is a string, set protocols to a sequence consisting + // of just that string. + if (typeof protocols === 'string') { + protocols = [protocols] + } + + // 9. If any of the values in protocols occur more than once or otherwise + // fail to match the requirements for elements that comprise the value + // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket + // protocol, then throw a "SyntaxError" DOMException. + if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } + + if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } + + // 10. Set this's url to urlRecord. + this[kWebSocketURL] = new URL(urlRecord.href) + + // 11. Let client be this's relevant settings object. + + // 12. Run this step in parallel: + + // 1. Establish a WebSocket connection given urlRecord, protocols, + // and client. + this[kController] = establishWebSocketConnection( + urlRecord, + protocols, + this, + (response) => this.#onConnectionEstablished(response), + options + ) + + // Each WebSocket object has an associated ready state, which is a + // number representing the state of the connection. Initially it must + // be CONNECTING (0). + this[kReadyState] = WebSocket.CONNECTING + + // The extensions attribute must initially return the empty string. + + // The protocol attribute must initially return the empty string. + + // Each WebSocket object has an associated binary type, which is a + // BinaryType. Initially it must be "blob". + this[kBinaryType] = 'blob' + } + + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-close + * @param {number|undefined} code + * @param {string|undefined} reason + */ + close (code = undefined, reason = undefined) { + webidl.brandCheck(this, WebSocket) + + if (code !== undefined) { + code = webidl.converters['unsigned short'](code, { clamp: true }) + } + + if (reason !== undefined) { + reason = webidl.converters.USVString(reason) + } + + // 1. If code is present, but is neither an integer equal to 1000 nor an + // integer in the range 3000 to 4999, inclusive, throw an + // "InvalidAccessError" DOMException. + if (code !== undefined) { + if (code !== 1000 && (code < 3000 || code > 4999)) { + throw new DOMException('invalid code', 'InvalidAccessError') + } + } + + let reasonByteLength = 0 + + // 2. If reason is present, then run these substeps: + if (reason !== undefined) { + // 1. Let reasonBytes be the result of encoding reason. + // 2. If reasonBytes is longer than 123 bytes, then throw a + // "SyntaxError" DOMException. 
+ reasonByteLength = Buffer.byteLength(reason) + + if (reasonByteLength > 123) { + throw new DOMException( + `Reason must be less than 123 bytes; received ${reasonByteLength}`, + 'SyntaxError' + ) + } + } + + // 3. Run the first matching steps from the following list: + if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { + // If this's ready state is CLOSING (2) or CLOSED (3) + // Do nothing. + } else if (!isEstablished(this)) { + // If the WebSocket connection is not yet established + // Fail the WebSocket connection and set this's ready state + // to CLOSING (2). + failWebsocketConnection(this, 'Connection was closed before it was established.') + this[kReadyState] = WebSocket.CLOSING + } else if (!isClosing(this)) { + // If the WebSocket closing handshake has not yet been started + // Start the WebSocket closing handshake and set this's ready + // state to CLOSING (2). + // - If neither code nor reason is present, the WebSocket Close + // message must not have a body. + // - If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + // - If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + + const frame = new WebsocketFrameSend() + + // If neither code nor reason is present, the WebSocket Close + // message must not have a body. + + // If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + if (code !== undefined && reason === undefined) { + frame.frameData = Buffer.allocUnsafe(2) + frame.frameData.writeUInt16BE(code, 0) + } else if (code !== undefined && reason !== undefined) { + // If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) + frame.frameData.writeUInt16BE(code, 0) + // the body MAY contain UTF-8-encoded data with value /reason/ + frame.frameData.write(reason, 2, 'utf-8') + } else { + frame.frameData = emptyBuffer + } + + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + socket.write(frame.createFrame(opcodes.CLOSE), (err) => { + if (!err) { + this[kSentClose] = true + } + }) + + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this[kReadyState] = states.CLOSING + } else { + // Otherwise + // Set this's ready state to CLOSING (2). + this[kReadyState] = WebSocket.CLOSING + } + } + + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-send + * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data + */ + send (data) { + webidl.brandCheck(this, WebSocket) + + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) + + data = webidl.converters.WebSocketSendData(data) + + // 1. If this's ready state is CONNECTING, then throw an + // "InvalidStateError" DOMException. + if (this[kReadyState] === WebSocket.CONNECTING) { + throw new DOMException('Sent before connected.', 'InvalidStateError') + } + + // 2. 
Run the appropriate set of steps from the following list: + // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + + if (!isEstablished(this) || isClosing(this)) { + return + } + + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + // If data is a string + if (typeof data === 'string') { + // If the WebSocket connection is established and the WebSocket + // closing handshake has not yet started, then the user agent + // must send a WebSocket Message comprised of the data argument + // using a text frame opcode; if the data cannot be sent, e.g. + // because it would need to be buffered but the buffer is full, + // the user agent must flag the WebSocket as full and then close + // the WebSocket connection. Any invocation of this method with a + // string argument that does not throw an exception must increase + // the bufferedAmount attribute by the number of bytes needed to + // express the argument as UTF-8. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.TEXT) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (types.isArrayBuffer(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need + // to be buffered but the buffer is full, the user agent must flag + // the WebSocket as full and then close the WebSocket connection. + // The data to be sent is the data stored in the buffer described + // by the ArrayBuffer object. Any invocation of this method with an + // ArrayBuffer argument that does not throw an exception must + // increase the bufferedAmount attribute by the length of the + // ArrayBuffer in bytes. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (ArrayBuffer.isView(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The + // data to be sent is the data stored in the section of the buffer + // described by the ArrayBuffer object that data references. Any + // invocation of this method with this kind of argument that does + // not throw an exception must increase the bufferedAmount attribute + // by the length of data’s buffer in bytes. 
+ + const ab = Buffer.from(data, data.byteOffset, data.byteLength) + + const frame = new WebsocketFrameSend(ab) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += ab.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= ab.byteLength + }) + } else if (isBlobLike(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The data + // to be sent is the raw data represented by the Blob object. Any + // invocation of this method with a Blob argument that does not throw + // an exception must increase the bufferedAmount attribute by the size + // of the Blob object’s raw data, in bytes. + + const frame = new WebsocketFrameSend() + + data.arrayBuffer().then((ab) => { + const value = Buffer.from(ab) + frame.frameData = value + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + }) + } + } + + get readyState () { + webidl.brandCheck(this, WebSocket) + + // The readyState getter steps are to return this's ready state. + return this[kReadyState] + } + + get bufferedAmount () { + webidl.brandCheck(this, WebSocket) + + return this.#bufferedAmount + } + + get url () { + webidl.brandCheck(this, WebSocket) + + // The url getter steps are to return this's url, serialized. + return URLSerializer(this[kWebSocketURL]) + } + + get extensions () { + webidl.brandCheck(this, WebSocket) + + return this.#extensions + } + + get protocol () { + webidl.brandCheck(this, WebSocket) + + return this.#protocol + } + + get onopen () { + webidl.brandCheck(this, WebSocket) + + return this.#events.open + } + + set onopen (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.open) { + this.removeEventListener('open', this.#events.open) + } + + if (typeof fn === 'function') { + this.#events.open = fn + this.addEventListener('open', fn) + } else { + this.#events.open = null + } + } + + get onerror () { + webidl.brandCheck(this, WebSocket) + + return this.#events.error + } + + set onerror (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.error) { + this.removeEventListener('error', this.#events.error) + } + + if (typeof fn === 'function') { + this.#events.error = fn + this.addEventListener('error', fn) + } else { + this.#events.error = null + } + } + + get onclose () { + webidl.brandCheck(this, WebSocket) + + return this.#events.close + } + + set onclose (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.close) { + this.removeEventListener('close', this.#events.close) + } + + if (typeof fn === 'function') { + this.#events.close = fn + this.addEventListener('close', fn) + } else { + this.#events.close = null + } + } + + get onmessage () { + webidl.brandCheck(this, WebSocket) + + return this.#events.message + } + + set onmessage (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.message) { + this.removeEventListener('message', this.#events.message) + } + + if (typeof fn === 'function') { + this.#events.message = fn + this.addEventListener('message', fn) + } else { + this.#events.message = null + } + } + + get binaryType () { + webidl.brandCheck(this, WebSocket) + 
+ return this[kBinaryType] + } + + set binaryType (type) { + webidl.brandCheck(this, WebSocket) + + if (type !== 'blob' && type !== 'arraybuffer') { + this[kBinaryType] = 'blob' + } else { + this[kBinaryType] = type + } + } + + /** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + */ + #onConnectionEstablished (response) { + // processResponse is called when the "response’s header list has been received and initialized." + // once this happens, the connection is open + this[kResponse] = response + + const parser = new ByteParser(this) + parser.on('drain', function onParserDrain () { + this.ws[kResponse].socket.resume() + }) + + response.socket.ws = this + this[kByteParser] = parser + + // 1. Change the ready state to OPEN (1). + this[kReadyState] = states.OPEN + + // 2. Change the extensions attribute’s value to the extensions in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 + const extensions = response.headersList.get('sec-websocket-extensions') + + if (extensions !== null) { + this.#extensions = extensions + } + + // 3. Change the protocol attribute’s value to the subprotocol in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 + const protocol = response.headersList.get('sec-websocket-protocol') + + if (protocol !== null) { + this.#protocol = protocol + } + + // 4. Fire an event named open at the WebSocket object. + fireEvent('open', this) + } +} + +// https://websockets.spec.whatwg.org/#dom-websocket-connecting +WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING +// https://websockets.spec.whatwg.org/#dom-websocket-open +WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN +// https://websockets.spec.whatwg.org/#dom-websocket-closing +WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING +// https://websockets.spec.whatwg.org/#dom-websocket-closed +WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED + +Object.defineProperties(WebSocket.prototype, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors, + url: kEnumerableProperty, + readyState: kEnumerableProperty, + bufferedAmount: kEnumerableProperty, + onopen: kEnumerableProperty, + onerror: kEnumerableProperty, + onclose: kEnumerableProperty, + close: kEnumerableProperty, + onmessage: kEnumerableProperty, + binaryType: kEnumerableProperty, + send: kEnumerableProperty, + extensions: kEnumerableProperty, + protocol: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'WebSocket', + writable: false, + enumerable: false, + configurable: true + } +}) + +Object.defineProperties(WebSocket, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors +}) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.DOMString +) + +webidl.converters['DOMString or sequence'] = function (V) { + if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { + return webidl.converters['sequence'](V) + } + + return webidl.converters.DOMString(V) +} + +// This implements the propsal made in https://github.com/whatwg/websockets/issues/42 +webidl.converters.WebSocketInit = webidl.dictionaryConverter([ + { + key: 'protocols', + converter: webidl.converters['DOMString or sequence'], + get defaultValue () { + return [] + } + }, + { + key: 'dispatcher', + converter: (V) 
=> V, + get defaultValue () { + return getGlobalDispatcher() + } + }, + { + key: 'headers', + converter: webidl.nullableConverter(webidl.converters.HeadersInit) + } +]) + +webidl.converters['DOMString or sequence or WebSocketInit'] = function (V) { + if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { + return webidl.converters.WebSocketInit(V) + } + + return { protocols: webidl.converters['DOMString or sequence'](V) } +} + +webidl.converters.WebSocketSendData = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + return webidl.converters.BufferSource(V) + } + } + + return webidl.converters.USVString(V) +} + +module.exports = { + WebSocket +} diff --git a/llhttp/.dockerignore b/llhttp/.dockerignore new file mode 100644 index 0000000..11b226d --- /dev/null +++ b/llhttp/.dockerignore @@ -0,0 +1,6 @@ +* +!package.json +!package-lock.json +!tsconfig.json +!bin +!src diff --git a/llhttp/.eslintrc.js b/llhttp/.eslintrc.js new file mode 100644 index 0000000..595cf53 --- /dev/null +++ b/llhttp/.eslintrc.js @@ -0,0 +1,31 @@ +module.exports = { + 'env': { + 'browser': false, + 'commonjs': true, + 'es6': true, + 'node': true + }, + 'extends': 'eslint:recommended', + 'rules': { + 'max-len': [ 2, { + 'code': 80, + 'ignoreComments': true + } ], + 'indent': [ + 'error', + 2 + ], + 'linebreak-style': [ + 'error', + 'unix' + ], + 'quotes': [ + 'error', + 'single' + ], + 'semi': [ + 'error', + 'always' + ] + } +}; diff --git a/llhttp/.github/workflows/aiohttp.yml b/llhttp/.github/workflows/aiohttp.yml new file mode 100644 index 0000000..8ae8eb3 --- /dev/null +++ b/llhttp/.github/workflows/aiohttp.yml @@ -0,0 +1,61 @@ +name: Aiohttp +# If you don't understand the reason for a test failure, ping @Dreamsorcerer or open an issue in aio-libs/aiohttp. 
+ +on: + push: + branches: + - 'main' + pull_request: + branches: + - 'main' + +jobs: + test: + permissions: + contents: read # to fetch code (actions/checkout) + + name: Aiohttp regression tests + runs-on: ubuntu-latest + steps: + - name: Checkout aiohttp + uses: actions/checkout@v4 + with: + repository: aio-libs/aiohttp + - name: Checkout llhttp + uses: actions/checkout@v4 + with: + path: vendor/llhttp + - name: Restore node_modules cache + uses: actions/cache@v3 + with: + path: vendor/llhttp/.npm + key: ubuntu-latest-node-${{ hashFiles('vendor/llhttp/**/package-lock.json') }} + restore-keys: ubuntu-latest-node- + - name: Install llhttp dependencies + run: npm install --ignore-scripts + working-directory: vendor/llhttp + - name: Build llhttp + run: make + working-directory: vendor/llhttp + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: 3.x + cache: 'pip' + cache-dependency-path: 'requirements/*.txt' + - name: Provision the dev env + run: >- + PATH="${HOME}/.local/bin:${PATH}" + make .develop + - name: Run tests + env: + COLOR: yes + run: >- + PATH="${HOME}/.local/bin:${PATH}" + pytest tests/test_http_parser.py tests/test_web_functional.py + - name: Run dev_mode tests + env: + COLOR: yes + run: >- + PATH="${HOME}/.local/bin:${PATH}" + python -X dev -m pytest -m dev_mode tests/test_http_parser.py tests/test_web_functional.py diff --git a/llhttp/.github/workflows/ci.yaml b/llhttp/.github/workflows/ci.yaml new file mode 100644 index 0000000..d1b3a65 --- /dev/null +++ b/llhttp/.github/workflows/ci.yaml @@ -0,0 +1,117 @@ +name: CI + +on: [push, pull_request] + +env: + CI: true + +jobs: + build: + name: Build libllhttp.a + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: + - macos-latest + - ubuntu-latest + - windows-latest + steps: + - name: Install clang for Windows + if: runner.os == 'Windows' + run: | + iwr -useb get.scoop.sh -outfile 'install.ps1' + .\install.ps1 -RunAsAdmin + scoop install llvm --global + + # Scoop modifies the PATH so we make the modified PATH global. + echo $env:PATH >> $env:GITHUB_PATH + + - name: Fetch code + uses: actions/checkout@v3 + with: + fetch-depth: 1 + + # Skip macOS & Windows, cache there is slower + - name: Restore node_modules cache for Linux + uses: actions/cache@v3 + if: runner.os == 'Linux' + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + + - name: Install dependencies + run: npm install --ignore-scripts + + - name: Build libllhttp.a + shell: bash + run: | + make build/libllhttp.a + + test: + name: Run tests + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: + - macos-latest + - ubuntu-latest + - windows-latest + steps: + - name: Install clang for Windows + if: runner.os == 'Windows' + run: | + iwr -useb get.scoop.sh -outfile 'install.ps1' + .\install.ps1 -RunAsAdmin + scoop install llvm --global + + # Scoop modifies the PATH so we make the modified PATH global. 
+ echo $env:PATH >> $env:GITHUB_PATH + + - name: Fetch code + uses: actions/checkout@v3 + with: + fetch-depth: 1 + + # Skip macOS & Windows, cache there is slower + - name: Restore node_modules cache for Linux + uses: actions/cache@v3 + if: runner.os == 'Linux' + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + + - name: Install dependencies + run: npm install --ignore-scripts + + # Custom script, because progress looks not good in CI + - name: Run tests + env: + CFLAGS: -O0 + run: npx mocha --timeout 30000 -r ts-node/register/type-check test/*-test.ts + + lint: + name: Run TSLint + runs-on: ubuntu-latest + steps: + - name: Fetch code + uses: actions/checkout@v3 + with: + fetch-depth: 1 + + - name: Restore node_modules cache + uses: actions/cache@v3 + with: + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + + - name: Install dependencies + run: npm install --ignore-scripts + + - name: Run lint command + run: npm run lint diff --git a/llhttp/.gitignore b/llhttp/.gitignore new file mode 100644 index 0000000..c2e9902 --- /dev/null +++ b/llhttp/.gitignore @@ -0,0 +1,6 @@ +node_modules/ +npm-debug.log +test/tmp/ +lib/ +build/ +release/ diff --git a/llhttp/.npmrc b/llhttp/.npmrc new file mode 100644 index 0000000..cafe685 --- /dev/null +++ b/llhttp/.npmrc @@ -0,0 +1 @@ +package-lock=true diff --git a/llhttp/CMakeLists.txt b/llhttp/CMakeLists.txt new file mode 100644 index 0000000..97fa408 --- /dev/null +++ b/llhttp/CMakeLists.txt @@ -0,0 +1,117 @@ +cmake_minimum_required(VERSION 3.5.1) +cmake_policy(SET CMP0069 NEW) + +project(llhttp VERSION _RELEASE_) +include(GNUInstallDirs) + +set(CMAKE_C_STANDARD 99) + +# By default build in relwithdebinfo type, supports both lowercase and uppercase +if(NOT CMAKE_CONFIGURATION_TYPES) + set(allowableBuildTypes DEBUG RELEASE RELWITHDEBINFO MINSIZEREL) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "${allowableBuildTypes}") + if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE RELWITHDEBINFO CACHE STRING "" FORCE) + else() + string(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE) + if(NOT CMAKE_BUILD_TYPE IN_LIST allowableBuildTypes) + message(FATAL_ERROR "Invalid build type: ${CMAKE_BUILD_TYPE}") + endif() + endif() +endif() + +# +# Options +# +# Generic option +option(BUILD_SHARED_LIBS "Build shared libraries (.dll/.so)" ON) +option(BUILD_STATIC_LIBS "Build static libraries (.lib/.a)" OFF) + +# Source code +set(LLHTTP_SOURCES + ${CMAKE_CURRENT_SOURCE_DIR}/src/llhttp.c + ${CMAKE_CURRENT_SOURCE_DIR}/src/http.c + ${CMAKE_CURRENT_SOURCE_DIR}/src/api.c +) + +set(LLHTTP_HEADERS + ${CMAKE_CURRENT_SOURCE_DIR}/include/llhttp.h +) + +configure_file( + ${CMAKE_CURRENT_SOURCE_DIR}/libllhttp.pc.in + ${CMAKE_CURRENT_SOURCE_DIR}/libllhttp.pc + @ONLY +) + +function(config_library target) + target_sources(${target} PRIVATE ${LLHTTP_SOURCES} ${LLHTTP_HEADERS}) + + target_include_directories(${target} PUBLIC + $ + $ + ) + + set_target_properties(${target} PROPERTIES + OUTPUT_NAME llhttp + VERSION ${PROJECT_VERSION} + SOVERSION ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} + PUBLIC_HEADER ${LLHTTP_HEADERS} + ) + + install(TARGETS ${target} + EXPORT llhttp + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} + ) + + install(FILES + ${CMAKE_CURRENT_SOURCE_DIR}/libllhttp.pc + DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig 
+ ) + + # This is required to work with FetchContent + install(EXPORT llhttp + FILE llhttp-config.cmake + NAMESPACE llhttp:: + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/llhttp + ) +endfunction(config_library target) + +if(BUILD_SHARED_LIBS) + add_library(llhttp_shared SHARED + ${llhttp_src} + ) + add_library(llhttp::llhttp ALIAS llhttp_shared) + config_library(llhttp_shared) +endif() + +if(BUILD_STATIC_LIBS) + add_library(llhttp_static STATIC + ${llhttp_src} + ) + if(BUILD_SHARED_LIBS) + add_library(llhttp::llhttp ALIAS llhttp_shared) + else() + add_library(llhttp::llhttp ALIAS llhttp_static) + endif() + config_library(llhttp_static) +endif() + +# On windows with Visual Studio, add a debug postfix so that release +# and debug libraries can coexist. +if(MSVC) + set(CMAKE_DEBUG_POSTFIX "d") +endif() + +# Print project configure summary +message(STATUS "") +message(STATUS "") +message(STATUS "Project configure summary:") +message(STATUS "") +message(STATUS " CMake build type .................: ${CMAKE_BUILD_TYPE}") +message(STATUS " Install prefix ...................: ${CMAKE_INSTALL_PREFIX}") +message(STATUS " Build shared library .............: ${BUILD_SHARED_LIBS}") +message(STATUS " Build static library .............: ${BUILD_STATIC_LIBS}") +message(STATUS "") diff --git a/llhttp/CNAME b/llhttp/CNAME new file mode 100644 index 0000000..4c4e078 --- /dev/null +++ b/llhttp/CNAME @@ -0,0 +1 @@ +llhttp.org \ No newline at end of file diff --git a/llhttp/CODE_OF_CONDUCT.md b/llhttp/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..8470ae4 --- /dev/null +++ b/llhttp/CODE_OF_CONDUCT.md @@ -0,0 +1,4 @@ +# Code of Conduct + +* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/main/CODE_OF_CONDUCT.md) +* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/main/Moderation-Policy.md) diff --git a/llhttp/Dockerfile b/llhttp/Dockerfile new file mode 100644 index 0000000..2b5bfae --- /dev/null +++ b/llhttp/Dockerfile @@ -0,0 +1,13 @@ +FROM node:18-alpine +ARG UID=1000 +ARG GID=1000 + +RUN apk add -U clang lld wasi-sdk && mkdir /home/node/llhttp + +WORKDIR /home/node/llhttp + +COPY . . + +RUN npm ci + +USER node diff --git a/llhttp/LICENSE-MIT b/llhttp/LICENSE-MIT new file mode 100644 index 0000000..6c1512d --- /dev/null +++ b/llhttp/LICENSE-MIT @@ -0,0 +1,22 @@ +This software is licensed under the MIT License. + +Copyright Fedor Indutny, 2018. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/llhttp/Makefile b/llhttp/Makefile new file mode 100644 index 0000000..d9c6d35 --- /dev/null +++ b/llhttp/Makefile @@ -0,0 +1,93 @@ +CLANG ?= clang +CFLAGS ?= +OS ?= + +CFLAGS += -Os -g3 -Wall -Wextra -Wno-unused-parameter +ifneq ($(OS),Windows_NT) + # NOTE: clang on windows does not support fPIC + CFLAGS += -fPIC +endif + +INCLUDES += -Ibuild/ + +INSTALL ?= install +PREFIX ?= /usr/local +LIBDIR = $(PREFIX)/lib +INCLUDEDIR = $(PREFIX)/include + +all: build/libllhttp.a build/libllhttp.so + +clean: + rm -rf release/ + rm -rf build/ + +build/libllhttp.so: build/c/llhttp.o build/native/api.o \ + build/native/http.o + $(CLANG) -shared $^ -o $@ + +build/libllhttp.a: build/c/llhttp.o build/native/api.o \ + build/native/http.o + $(AR) rcs $@ build/c/llhttp.o build/native/api.o build/native/http.o + +build/c/llhttp.o: build/c/llhttp.c + $(CLANG) $(CFLAGS) $(INCLUDES) -c $< -o $@ + +build/native/%.o: src/native/%.c build/llhttp.h src/native/api.h \ + build/native + $(CLANG) $(CFLAGS) $(INCLUDES) -c $< -o $@ + +build/llhttp.h: generate +build/c/llhttp.c: generate + +build/native: + mkdir -p build/native + +release: clean generate + @echo "${RELEASE}" | grep -q -E ".+" || { echo "Please make sure the RELEASE argument is set."; exit 1; } + rm -rf release + mkdir -p release/src + mkdir -p release/include + cp -rf build/llhttp.h release/include/ + cp -rf build/c/llhttp.c release/src/ + cp -rf src/native/*.c release/src/ + cp -rf src/llhttp.gyp release/ + cp -rf src/common.gypi release/ + sed s/_RELEASE_/$(RELEASE)/ CMakeLists.txt > release/CMakeLists.txt + cp -rf libllhttp.pc.in release/ + cp -rf README.md release/ + cp -rf LICENSE-MIT release/ + +github-release: + @echo "${RELEASE_V}" | grep -q -E "^v" || { echo "Please make sure version starts with \"v\"."; exit 1; } + gh release create -d --generate-notes ${RELEASE_V} + @sleep 5 + gh release view ${RELEASE_V} -t "{{.body}}" --json body > RELEASE_NOTES + gh release delete ${RELEASE_V} -y + gh release create -F RELEASE_NOTES -d --title ${RELEASE_V} --target release release/${RELEASE_V} + @sleep 5 + rm -rf RELEASE_NOTES + open $$(gh release view release/${RELEASE_V} --json url -t "{{.url}}") + +postversion: release + git fetch origin + git push + git checkout release -- + cp -rf release/* ./ + rm -rf release + git add include src *.gyp *.gypi CMakeLists.txt README.md LICENSE-MIT libllhttp.pc.in + git commit -a -m "release: $(RELEASE)" + git tag "release/v$(RELEASE)" + git push && git push --tags + git checkout main + +generate: + npx ts-node bin/generate.ts + +install: build/libllhttp.a build/libllhttp.so + $(INSTALL) -d $(DESTDIR)$(INCLUDEDIR) + $(INSTALL) -d $(DESTDIR)$(LIBDIR) + $(INSTALL) -C build/llhttp.h $(DESTDIR)$(INCLUDEDIR)/llhttp.h + $(INSTALL) -C build/libllhttp.a $(DESTDIR)$(LIBDIR)/libllhttp.a + $(INSTALL) build/libllhttp.so $(DESTDIR)$(LIBDIR)/libllhttp.so + +.PHONY: all generate clean release postversion github-release diff --git a/llhttp/README.md b/llhttp/README.md new file mode 100644 index 0000000..4960dbb --- /dev/null +++ b/llhttp/README.md @@ -0,0 +1,501 @@ +# llhttp +[![CI](https://github.com/nodejs/llhttp/workflows/CI/badge.svg)](https://github.com/nodejs/llhttp/actions?query=workflow%3ACI) + +Port of [http_parser][0] to [llparse][1]. + +## Why? + +Let's face it, [http_parser][0] is practically unmaintainable. Even +introduction of a single new method results in a significant code churn. 
+ +This project aims to: + +* Make it maintainable +* Verifiable +* Improving benchmarks where possible + +More details in [Fedor Indutny's talk at JSConf EU 2019](https://youtu.be/x3k_5Mi66sY) + +## How? + +Over time, different approaches for improving [http_parser][0]'s code base +were tried. However, all of them failed due to resulting significant performance +degradation. + +This project is a port of [http_parser][0] to TypeScript. [llparse][1] is used +to generate the output C source file, which could be compiled and +linked with the embedder's program (like [Node.js][7]). + +## Performance + +So far llhttp outperforms http_parser: + +| | input size | bandwidth | reqs/sec | time | +|:----------------|-----------:|-------------:|-----------:|--------:| +| **llhttp** | 8192.00 mb | 1777.24 mb/s | 3583799.39 req/sec | 4.61 s | +| **http_parser** | 8192.00 mb | 694.66 mb/s | 1406180.33 req/sec | 11.79 s | + +llhttp is faster by approximately **156%**. + +## Maintenance + +llhttp project has about 1400 lines of TypeScript code describing the parser +itself and around 450 lines of C code and headers providing the helper methods. +The whole [http_parser][0] is implemented in approximately 2500 lines of C, and +436 lines of headers. + +All optimizations and multi-character matching in llhttp are generated +automatically, and thus doesn't add any extra maintenance cost. On the contrary, +most of http_parser's code is hand-optimized and unrolled. Instead describing +"how" it should parse the HTTP requests/responses, a maintainer should +implement the new features in [http_parser][0] cautiously, considering +possible performance degradation and manually optimizing the new code. + +## Verification + +The state machine graph is encoded explicitly in llhttp. The [llparse][1] +automatically checks the graph for absence of loops and correct reporting of the +input ranges (spans) like header names and values. In the future, additional +checks could be performed to get even stricter verification of the llhttp. + +## Usage + +```C +#include "stdio.h" +#include "llhttp.h" +#include "string.h" + +int handle_on_message_complete(llhttp_t* parser) { + fprintf(stdout, "Message completed!\n"); + return 0; +} + +int main() { + llhttp_t parser; + llhttp_settings_t settings; + + /*Initialize user callbacks and settings */ + llhttp_settings_init(&settings); + + /*Set user callback */ + settings.on_message_complete = handle_on_message_complete; + + /*Initialize the parser in HTTP_BOTH mode, meaning that it will select between + *HTTP_REQUEST and HTTP_RESPONSE parsing automatically while reading the first + *input. + */ + llhttp_init(&parser, HTTP_BOTH, &settings); + + /*Parse request! */ + const char* request = "GET / HTTP/1.1\r\n\r\n"; + int request_len = strlen(request); + + enum llhttp_errno err = llhttp_execute(&parser, request, request_len); + if (err == HPE_OK) { + fprintf(stdout, "Successfully parsed!\n"); + } else { + fprintf(stderr, "Parse error: %s %s\n", llhttp_errno_name(err), parser.reason); + } +} +``` +For more information on API usage, please refer to [src/native/api.h](https://github.com/nodejs/llhttp/blob/main/src/native/api.h). + +## API + +### llhttp_settings_t + +The settings object contains a list of callbacks that the parser will invoke. + +The following callbacks can return `0` (proceed normally), `-1` (error) or `HPE_PAUSED` (pause the parser): + +* `on_message_begin`: Invoked when a new request/response starts. 
+* `on_message_complete`: Invoked when a request/response has been completely parsed.
+* `on_url_complete`: Invoked after the URL has been parsed.
+* `on_method_complete`: Invoked after the HTTP method has been parsed.
+* `on_version_complete`: Invoked after the HTTP version has been parsed.
+* `on_status_complete`: Invoked after the status code has been parsed.
+* `on_header_field_complete`: Invoked after a header name has been parsed.
+* `on_header_value_complete`: Invoked after a header value has been parsed.
+* `on_chunk_header`: Invoked after a new chunk is started. The current chunk length is stored in `parser->content_length`.
+* `on_chunk_extension_name_complete`: Invoked after a chunk extension name has been parsed.
+* `on_chunk_extension_value_complete`: Invoked after a chunk extension value has been parsed.
+* `on_chunk_complete`: Invoked after a new chunk is received.
+* `on_reset`: Invoked after `on_message_complete` and before `on_message_begin` when a new message
+  is received on the same parser. This is not invoked for the first message of the parser.
+
+The following callbacks can return `0` (proceed normally), `-1` (error) or `HPE_USER` (error from the callback):
+
+* `on_url`: Invoked when another character of the URL is received.
+* `on_status`: Invoked when another character of the status is received.
+* `on_method`: Invoked when another character of the method is received.
+  When the parser is created with `HTTP_BOTH` and the input is a response, this is also invoked for the sequence `HTTP/`
+  of the first message.
+* `on_version`: Invoked when another character of the version is received.
+* `on_header_field`: Invoked when another character of a header name is received.
+* `on_header_value`: Invoked when another character of a header value is received.
+* `on_chunk_extension_name`: Invoked when another character of a chunk extension name is received.
+* `on_chunk_extension_value`: Invoked when another character of a chunk extension value is received.
+
+The callback `on_headers_complete`, invoked when the headers are completed, can return:
+
+* `0`: Proceed normally.
+* `1`: Assume that the request/response has no body, and proceed to parsing the next message.
+* `2`: Assume absence of body (as above) and make `llhttp_execute()` return `HPE_PAUSED_UPGRADE`.
+* `-1`: Error.
+* `HPE_PAUSED`: Pause the parser.
+
+### `void llhttp_init(llhttp_t* parser, llhttp_type_t type, const llhttp_settings_t* settings)`
+
+Initialize the parser with the specified type and user settings.
+
+### `uint8_t llhttp_get_type(llhttp_t* parser)`
+
+Returns the type of the parser.
+
+### `uint8_t llhttp_get_http_major(llhttp_t* parser)`
+
+Returns the major version of the HTTP protocol of the current request/response.
+
+### `uint8_t llhttp_get_http_minor(llhttp_t* parser)`
+
+Returns the minor version of the HTTP protocol of the current request/response.
+
+### `uint8_t llhttp_get_method(llhttp_t* parser)`
+
+Returns the method of the current request.
+
+### `int llhttp_get_status_code(llhttp_t* parser)`
+
+Returns the status code of the current response.
+
+### `uint8_t llhttp_get_upgrade(llhttp_t* parser)`
+
+Returns `1` if the request includes the `Connection: upgrade` header.
+
+### `void llhttp_reset(llhttp_t* parser)`
+
+Reset an already initialized parser back to the start state, preserving the
+existing parser type, callback settings, user data, and lenient flags.
+
+### `void llhttp_settings_init(llhttp_settings_t* settings)`
+
+Initialize the settings object.
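+
+Below is a minimal sketch (not taken from the upstream sources) of wiring the span and
+notification callbacks listed above into a `llhttp_settings_t`; the `my_*` function names
+are illustrative only:
+
+```C
+#include <stdio.h>
+#include <string.h>
+#include "llhttp.h"
+
+/* Span callback: receives one fragment of the current header name. */
+int my_on_header_field(llhttp_t* parser, const char* at, size_t length) {
+  (void) parser;
+  fprintf(stdout, "header name fragment: %.*s\n", (int) length, at);
+  return 0; /* 0, -1 or HPE_USER, as documented above */
+}
+
+/* Notification callback: headers are done; 0 means "parse the body as usual". */
+int my_on_headers_complete(llhttp_t* parser) {
+  (void) parser;
+  return 0;
+}
+
+int main(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  settings.on_header_field = my_on_header_field;
+  settings.on_headers_complete = my_on_headers_complete;
+
+  /* HTTP_REQUEST restricts parsing to requests; HTTP_BOTH and HTTP_RESPONSE
+   * are the other parser types. */
+  llhttp_init(&parser, HTTP_REQUEST, &settings);
+
+  const char* request = "GET / HTTP/1.1\r\nHost: example.com\r\n\r\n";
+  return llhttp_execute(&parser, request, strlen(request)) == HPE_OK ? 0 : 1;
+}
+```
+
+Because span callbacks fire for every fragment of the value they cover, a header name may
+arrive in several pieces when the input is fed to `llhttp_execute()` in multiple calls,
+which is why the sketch prints fragments rather than assuming a whole name per call.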
+
+### `llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)`
+
+Parse a full or partial request/response, invoking user callbacks along the way.
+
+If any of the `llhttp_data_cb` callbacks returns an errno not equal to `HPE_OK`, parsing is interrupted
+and that errno is returned from `llhttp_execute()`. If `HPE_PAUSED` was used as the errno,
+execution can be resumed with a `llhttp_resume()` call.
+
+In the special case of a CONNECT/Upgrade request/response, `HPE_PAUSED_UPGRADE` is returned
+after the request/response has been fully parsed. If the user wishes to continue parsing,
+they need to invoke `llhttp_resume_after_upgrade()`.
+
+**If this function ever returns a non-pause type error, it will continue to return
+the same error upon each successive call up until `llhttp_init()` is called.**
+
+### `llhttp_errno_t llhttp_finish(llhttp_t* parser)`
+
+This method should be called when the other side has no further bytes to
+send (e.g. shutdown of the readable side of the TCP connection).
+
+Requests without `Content-Length` and other messages might require treating
+all incoming bytes as part of the body, up to the last byte of the
+connection.
+
+This method will invoke the `on_message_complete()` callback if the
+request was terminated safely. Otherwise an error code is returned.
+
+### `int llhttp_message_needs_eof(const llhttp_t* parser)`
+
+Returns `1` if the incoming message is parsed until the last byte, and has to be completed by calling `llhttp_finish()` on EOF.
+
+### `int llhttp_should_keep_alive(const llhttp_t* parser)`
+
+Returns `1` if there might be any other messages following the last one that was
+successfully parsed.
+
+### `void llhttp_pause(llhttp_t* parser)`
+
+Make further calls of `llhttp_execute()` return `HPE_PAUSED` and set
+an appropriate error reason.
+
+**Do not call this from user callbacks! User callbacks must return
+`HPE_PAUSED` if pausing is required.**
+
+### `void llhttp_resume(llhttp_t* parser)`
+
+Might be called to resume execution after a pause in a user callback.
+
+See `llhttp_execute()` above for details.
+
+**Call this only if `llhttp_execute()` returns `HPE_PAUSED`.**
+
+### `void llhttp_resume_after_upgrade(llhttp_t* parser)`
+
+Might be called to resume execution after a pause in a user callback.
+See `llhttp_execute()` above for details.
+
+**Call this only if `llhttp_execute()` returns `HPE_PAUSED_UPGRADE`.**
+
+### `llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)`
+
+Returns the latest error.
+
+### `const char* llhttp_get_error_reason(const llhttp_t* parser)`
+
+Returns the verbal explanation of the latest returned error.
+
+**A user callback should set the error reason when returning an error. See
+`llhttp_set_error_reason()` for details.**
+
+### `void llhttp_set_error_reason(llhttp_t* parser, const char* reason)`
+
+Assign a verbal description to the returned error. Must be called in user
+callbacks right before returning the errno.
+
+**The `HPE_USER` error code might be useful in user callbacks.**
+
+### `const char* llhttp_get_error_pos(const llhttp_t* parser)`
+
+Returns a pointer to the last parsed byte before the returned error. The
+pointer is relative to the `data` argument of `llhttp_execute()`.
+
+**This method might be useful for counting the number of parsed bytes.**
+
+### `const char* llhttp_errno_name(llhttp_errno_t err)`
+
+Returns the textual name of the error code.
+
+### `const char* llhttp_method_name(llhttp_method_t method)`
+
+Returns the textual name of the HTTP method.
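+
+As a rough sketch (not part of the upstream examples) of the execute/pause/resume and
+error-reporting functions described above, the loop below pauses after every message by
+returning `HPE_PAUSED` from `on_message_complete`, then restarts from the position reported
+by `llhttp_get_error_pos()`; treating that position as the start of the unconsumed remainder
+is an assumption based on the description above:
+
+```C
+#include <stdio.h>
+#include <string.h>
+#include "llhttp.h"
+
+/* Pause after every complete message so the application can react to it. */
+int my_on_message_complete(llhttp_t* parser) {
+  (void) parser;
+  return HPE_PAUSED;
+}
+
+int main(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  settings.on_message_complete = my_on_message_complete;
+  llhttp_init(&parser, HTTP_BOTH, &settings);
+
+  /* Two pipelined requests in a single buffer. */
+  const char* data = "GET /a HTTP/1.1\r\n\r\nGET /b HTTP/1.1\r\n\r\n";
+  const char* p = data;
+  size_t left = strlen(data);
+
+  for (;;) {
+    enum llhttp_errno err = llhttp_execute(&parser, p, left);
+
+    if (err == HPE_PAUSED) {
+      /* Restart from the position reported by the parser. */
+      const char* pos = llhttp_get_error_pos(&parser);
+      left -= (size_t) (pos - p);
+      p = pos;
+      llhttp_resume(&parser);
+      continue;
+    }
+
+    if (err != HPE_OK) {
+      fprintf(stderr, "parse error: %s %s\n", llhttp_errno_name(err),
+              llhttp_get_error_reason(&parser));
+      return 1;
+    }
+
+    break; /* the whole buffer was consumed without error */
+  }
+
+  /* Signal EOF so messages that rely on it can be completed. */
+  return llhttp_finish(&parser) == HPE_OK ? 0 : 1;
+}
+```
+
+The same structure applies to `HPE_PAUSED_UPGRADE`, except that the parser is restarted with
+`llhttp_resume_after_upgrade()` instead of `llhttp_resume()`.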
+
+### `const char* llhttp_status_name(llhttp_status_t status)`
+
+Returns the textual name of the HTTP status.
+
+### `void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient header value parsing (disabled by default).
+Lenient parsing disables header value token checks, extending llhttp's
+protocol support to highly non-compliant clients/servers.
+
+No `HPE_INVALID_HEADER_TOKEN` will be raised for incorrect header values when
+lenient parsing is "on".
+
+**Enabling this flag can pose a security issue since you will be exposed to request smuggling attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of conflicting `Transfer-Encoding` and
+`Content-Length` headers (disabled by default).
+
+Normally `llhttp` would error when `Transfer-Encoding` is present in
+conjunction with `Content-Length`.
+
+This error is important to prevent HTTP request smuggling, but may be less desirable
+for a small number of cases involving legacy servers.
+
+**Enabling this flag can pose a security issue since you will be exposed to request smuggling attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_keep_alive(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of `Connection: close` and HTTP/1.0
+requests/responses.
+
+Normally `llhttp` would error on an HTTP request/response that follows
+a request/response with `Connection: close` and `Content-Length`.
+
+This is important to prevent cache poisoning attacks,
+but might interact badly with outdated and insecure clients.
+
+With this flag the extra request/response will be parsed normally.
+
+**Enabling this flag can pose a security issue since you will be exposed to poisoning attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_transfer_encoding(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of the `Transfer-Encoding` header.
+
+Normally `llhttp` would error when a `Transfer-Encoding` has the `chunked` value
+and another value after it (either in a single header or in multiple
+headers whose values are internally joined using `, `).
+
+This is mandated by the spec to reliably determine request body size and thus
+avoid request smuggling.
+
+With this flag the extra value will be parsed normally.
+
+**Enabling this flag can pose a security issue since you will be exposed to request smuggling attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_version(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of the HTTP version.
+
+Normally `llhttp` would error when the HTTP version in the request or status line
+is not `0.9`, `1.0`, `1.1` or `2.0`.
+With this flag any other version will be accepted and parsed normally.
+
+**Enabling this flag can pose a security issue since you will allow unsupported HTTP versions. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_data_after_close(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of additional data received after a message ends
+and keep-alive is disabled.
+
+Normally `llhttp` would error when additional unexpected data is received if the message
+contains the `Connection` header with the `close` value.
+With this flag the extra data will be discarded without throwing an error.
+
+**Enabling this flag can pose a security issue since you will be exposed to poisoning attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_optional_lf_after_cr(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of incomplete CRLF sequences.
+
+Normally `llhttp` would error when a CR is not followed by LF when terminating the
+request line, the status line, the headers or a chunk header.
+With this flag only a CR is required to terminate such sections.
+
+**Enabling this flag can pose a security issue since you will be exposed to request smuggling attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of line separators.
+
+Normally `llhttp` would error when an LF is not preceded by CR when terminating the
+request line, the status line, the headers, a chunk header or chunk data.
+With this flag only an LF is required to terminate such sections.
+
+**Enabling this flag can pose a security issue since you will be exposed to request smuggling attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_optional_crlf_after_chunk(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of chunks not separated via CRLF.
+
+Normally `llhttp` would error when, after chunk data, the CRLF is missing before
+the next chunk starts.
+With this flag the new chunk can start immediately after the previous one.
+
+**Enabling this flag can pose a security issue since you will be exposed to request smuggling attacks. USE WITH CAUTION!**
+
+### `void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)`
+
+Enables/disables lenient handling of spaces after the chunk size.
+
+Normally `llhttp` would error when a chunk size is followed by one or more spaces instead of a CRLF or `;`.
+With this flag this check is disabled.
+
+**Enabling this flag can pose a security issue since you will be exposed to request smuggling attacks. USE WITH CAUTION!**
+
+## Build Instructions
+
+Make sure you have [Node.js](https://nodejs.org/), npm and npx installed. Then, in the project directory, run:
+
+```sh
+npm install
+make
+```
+
+---
+
+### Bindings to other languages
+
+* Lua: [MunifTanjim/llhttp.lua][11]
+* Python: [pallas/pyllhttp][8]
+* Ruby: [metabahn/llhttp][9]
+* Rust: [JackLiar/rust-llhttp][10]
+
+### Using with CMake
+
+If you want to use this library in a CMake project as a shared library, you can use the snippet below.
+
+```
+FetchContent_Declare(llhttp
+  URL "https://github.com/nodejs/llhttp/archive/refs/tags/release/v8.1.0.tar.gz")
+
+FetchContent_MakeAvailable(llhttp)
+
+# Link with the llhttp_shared target
+target_link_libraries(${EXAMPLE_PROJECT_NAME} ${PROJECT_LIBRARIES} llhttp_shared ${PROJECT_NAME})
+```
+
+If you want to use this library in a CMake project as a static library, you can set some cache variables first.
+ +``` +FetchContent_Declare(llhttp + URL "https://github.com/nodejs/llhttp/archive/refs/tags/release/v8.1.0.tar.gz") + +set(BUILD_SHARED_LIBS OFF CACHE INTERNAL "") +set(BUILD_STATIC_LIBS ON CACHE INTERNAL "") +FetchContent_MakeAvailable(llhttp) + +# Link with the llhttp_static target +target_link_libraries(${EXAMPLE_PROJECT_NAME} ${PROJECT_LIBRARIES} llhttp_static ${PROJECT_NAME}) +``` + +_Note that using the git repo directly (e.g., via a git repo url and tag) will not work with FetchContent_Declare because [CMakeLists.txt](./CMakeLists.txt) requires string replacements (e.g., `_RELEASE_`) before it will build._ + +## Building on Windows + +### Installation + +* `choco install git` +* `choco install node` +* `choco install llvm` (or install the `C++ Clang tools for Windows` optional package from the Visual Studio 2019 installer) +* `choco install make` (or if you have MinGW, it comes bundled) + +1. Ensure that `Clang` and `make` are in your system path. +2. Using Git Bash, clone the repo to your preferred location. +3. Cd into the cloned directory and run `npm install` +5. Run `make` +6. Your `repo/build` directory should now have `libllhttp.a` and `libllhttp.so` static and dynamic libraries. +7. When building your executable, you can link to these libraries. Make sure to set the build folder as an include path when building so you can reference the declarations in `repo/build/llhttp.h`. + +### A simple example on linking with the library: + +Assuming you have an executable `main.cpp` in your current working directory, you would run: `clang++ -Os -g3 -Wall -Wextra -Wno-unused-parameter -I/path/to/llhttp/build main.cpp /path/to/llhttp/build/libllhttp.a -o main.exe`. + +If you are getting `unresolved external symbol` linker errors you are likely attempting to build `llhttp.c` without linking it with object files from `api.c` and `http.c`. + +#### LICENSE + +This software is licensed under the MIT License. + +Copyright Fedor Indutny, 2018. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +[0]: https://github.com/nodejs/http-parser +[1]: https://github.com/nodejs/llparse +[2]: https://en.wikipedia.org/wiki/Register_allocation#Spilling +[3]: https://en.wikipedia.org/wiki/Tail_call +[4]: https://llvm.org/docs/LangRef.html +[5]: https://llvm.org/docs/LangRef.html#call-instruction +[6]: https://clang.llvm.org/ +[7]: https://github.com/nodejs/node +[8]: https://github.com/pallas/pyllhttp +[9]: https://github.com/metabahn/llhttp +[10]: https://github.com/JackLiar/rust-llhttp +[11]: https://github.com/MunifTanjim/llhttp.lua diff --git a/llhttp/_config.yml b/llhttp/_config.yml new file mode 100644 index 0000000..1885487 --- /dev/null +++ b/llhttp/_config.yml @@ -0,0 +1 @@ +theme: jekyll-theme-midnight \ No newline at end of file diff --git a/llhttp/bench/index.ts b/llhttp/bench/index.ts new file mode 100644 index 0000000..b3ff2e1 --- /dev/null +++ b/llhttp/bench/index.ts @@ -0,0 +1,71 @@ +import * as assert from "assert"; +import { spawnSync } from "child_process"; +import { existsSync } from "fs"; +import { resolve } from "path"; + +function request(tpl: TemplateStringsArray): string { + return tpl.raw[0].replace(/^\s+/gm, '').replace(/\n/gm, '').replace(/\\r/gm, '\r').replace(/\\n/gm, '\n') +} + +const urlExecutable = resolve(__dirname, "../test/tmp/url-url-c"); +const httpExecutable = resolve(__dirname, "../test/tmp/http-request-c"); + +const httpRequests: Record = { + "seanmonstar/httparse": request` + GET /wp-content/uploads/2010/03/hello-kitty-darth-vader-pink.jpg HTTP/1.1\r\n + Host: www.kittyhell.com\r\n + User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; ja-JP-mac; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3 Pathtraq/0.9\r\n + Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n + Accept-Language: ja,en-us;q=0.7,en;q=0.3\r\n + Accept-Encoding: gzip,deflate\r\n + Accept-Charset: Shift_JIS,utf-8;q=0.7,*;q=0.7\r\n + Keep-Alive: 115\r\n + Connection: keep-alive\r\n + Cookie: wp_ozh_wsa_visits=2; wp_ozh_wsa_visit_lasttime=xxxxxxxxxx; __utma=xxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.xxxxxxxxxx.x; __utmz=xxxxxxxxx.xxxxxxxxxx.x.x.utmccn=(referral)|utmcsr=reader.livedoor.com|utmcct=/reader/|utmcmd=referral\r\n\r\n + `, + "nodejs/http-parser": request` + POST /joyent/http-parser HTTP/1.1\r\n + Host: github.com\r\n + DNT: 1\r\n + Accept-Encoding: gzip, deflate, sdch\r\n + Accept-Language: ru-RU,ru;q=0.8,en-US;q=0.6,en;q=0.4\r\n + User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) + AppleWebKit/537.36 (KHTML, like Gecko) + Chrome/39.0.2171.65 Safari/537.36\r\n + Accept: text/html,application/xhtml+xml,application/xml;q=0.9, + image/webp,*/*;q=0.8\r\n + Referer: https://github.com/joyent/http-parser\r\n + Connection: keep-alive\r\n + Transfer-Encoding: chunked\r\n + Cache-Control: max-age=0\r\n\r\nb\r\nhello world\r\n0\r\n\r\n + ` +} +const urlRequest = "http://example.com/path/to/file?query=value#fragment"; + +if (!existsSync(urlExecutable) || !existsSync(urlExecutable)) { + console.error( + "\x1b[31m\x1b[1mPlease run npm test in order to create required executables." 
+ ); + process.exit(1); +} + +if (process.argv[2] === "loop") { + const reqName = process.argv[3]; + const request = httpRequests[reqName]!; + + assert(request, `Unknown request name: "${reqName}"`); + spawnSync(httpExecutable, ["loop", request], { stdio: "inherit" }); + process.exit(0); +} + +if (!process.argv[2] || process.argv[2] === "url") { + console.log("url (C)"); + spawnSync(urlExecutable, ["bench", urlRequest], { stdio: "inherit" }); +} + +if (!process.argv[2] || process.argv[2] === "http") { + for (const [name, request] of Object.entries(httpRequests)) { + console.log('http: "%s" (C)', name); + spawnSync(httpExecutable, ["bench", request], { stdio: "inherit" }); + } +} diff --git a/llhttp/bin/build_wasm.ts b/llhttp/bin/build_wasm.ts new file mode 100644 index 0000000..a885703 --- /dev/null +++ b/llhttp/bin/build_wasm.ts @@ -0,0 +1,95 @@ +import { execSync } from 'child_process'; +import { copyFileSync, mkdirSync } from 'fs'; +import { join, resolve } from 'path'; + +let platform = process.env.WASM_PLATFORM ?? ''; +const WASM_OUT = resolve(__dirname, '../build/wasm'); +const WASM_SRC = resolve(__dirname, '../'); + +if (!platform && process.argv[2]) { + platform = execSync('docker info -f "{{.OSType}}/{{.Architecture}}"').toString().trim(); +} + +if (process.argv[2] === '--prebuild') { + const cmd = `docker build --platform=${platform.toString().trim()} -t llhttp_wasm_builder .`; + + /* tslint:disable-next-line no-console */ + console.log(`> ${cmd}\n\n`); + execSync(cmd, { stdio: 'inherit' }); + + process.exit(0); +} + +if (process.argv[2] === '--setup') { + try { + mkdirSync(join(WASM_SRC, 'build')); + process.exit(0); + } catch (error) { + if (error.code !== 'EEXIST') { + throw error; + } + process.exit(0); + } +} + +if (process.argv[2] === '--docker') { + let cmd = `docker run --rm -it --platform=${platform.toString().trim()}`; + // Try to avoid root permission problems on compiled assets + // when running on linux. + // It will work flawessly if uid === gid === 1000 + // there will be some warnings otherwise. + if (process.platform === 'linux') { + cmd += ` --user ${process.getuid()}:${process.getegid()}`; + } + cmd += ` --mount type=bind,source=${WASM_SRC}/build,target=/home/node/llhttp/build llhttp_wasm_builder npm run wasm`; + + /* tslint:disable-next-line no-console */ + console.log(`> ${cmd}\n\n`); + execSync(cmd, { cwd: WASM_SRC, stdio: 'inherit' }); + process.exit(0); +} + +try { + mkdirSync(WASM_OUT); +} catch (error) { + if (error.code !== 'EEXIST') { + throw error; + } +} + +// Build ts +execSync('npm run build', { cwd: WASM_SRC, stdio: 'inherit' }); + +// Build wasm binary +execSync( + `clang \ + --sysroot=/usr/share/wasi-sysroot \ + -target wasm32-unknown-wasi \ + -Ofast \ + -fno-exceptions \ + -fvisibility=hidden \ + -mexec-model=reactor \ + -Wl,-error-limit=0 \ + -Wl,-O3 \ + -Wl,--lto-O3 \ + -Wl,--strip-all \ + -Wl,--allow-undefined \ + -Wl,--export-dynamic \ + -Wl,--export-table \ + -Wl,--export=malloc \ + -Wl,--export=free \ + -Wl,--no-entry \ + ${join(WASM_SRC, 'build', 'c')}/*.c \ + ${join(WASM_SRC, 'src', 'native')}/*.c \ + -I${join(WASM_SRC, 'build')} \ + -o ${join(WASM_OUT, 'llhttp.wasm')}`, + { stdio: 'inherit' }, +); + +// Copy constants for `.js` and `.ts` users. 
+copyFileSync(join(WASM_SRC, 'lib', 'llhttp', 'constants.js'), join(WASM_OUT, 'constants.js')); +copyFileSync(join(WASM_SRC, 'lib', 'llhttp', 'constants.js.map'), join(WASM_OUT, 'constants.js.map')); +copyFileSync(join(WASM_SRC, 'lib', 'llhttp', 'constants.d.ts'), join(WASM_OUT, 'constants.d.ts')); +copyFileSync(join(WASM_SRC, 'lib', 'llhttp', 'utils.js'), join(WASM_OUT, 'utils.js')); +copyFileSync(join(WASM_SRC, 'lib', 'llhttp', 'utils.js.map'), join(WASM_OUT, 'utils.js.map')); +copyFileSync(join(WASM_SRC, 'lib', 'llhttp', 'utils.d.ts'), join(WASM_OUT, 'utils.d.ts')); diff --git a/llhttp/bin/generate.ts b/llhttp/bin/generate.ts new file mode 100755 index 0000000..edb7f49 --- /dev/null +++ b/llhttp/bin/generate.ts @@ -0,0 +1,47 @@ +#!/usr/bin/env -S npx ts-node + +import { mkdirSync, readFileSync, writeFileSync } from 'fs'; +import { LLParse } from 'llparse'; +import { dirname, resolve } from 'path'; +import { parse } from 'semver'; +import { CHeaders, HTTP } from '../src/llhttp'; + +const C_FILE = resolve(__dirname, '../build/c/llhttp.c'); +const HEADER_FILE = resolve(__dirname, '../build/llhttp.h'); + +const pkg = JSON.parse( + readFileSync(resolve(__dirname, '..', 'package.json')).toString(), +); +const version = parse(pkg.version)!; +const llparse = new LLParse('llhttp__internal'); + +const cHeaders = new CHeaders(); +const nativeHeaders = readFileSync(resolve(__dirname, '../src/native/api.h')); +const generated = llparse.build(new HTTP(llparse).build().entry, { + c: { + header: 'llhttp', + }, + debug: process.env.LLPARSE_DEBUG ? 'llhttp__debug' : undefined, + headerGuard: 'INCLUDE_LLHTTP_ITSELF_H_', +}); + +const headers = ` +#ifndef INCLUDE_LLHTTP_H_ +#define INCLUDE_LLHTTP_H_ + +#define LLHTTP_VERSION_MAJOR ${version.major} +#define LLHTTP_VERSION_MINOR ${version.minor} +#define LLHTTP_VERSION_PATCH ${version.patch} + +${generated.header} + +${cHeaders.build()} + +${nativeHeaders} + +#endif /* INCLUDE_LLHTTP_H_ */ +`; + +mkdirSync(dirname(C_FILE), { recursive: true }); +writeFileSync(HEADER_FILE, headers); +writeFileSync(C_FILE, generated.c); diff --git a/llhttp/docs/releasing.md b/llhttp/docs/releasing.md new file mode 100644 index 0000000..f83e0f7 --- /dev/null +++ b/llhttp/docs/releasing.md @@ -0,0 +1,65 @@ +# How to release a new version of llhttp + +## What does releasing involves? + +These are the required steps to release a new version of llhttp: + +1. Increase the version number. +2. Build it locally. +3. Create a new build and push it to GitHub. +4. Create a new release on GitHub release. + +> Do not try to execute the commands in the Makefile manually. This is really error-prone! + +## Which commands to run? + +First of all, make sure you have [GitHub CLI](https://cli.github.com) installed and configured. While this is not strictly necessary, it will make your life way easier. + +As a preliminary check, run the build command and execute the test suite locally: + +``` +npm run build +npm test +``` + +If all goes good, you are ready to go! + +To release a new version of llhttp, first increase the version using `npm` and make sure it also execute the `postversion` script. Unless you have some very specific setup, this should happen automatically, which means the following command will suffice: + +``` +npm version [major|minor|patch] +``` + +The command will increase the version and then will create a new release branch on GitHub. + +> Even thought there is a package on NPM, it is not updated anymore. NEVER RUN `npm publish`! + +It's now time to create the release on GitHub. 
If you DON'T have GitHub CLI available, skip to the next section, otherwise run the following command: + +``` +npm run github-release +``` + +This command will create a draft release on GitHub and then show it in your browser so you can review and publish it. + +Congratulation, you are all set! + +## Create a GitHub release without GitHub CLI + +> From now on, `$VERSION` will be the new version you are trying to create, including the leading letter, for instance `v6.0.9`. + +If you don't want to or can't use GitHub CLI, you can still create the release on GitHub following this procedure. + +1. Go on GitHub and start creating a new release which targets tag `$VERSION`. Generate the notes using the `Generate release notes` button. + +2. At the bottom of the generated notes, make sure the previous and current version in the notes are correct. + + The last line should be something like this: `**Full Changelog**: https://github.com/nodejs/llhttp/compare/v6.0.8...v6.0.9` + + In this case it says we are creating release `v6.0.9` and we are showing the changes between `v6.0.8` and `v6.0.9`. + +3. Change the target of the release to point to tag `release/$VERSION`. + +4. Review and then publish the release. + +Congratulation, you are all set! \ No newline at end of file diff --git a/llhttp/examples/wasm.ts b/llhttp/examples/wasm.ts new file mode 100644 index 0000000..995fed8 --- /dev/null +++ b/llhttp/examples/wasm.ts @@ -0,0 +1,248 @@ +/** + * A minimal Parser that mimicks a small fraction of the Node.js parser + * API. + * To run: + * - `npm run build-wasm` + * - `npx ts-node examples/wasm.ts` + */ +import { readFileSync } from 'fs'; +import { resolve } from 'path'; +import * as constants from '../build/wasm/constants'; + +const bin = readFileSync(resolve(__dirname, '../build/wasm/llhttp.wasm')); +const mod = new WebAssembly.Module(bin); + +const REQUEST = constants.TYPE.REQUEST; +const RESPONSE = constants.TYPE.RESPONSE; +const kOnMessageBegin = 0; +const kOnHeaders = 1; +const kOnHeadersComplete = 2; +const kOnBody = 3; +const kOnMessageComplete = 4; +const kOnExecute = 5; + +const kPtr = Symbol('kPtr'); +const kUrl = Symbol('kUrl'); +const kStatusMessage = Symbol('kStatusMessage'); +const kHeadersFields = Symbol('kHeadersFields'); +const kHeadersValues = Symbol('kHeadersValues'); +const kBody = Symbol('kBody'); +const kReset = Symbol('kReset'); +const kCheckErr = Symbol('kCheckErr'); + +const cstr = (ptr: number, len: number): string => + Buffer.from(memory.buffer, ptr, len).toString(); + +const wasm_on_message_begin = (p: number) => { + const i = instMap.get(p); + i[kReset](); + return i[kOnMessageBegin](); +}; + +const wasm_on_url = (p: number, at: number, length: number) => { + instMap.get(p)[kUrl] = cstr(at, length); + return 0; +}; + +const wasm_on_status = (p: number, at: number, length: number) => { + instMap.get(p)[kStatusMessage] = cstr(at, length); + return 0; +}; + +const wasm_on_header_field = (p: number, at: number, length: number) => { + const i= instMap.get(p) + i[kHeadersFields].push(cstr(at, length)); + return 0; +}; + +const wasm_on_header_value = (p: number, at: number, length: number) => { + const i = instMap.get(p); + i[kHeadersValues].push(cstr(at, length)); + return 0; +}; + +const wasm_on_headers_complete = (p: number) => { + const i = instMap.get(p); + const type = get_type(p); + const versionMajor = get_version_major(p); + const versionMinor = get_version_minor(p); + const rawHeaders = []; + let method; + let url; + let statusCode; + let statusMessage; + const upgrade 
= get_upgrade(p); + const shouldKeepAlive = should_keep_alive(p); + + for (let c = 0; c < i[kHeadersFields].length; c++) { + rawHeaders.push(i[kHeadersFields][c], i[kHeadersValues][c]) + } + + if (type === HTTPParser.REQUEST) { + method = constants.METHODS[get_method(p)]; + url = i[kUrl]; + } else if (type === HTTPParser.RESPONSE) { + statusCode = get_status_code(p); + statusMessage = i[kStatusMessage]; + } + return i[kOnHeadersComplete](versionMajor, versionMinor, rawHeaders, method, +url, statusCode, statusMessage, upgrade, shouldKeepAlive); +}; + +const wasm_on_body = (p: number, at: number, length: number) => { + const i = instMap.get(p); + const body = Buffer.from(memory.buffer, at, length); + return i[kOnBody](body); +}; + +const wasm_on_message_complete = (p: number) => { + return instMap.get(p)[kOnMessageComplete](); +}; + +const instMap = new Map(); + +const inst = new WebAssembly.Instance(mod, { + env: { + wasm_on_message_begin, + wasm_on_url, + wasm_on_status, + wasm_on_header_field, + wasm_on_header_value, + wasm_on_headers_complete, + wasm_on_body, + wasm_on_message_complete, + }, +}); + +const memory = inst.exports.memory as any; +const alloc = inst.exports.llhttp_alloc as CallableFunction; +const malloc = inst.exports.malloc as CallableFunction; +const execute = inst.exports.llhttp_execute as CallableFunction; +const get_type = inst.exports.llhttp_get_type as CallableFunction; +const get_upgrade = inst.exports.llhttp_get_upgrade as CallableFunction; +const should_keep_alive = inst.exports.llhttp_should_keep_alive as CallableFunction; +const get_method = inst.exports.llhttp_get_method as CallableFunction; +const get_status_code = inst.exports.llhttp_get_status_code as CallableFunction; +const get_version_minor = inst.exports.llhttp_get_http_minor as CallableFunction; +const get_version_major = inst.exports.llhttp_get_http_major as CallableFunction; +const get_error_reason = inst.exports.llhttp_get_error_reason as CallableFunction; +const free = inst.exports.free as CallableFunction; +const initialize = inst.exports._initialize as CallableFunction; + +initialize(); // wasi reactor + +class HTTPParser { + static REQUEST = REQUEST; + static RESPONSE = RESPONSE; + static kOnMessageBegin = kOnMessageBegin; + static kOnHeaders = kOnHeaders; + static kOnHeadersComplete = kOnHeadersComplete; + static kOnBody = kOnBody; + static kOnMessageComplete = kOnMessageComplete; + static kOnExecute = kOnExecute; + + [kPtr]: number; + [kUrl]: string; + [kStatusMessage]: null|string; + [kHeadersFields]: []|[string]; + [kHeadersValues]: []|[string]; + [kBody]: null|Buffer; + + constructor(type: constants.TYPE) { + this[kPtr] = alloc(constants.TYPE[type]); + instMap.set(this[kPtr], this); + + this[kUrl] = ''; + this[kStatusMessage] = null; + this[kHeadersFields] = []; + this[kHeadersValues] = []; + this[kBody] = null; + } + + [kReset]() { + this[kUrl] = ''; + this[kStatusMessage] = null; + this[kHeadersFields] = []; + this[kHeadersValues] = []; + this[kBody] = null; + } + + [kOnMessageBegin]() { + return 0; + } + + [kOnHeaders](rawHeaders: [string]) {} + + [kOnHeadersComplete](versionMajor: number, versionMinor: number, rawHeaders: [string], method: string, + url: string, statusCode: number, statusMessage: string, upgrade: boolean, shouldKeepAlive: boolean) { + return 0; + } + + [kOnBody](body: Buffer) { + this[kBody] = body; + return 0; + } + + [kOnMessageComplete]() { + return 0; + } + + destroy() { + instMap.delete(this[kPtr]); + free(this[kPtr]); + } + + execute(data: Buffer) { + const ptr = 
malloc(data.byteLength); + const u8 = new Uint8Array(memory.buffer); + u8.set(data, ptr); + const ret = execute(this[kPtr], ptr, data.length); + free(ptr); + this[kCheckErr](ret); + return ret; + } + + [kCheckErr](n: number) { + if (n === constants.ERROR.OK) { + return; + } + const ptr = get_error_reason(this[kPtr]); + const u8 = new Uint8Array(memory.buffer); + const len = u8.indexOf(0, ptr) - ptr; + throw new Error(cstr(ptr, len)); + } +} + + +{ + const p = new HTTPParser(HTTPParser.REQUEST); + + p.execute(Buffer.from([ + 'POST /owo HTTP/1.1', + 'X: Y', + 'Content-Length: 9', + '', + 'uh, meow?', + '', + ].join('\r\n'))); + + console.log(p); + + p.destroy(); +} + +{ + const p = new HTTPParser(HTTPParser.RESPONSE); + + p.execute(Buffer.from([ + 'HTTP/1.1 200 OK', + 'X: Y', + 'Content-Length: 9', + '', + 'uh, meow?' + ].join('\r\n'))); + + console.log(p); + + p.destroy(); +} diff --git a/llhttp/images/http-loose-none.png b/llhttp/images/http-loose-none.png new file mode 100644 index 0000000..3187765 Binary files /dev/null and b/llhttp/images/http-loose-none.png differ diff --git a/llhttp/images/http-strict-none.png b/llhttp/images/http-strict-none.png new file mode 100644 index 0000000..8f2aacf Binary files /dev/null and b/llhttp/images/http-strict-none.png differ diff --git a/llhttp/libllhttp.pc.in b/llhttp/libllhttp.pc.in new file mode 100644 index 0000000..67d280a --- /dev/null +++ b/llhttp/libllhttp.pc.in @@ -0,0 +1,10 @@ +prefix=@CMAKE_INSTALL_PREFIX@ +exec_prefix=@CMAKE_INSTALL_PREFIX@ +libdir=@CMAKE_INSTALL_PREFIX@/@CMAKE_INSTALL_LIBDIR@ +includedir=@CMAKE_INSTALL_PREFIX@/@CMAKE_INSTALL_INCLUDEDIR@ + +Name: libllhttp +Description: Node.js llhttp Library +Version: @PROJECT_VERSION@ +Libs: -L${libdir} -lllhttp +Cflags: -I${includedir} \ No newline at end of file diff --git a/llhttp/package-lock.json b/llhttp/package-lock.json new file mode 100644 index 0000000..a49ed36 --- /dev/null +++ b/llhttp/package-lock.json @@ -0,0 +1,2995 @@ +{ + "name": "llhttp", + "version": "9.1.3", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "version": "9.1.3", + "license": "MIT", + "dependencies": { + "@types/semver": "^5.5.0", + "llparse": "^7.1.1", + "semver": "^5.7.1" + }, + "devDependencies": { + "@types/mocha": "^5.2.7", + "@types/node": "^10.17.52", + "javascript-stringify": "^2.0.1", + "llparse-dot": "^1.0.1", + "llparse-test-fixture": "^5.0.1", + "mdgator": "^1.1.2", + "mocha": "^10.2.0", + "ts-node": "^7.0.1", + "tslint": "^5.20.1", + "typescript": "^3.9.9" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", + "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.12.13" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz", + "integrity": "sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==", + "dev": true + }, + "node_modules/@babel/highlight": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.0.tgz", + "integrity": "sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.14.0", + "chalk": 
"^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "node_modules/@types/debug": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.5.tgz", + "integrity": "sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==" + }, + "node_modules/@types/markdown-it": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-0.0.4.tgz", + "integrity": "sha512-FWR7QB7EqBRq1s9BMk0ccOSOuRLfVEWYpHQYpFPaXtCoqN6dJx2ttdsdQbUxLLnAlKpYeVjveGGhQ3583TTa7g==", + "dev": true + }, + "node_modules/@types/mocha": { + "version": "5.2.7", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-5.2.7.tgz", + "integrity": "sha512-NYrtPht0wGzhwe9+/idPaBB+TqkY9AhTvOLMkThm0IoEfLaiVQZwBwyJ5puCkO3AUCWrmcoePjp2mbFocKy4SQ==", + "dev": true + }, + "node_modules/@types/node": { + "version": "10.17.59", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.59.tgz", + "integrity": "sha512-7Uc8IRrL8yZz5ti45RaFxpbU8TxlzdC3HvxV+hOWo1EyLsuKv/w7y0n+TwZzwL3vdx3oZ2k3ubxPq131hNtXyg==", + "dev": true + }, + "node_modules/@types/semver": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/binary-extensions": { + "version": 
"2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/binary-search": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/binary-search/-/binary-search-1.3.6.tgz", + "integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "node_modules/builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + 
"is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/entities": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", + "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" 
+ } + }, + "node_modules/esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.4.0.tgz", + "integrity": "sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/javascript-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.1.0.tgz", + "integrity": "sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==", + "dev": true + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/linkify-it": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz", + "integrity": "sha512-GnAl/knGn+i1U/wjBz3akz2stz+HrHLsxMwHQGofCDfPvlf+gDKN58UtfmUquTY4/MXeE2x7k19KQmeoZi94Iw==", + "dev": true, + "dependencies": { + "uc.micro": "^1.0.1" + } + }, + "node_modules/llparse": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/llparse/-/llparse-7.1.1.tgz", + "integrity": "sha512-lBxN5O6sKq6KSOaRFIGczoVpO/U/37mHhjJioQbPuiXdfZmwzP1zC3txV9xx778TRNFENzeCM0Uoo+mE1rfJOA==", + "dependencies": { + "debug": "^4.2.0", + "llparse-frontend": "^3.0.0" + } + }, + "node_modules/llparse-builder": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/llparse-builder/-/llparse-builder-1.5.2.tgz", + "integrity": "sha512-i862UNC3YUEdlfK/NUCJxlKjtWjgAI9AJXDRgjcfRHfwFt4Sf8eFPTRsc91/2R9MBZ0kyFdfhi8SVhMsZf1gNQ==", + "dependencies": { + "@types/debug": "4.1.5 ", + "binary-search": "^1.3.6", + "debug": "^4.2.0" + } + }, + "node_modules/llparse-dot": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/llparse-dot/-/llparse-dot-1.0.1.tgz", + "integrity": "sha512-3e271C2LuDWBzhxaCUDzjpufamoEBuTYQz83QyMixI/i99BntCEk6ngHWOhhDb0XdtNNh6qAfRmXyjgNP+Nxpw==", + "dev": true, + "dependencies": { + "llparse-builder": "^1.0.0" + } + }, + "node_modules/llparse-frontend": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/llparse-frontend/-/llparse-frontend-3.0.0.tgz", + "integrity": "sha512-G/o0Po2C+G5OtP8MJeQDjDf5qwDxcO7K6x4r6jqGsJwxk7yblbJnRqpmye7G/lZ8dD0Hv5neY4/KB5BhDmEc9Q==", + "dependencies": { + "debug": "^3.2.6", + "llparse-builder": "^1.5.2" + } + }, + "node_modules/llparse-frontend/node_modules/debug": { + "version": "3.2.7", + "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/llparse-test-fixture": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/llparse-test-fixture/-/llparse-test-fixture-5.0.2.tgz", + "integrity": "sha512-61KI5J/b5uyRktD0y1EezleEW6UfaxhHkn1adLKNVemRZzklE+SpLakr251qo04kb9jN/ytk8lllgK+yFOj4cQ==", + "dev": true, + "dependencies": { + "esm": "^3.2.25", + "llparse": "^7.0.0", + "yargs": "^15.4.1" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/log-symbols/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/log-symbols/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/log-symbols/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/log-symbols/node_modules/supports-color": { + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/markdown-it": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", + "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "entities": "~1.1.1", + "linkify-it": "^2.0.0", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/mdgator": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/mdgator/-/mdgator-1.1.2.tgz", + "integrity": "sha512-S2GvsLIznUQ2McXfpe6BCD+IqhnRuHcBO7krqnvnsHgDpjjO1mLhr0vZtVa5ca4WZET037g3G+94DznpicKkOA==", + "dev": true, + "dependencies": { + "@types/markdown-it": "0.0.4", + "markdown-it": "^8.4.1" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=", + "dev": true + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", + "dev": true + }, + "node_modules/mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "dependencies": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/mocha/node_modules/ansi-styles": { + 
"version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/mocha/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/mocha/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/mocha/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/mocha/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/mocha/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/mocha/node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/mocha/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/mocha/node_modules/minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/mocha/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/mocha/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/mocha/node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/p-locate/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "node_modules/resolve": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "dev": true, + "dependencies": { + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": 
"sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/supports-color/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-node": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz", + "integrity": "sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==", + "dev": true, + "dependencies": { + "arrify": "^1.0.0", + "buffer-from": "^1.1.0", + "diff": "^3.1.0", + "make-error": "^1.1.1", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "source-map-support": "^0.5.6", + "yn": "^2.0.0" + }, + "bin": { + "ts-node": "dist/bin.js" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/tslint": { + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/tslint/-/tslint-5.20.1.tgz", + "integrity": "sha512-EcMxhzCFt8k+/UP5r8waCf/lzmeSyVlqxqMEDQE7rWYiQky8KpIBz1JAoYXfROHrPZ1XXd43q8yQnULOLiBRQg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "builtin-modules": "^1.1.1", + "chalk": "^2.3.0", + "commander": "^2.12.1", + "diff": "^4.0.1", + "glob": "^7.1.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "resolve": "^1.3.2", + "semver": "^5.3.0", + "tslib": "^1.8.0", + "tsutils": "^2.29.0" + }, + "bin": { + "tslint": "bin/tslint" + }, + "engines": { + "node": ">=4.8.0" + }, + "peerDependencies": { + "typescript": ">=2.3.0-dev || >=2.4.0-dev || >=2.5.0-dev || >=2.6.0-dev || >=2.7.0-dev || >=2.8.0-dev || >=2.9.0-dev || >=3.0.0-dev || >= 3.1.0-dev || >= 3.2.0-dev" + } + }, + "node_modules/tslint/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/tsutils": { + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", + "integrity": "sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==", + "dev": true, + "dependencies": { + "tslib": "^1.8.1" + }, + "peerDependencies": { + "typescript": ">=2.1.0 || >=2.1.0-dev || >=2.2.0-dev || >=2.3.0-dev || >=2.4.0-dev || >=2.5.0-dev || >=2.6.0-dev || >=2.7.0-dev || >=2.8.0-dev || >=2.9.0-dev || >= 3.0.0-dev || >= 3.1.0-dev" + } + }, + "node_modules/typescript": { + "version": "3.9.9", + "resolved": 
"https://registry.npmjs.org/typescript/-/typescript-3.9.9.tgz", + "integrity": "sha512-kdMjTiekY+z/ubJCATUPlRDl39vXYiMV9iyeMuEuXZh2we6zz80uovNN2WlAxmmdE/Z/YQe+EbOEXB5RHEED3w==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "dev": true + }, + "node_modules/which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "node_modules/workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/wrap-ansi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true + }, + "node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + 
"yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs-unparser/node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yn": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz", + "integrity": "sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": 
"sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", + "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", + "dev": true, + "requires": { + "@babel/highlight": "^7.12.13" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz", + "integrity": "sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A==", + "dev": true + }, + "@babel/highlight": { + "version": "7.14.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.0.tgz", + "integrity": "sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.14.0", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@types/debug": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.5.tgz", + "integrity": "sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==" + }, + "@types/markdown-it": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-0.0.4.tgz", + "integrity": "sha512-FWR7QB7EqBRq1s9BMk0ccOSOuRLfVEWYpHQYpFPaXtCoqN6dJx2ttdsdQbUxLLnAlKpYeVjveGGhQ3583TTa7g==", + "dev": true + }, + "@types/mocha": { + "version": "5.2.7", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-5.2.7.tgz", + "integrity": "sha512-NYrtPht0wGzhwe9+/idPaBB+TqkY9AhTvOLMkThm0IoEfLaiVQZwBwyJ5puCkO3AUCWrmcoePjp2mbFocKy4SQ==", + "dev": true + }, + "@types/node": { + "version": "10.17.59", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.59.tgz", + "integrity": "sha512-7Uc8IRrL8yZz5ti45RaFxpbU8TxlzdC3HvxV+hOWo1EyLsuKv/w7y0n+TwZzwL3vdx3oZ2k3ubxPq131hNtXyg==", + "dev": true + }, + "@types/semver": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==" + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + 
"dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, + "binary-search": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/binary-search/-/binary-search-1.3.6.tgz", + "integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "dependencies": { + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + 
"dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + } + }, + "cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "entities": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", + "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", + "dev": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + 
"integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": 
"sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-core-module": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.4.0.tgz", + "integrity": "sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, + "is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true + }, + "javascript-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.1.0.tgz", + "integrity": "sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==", + "dev": true + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": 
"sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "linkify-it": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz", + "integrity": "sha512-GnAl/knGn+i1U/wjBz3akz2stz+HrHLsxMwHQGofCDfPvlf+gDKN58UtfmUquTY4/MXeE2x7k19KQmeoZi94Iw==", + "dev": true, + "requires": { + "uc.micro": "^1.0.1" + } + }, + "llparse": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/llparse/-/llparse-7.1.1.tgz", + "integrity": "sha512-lBxN5O6sKq6KSOaRFIGczoVpO/U/37mHhjJioQbPuiXdfZmwzP1zC3txV9xx778TRNFENzeCM0Uoo+mE1rfJOA==", + "requires": { + "debug": "^4.2.0", + "llparse-frontend": "^3.0.0" + } + }, + "llparse-builder": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/llparse-builder/-/llparse-builder-1.5.2.tgz", + "integrity": "sha512-i862UNC3YUEdlfK/NUCJxlKjtWjgAI9AJXDRgjcfRHfwFt4Sf8eFPTRsc91/2R9MBZ0kyFdfhi8SVhMsZf1gNQ==", + "requires": { + "@types/debug": "4.1.5 ", + "binary-search": "^1.3.6", + "debug": "^4.2.0" + } + }, + "llparse-dot": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/llparse-dot/-/llparse-dot-1.0.1.tgz", + "integrity": "sha512-3e271C2LuDWBzhxaCUDzjpufamoEBuTYQz83QyMixI/i99BntCEk6ngHWOhhDb0XdtNNh6qAfRmXyjgNP+Nxpw==", + "dev": true, + "requires": { + "llparse-builder": "^1.0.0" + } + }, + "llparse-frontend": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/llparse-frontend/-/llparse-frontend-3.0.0.tgz", + "integrity": "sha512-G/o0Po2C+G5OtP8MJeQDjDf5qwDxcO7K6x4r6jqGsJwxk7yblbJnRqpmye7G/lZ8dD0Hv5neY4/KB5BhDmEc9Q==", + "requires": { + "debug": "^3.2.6", + "llparse-builder": "^1.5.2" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "llparse-test-fixture": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/llparse-test-fixture/-/llparse-test-fixture-5.0.2.tgz", + "integrity": "sha512-61KI5J/b5uyRktD0y1EezleEW6UfaxhHkn1adLKNVemRZzklE+SpLakr251qo04kb9jN/ytk8lllgK+yFOj4cQ==", + "dev": true, + "requires": { + "esm": "^3.2.25", + "llparse": "^7.0.0", + "yargs": "^15.4.1" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "markdown-it": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", + "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "entities": "~1.1.1", + "linkify-it": "^2.0.0", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + } + }, + "mdgator": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/mdgator/-/mdgator-1.1.2.tgz", + "integrity": "sha512-S2GvsLIznUQ2McXfpe6BCD+IqhnRuHcBO7krqnvnsHgDpjjO1mLhr0vZtVa5ca4WZET037g3G+94DznpicKkOA==", + "dev": true, + "requires": { + "@types/markdown-it": "0.0.4", + "markdown-it": "^8.4.1" + } + }, + "mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=", + "dev": true + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==", + "dev": true + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "requires": { + "ansi-colors": "4.1.1", 
+ "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + }, + "minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": 
"sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + } + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + }, + "dependencies": { + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + } + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "resolve": { + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", + "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "dev": true, + "requires": { + "is-core-module": "^2.2.0", + "path-parse": "^1.0.6" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==" + }, + "serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": 
"sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + }, + "dependencies": { + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + } + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "ts-node": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz", + "integrity": "sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==", + "dev": true, + "requires": { + "arrify": "^1.0.0", + "buffer-from": "^1.1.0", + "diff": "^3.1.0", + "make-error": "^1.1.1", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "source-map-support": "^0.5.6", + "yn": "^2.0.0" + } + }, + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "tslint": { + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/tslint/-/tslint-5.20.1.tgz", + "integrity": "sha512-EcMxhzCFt8k+/UP5r8waCf/lzmeSyVlqxqMEDQE7rWYiQky8KpIBz1JAoYXfROHrPZ1XXd43q8yQnULOLiBRQg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "builtin-modules": "^1.1.1", + "chalk": "^2.3.0", + "commander": "^2.12.1", + "diff": "^4.0.1", + "glob": "^7.1.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "resolve": "^1.3.2", + "semver": "^5.3.0", + "tslib": "^1.8.0", + "tsutils": "^2.29.0" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + } + } + }, + "tsutils": { + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", + "integrity": "sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==", + "dev": true, + "requires": { + "tslib": "^1.8.1" + } + }, + "typescript": { + "version": "3.9.9", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.9.tgz", + "integrity": "sha512-kdMjTiekY+z/ubJCATUPlRDl39vXYiMV9iyeMuEuXZh2we6zz80uovNN2WlAxmmdE/Z/YQe+EbOEXB5RHEED3w==", + "dev": true + }, + "uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "dev": true + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "y18n": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz", + "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==", + "dev": true + }, + "yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "requires": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + 
"which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "dependencies": { + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } + }, + "yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true + }, + "yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "requires": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "dependencies": { + "camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true + }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true + } + } + }, + "yn": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz", + "integrity": "sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=", + "dev": true + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } + } +} diff --git a/llhttp/package.json b/llhttp/package.json new file mode 100644 index 0000000..be715b8 --- /dev/null +++ b/llhttp/package.json @@ -0,0 +1,60 @@ +{ + "name": "llhttp", + "version": "9.1.3", + "description": "HTTP parser in LLVM IR", + "main": "lib/llhttp.js", + "types": "lib/llhttp.d.ts", + "files": [ + "lib", + "src" + ], + "scripts": { + "bench": "ts-node bench/", + "build": "ts-node bin/generate.ts", + "build-ts": "tsc", + "prebuild-wasm": "npm run wasm -- --prebuild && npm run wasm -- --setup", + "build-wasm": "npm run wasm -- --docker", + "wasm": "ts-node bin/build_wasm.ts", + "clean": "rm -rf lib && rm 
-rf test/tmp", + "prepare": "npm run clean && npm run build-ts", + "lint": "tslint -c tslint.json bin/*.ts src/*.ts src/**/*.ts test/*.ts test/**/*.ts", + "lint-fix": "tslint --fix -c tslint.json bin/*.ts src/*.ts src/**/*.ts test/*.ts test/**/*.ts", + "mocha": "mocha --timeout=10000 -r ts-node/register/type-check --reporter progress test/*-test.ts", + "test": "npm run mocha && npm run lint", + "postversion": "RELEASE=`node -e \"process.stdout.write(require('./package').version)\"` make -B postversion", + "github-release": "RELEASE_V=`node -e \"process.stdout.write('v' + require('./package').version)\"` make github-release" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/nodejs/llhttp.git" + }, + "keywords": [ + "http", + "llvm", + "ir", + "llparse" + ], + "author": "Fedor Indutny (http://darksi.de/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/nodejs/llhttp/issues" + }, + "homepage": "https://github.com/nodejs/llhttp#readme", + "devDependencies": { + "@types/mocha": "^5.2.7", + "@types/node": "^10.17.52", + "javascript-stringify": "^2.0.1", + "llparse-dot": "^1.0.1", + "llparse-test-fixture": "^5.0.1", + "mdgator": "^1.1.2", + "mocha": "^10.2.0", + "ts-node": "^7.0.1", + "tslint": "^5.20.1", + "typescript": "^3.9.9" + }, + "dependencies": { + "@types/semver": "^5.5.0", + "llparse": "^7.1.1", + "semver": "^5.7.1" + } +} diff --git a/llhttp/src/common.gypi b/llhttp/src/common.gypi new file mode 100644 index 0000000..ef7549f --- /dev/null +++ b/llhttp/src/common.gypi @@ -0,0 +1,46 @@ +{ + 'target_defaults': { + 'default_configuration': 'Debug', + 'configurations': { + # TODO: hoist these out and put them somewhere common, because + # RuntimeLibrary MUST MATCH across the entire project + 'Debug': { + 'defines': [ 'DEBUG', '_DEBUG' ], + 'cflags': [ '-Wall', '-Wextra', '-O0', '-g', '-ftrapv' ], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'RuntimeLibrary': 1, # static debug + }, + }, + }, + 'Release': { + 'defines': [ 'NDEBUG' ], + 'cflags': [ '-Wall', '-Wextra', '-O3' ], + 'msvs_settings': { + 'VCCLCompilerTool': { + 'RuntimeLibrary': 0, # static release + }, + }, + } + }, + 'msvs_settings': { + 'VCCLCompilerTool': { + # Compile as C++. llhttp.c is actually C99, but C++ is + # close enough in this case. 
+ 'CompileAs': 2, + }, + 'VCLibrarianTool': { + }, + 'VCLinkerTool': { + 'GenerateDebugInformation': 'true', + }, + }, + 'conditions': [ + ['OS == "win"', { + 'defines': [ + 'WIN32' + ], + }] + ], + }, +} diff --git a/llhttp/src/llhttp.gyp b/llhttp/src/llhttp.gyp new file mode 100644 index 0000000..c7b8800 --- /dev/null +++ b/llhttp/src/llhttp.gyp @@ -0,0 +1,22 @@ +{ + 'variables': { + 'llhttp_sources': [ + 'src/llhttp.c', + 'src/api.c', + 'src/http.c', + ] + }, + 'targets': [ + { + 'target_name': 'llhttp', + 'type': 'static_library', + 'include_dirs': [ '.', 'include' ], + 'direct_dependent_settings': { + 'include_dirs': [ 'include' ], + }, + 'sources': [ + '<@(llhttp_sources)', + ], + }, + ] +} diff --git a/llhttp/src/llhttp.ts b/llhttp/src/llhttp.ts new file mode 100644 index 0000000..ba36b01 --- /dev/null +++ b/llhttp/src/llhttp.ts @@ -0,0 +1,7 @@ +import * as constants from './llhttp/constants'; + +export { constants }; + +export { HTTP } from './llhttp/http'; +export { URL } from './llhttp/url'; +export { CHeaders } from './llhttp/c-headers'; diff --git a/llhttp/src/llhttp/c-headers.ts b/llhttp/src/llhttp/c-headers.ts new file mode 100644 index 0000000..fad66de --- /dev/null +++ b/llhttp/src/llhttp/c-headers.ts @@ -0,0 +1,106 @@ +import * as constants from './constants'; +import { enumToMap, IEnumMap } from './utils'; + +type Encoding = 'none' | 'hex'; + +export class CHeaders { + public build(): string { + let res = ''; + + res += '#ifndef LLLLHTTP_C_HEADERS_\n'; + res += '#define LLLLHTTP_C_HEADERS_\n'; + + res += '#ifdef __cplusplus\n'; + res += 'extern "C" {\n'; + res += '#endif\n'; + + res += '\n'; + + const errorMap = enumToMap(constants.ERROR); + const methodMap = enumToMap(constants.METHODS); + const httpMethodMap = enumToMap(constants.METHODS, constants.METHODS_HTTP, [ + constants.METHODS.PRI, + ]); + const rtspMethodMap = enumToMap(constants.METHODS, constants.METHODS_RTSP); + const statusMap = enumToMap(constants.STATUSES, constants.STATUSES_HTTP); + + res += this.buildEnum('llhttp_errno', 'HPE', errorMap); + res += '\n'; + res += this.buildEnum('llhttp_flags', 'F', enumToMap(constants.FLAGS), + 'hex'); + res += '\n'; + res += this.buildEnum('llhttp_lenient_flags', 'LENIENT', + enumToMap(constants.LENIENT_FLAGS), 'hex'); + res += '\n'; + res += this.buildEnum('llhttp_type', 'HTTP', + enumToMap(constants.TYPE)); + res += '\n'; + res += this.buildEnum('llhttp_finish', 'HTTP_FINISH', + enumToMap(constants.FINISH)); + res += '\n'; + res += this.buildEnum('llhttp_method', 'HTTP', methodMap); + res += '\n'; + res += this.buildEnum('llhttp_status', 'HTTP_STATUS', statusMap); + + res += '\n'; + + res += this.buildMap('HTTP_ERRNO', errorMap); + res += '\n'; + res += this.buildMap('HTTP_METHOD', httpMethodMap); + res += '\n'; + res += this.buildMap('RTSP_METHOD', rtspMethodMap); + res += '\n'; + res += this.buildMap('HTTP_ALL_METHOD', methodMap); + res += '\n'; + res += this.buildMap('HTTP_STATUS', statusMap); + + res += '\n'; + + res += '#ifdef __cplusplus\n'; + res += '} /* extern "C" */\n'; + res += '#endif\n'; + res += '#endif /* LLLLHTTP_C_HEADERS_ */\n'; + + return res; + } + + private buildEnum(name: string, prefix: string, map: IEnumMap, + encoding: Encoding = 'none'): string { + let res = ''; + + res += `enum ${name} {\n`; + const keys = Object.keys(map); + const keysLength = keys.length; + for (let i = 0; i < keysLength; i++) { + const key = keys[i]; + const isLast = i === keysLength - 1; + + let value: number | string = map[key]; + + if (encoding === 'hex') { + value = 
`0x${value.toString(16)}`; + } + + res += ` ${prefix}_${key.replace(/-/g, '')} = ${value}`; + if (!isLast) { + res += ',\n'; + } + } + res += '\n};\n'; + res += `typedef enum ${name} ${name}_t;\n`; + + return res; + } + + private buildMap(name: string, map: IEnumMap): string { + let res = ''; + + res += `#define ${name}_MAP(XX) \\\n`; + for (const [key, value] of Object.entries(map)) { + res += ` XX(${value!}, ${key.replace(/-/g, '')}, ${key}) \\\n`; + } + res += '\n'; + + return res; + } +} diff --git a/llhttp/src/llhttp/constants.ts b/llhttp/src/llhttp/constants.ts new file mode 100644 index 0000000..00fc523 --- /dev/null +++ b/llhttp/src/llhttp/constants.ts @@ -0,0 +1,540 @@ +import { enumToMap, IEnumMap } from './utils'; + +// C headers + +export enum ERROR { + OK = 0, + INTERNAL = 1, + STRICT = 2, + CR_EXPECTED = 25, + LF_EXPECTED = 3, + UNEXPECTED_CONTENT_LENGTH = 4, + UNEXPECTED_SPACE = 30, + CLOSED_CONNECTION = 5, + INVALID_METHOD = 6, + INVALID_URL = 7, + INVALID_CONSTANT = 8, + INVALID_VERSION = 9, + INVALID_HEADER_TOKEN = 10, + INVALID_CONTENT_LENGTH = 11, + INVALID_CHUNK_SIZE = 12, + INVALID_STATUS = 13, + INVALID_EOF_STATE = 14, + INVALID_TRANSFER_ENCODING = 15, + + CB_MESSAGE_BEGIN = 16, + CB_HEADERS_COMPLETE = 17, + CB_MESSAGE_COMPLETE = 18, + CB_CHUNK_HEADER = 19, + CB_CHUNK_COMPLETE = 20, + + PAUSED = 21, + PAUSED_UPGRADE = 22, + PAUSED_H2_UPGRADE = 23, + + USER = 24, + + CB_URL_COMPLETE = 26, + CB_STATUS_COMPLETE = 27, + CB_METHOD_COMPLETE = 32, + CB_VERSION_COMPLETE = 33, + CB_HEADER_FIELD_COMPLETE = 28, + CB_HEADER_VALUE_COMPLETE = 29, + CB_CHUNK_EXTENSION_NAME_COMPLETE = 34, + CB_CHUNK_EXTENSION_VALUE_COMPLETE = 35, + CB_RESET = 31, +} + +export enum TYPE { + BOTH = 0, // default + REQUEST = 1, + RESPONSE = 2, +} + +export enum FLAGS { + CONNECTION_KEEP_ALIVE = 1 << 0, + CONNECTION_CLOSE = 1 << 1, + CONNECTION_UPGRADE = 1 << 2, + CHUNKED = 1 << 3, + UPGRADE = 1 << 4, + CONTENT_LENGTH = 1 << 5, + SKIPBODY = 1 << 6, + TRAILING = 1 << 7, + // 1 << 8 is unused + TRANSFER_ENCODING = 1 << 9, +} + +export enum LENIENT_FLAGS { + HEADERS = 1 << 0, + CHUNKED_LENGTH = 1 << 1, + KEEP_ALIVE = 1 << 2, + TRANSFER_ENCODING = 1 << 3, + VERSION = 1 << 4, + DATA_AFTER_CLOSE = 1 << 5, + OPTIONAL_LF_AFTER_CR = 1 << 6, + OPTIONAL_CRLF_AFTER_CHUNK = 1 << 7, + OPTIONAL_CR_BEFORE_LF = 1 << 8, + SPACES_AFTER_CHUNK_SIZE = 1 << 9, +} + +export enum METHODS { + DELETE = 0, + GET = 1, + HEAD = 2, + POST = 3, + PUT = 4, + /* pathological */ + CONNECT = 5, + OPTIONS = 6, + TRACE = 7, + /* WebDAV */ + COPY = 8, + LOCK = 9, + MKCOL = 10, + MOVE = 11, + PROPFIND = 12, + PROPPATCH = 13, + SEARCH = 14, + UNLOCK = 15, + BIND = 16, + REBIND = 17, + UNBIND = 18, + ACL = 19, + /* subversion */ + REPORT = 20, + MKACTIVITY = 21, + CHECKOUT = 22, + MERGE = 23, + /* upnp */ + 'M-SEARCH' = 24, + NOTIFY = 25, + SUBSCRIBE = 26, + UNSUBSCRIBE = 27, + /* RFC-5789 */ + PATCH = 28, + PURGE = 29, + /* CalDAV */ + MKCALENDAR = 30, + /* RFC-2068, section 19.6.1.2 */ + LINK = 31, + UNLINK = 32, + /* icecast */ + SOURCE = 33, + /* RFC-7540, section 11.6 */ + PRI = 34, + /* RFC-2326 RTSP */ + DESCRIBE = 35, + ANNOUNCE = 36, + SETUP = 37, + PLAY = 38, + PAUSE = 39, + TEARDOWN = 40, + GET_PARAMETER = 41, + SET_PARAMETER = 42, + REDIRECT = 43, + RECORD = 44, + /* RAOP */ + FLUSH = 45, +} + +export const METHODS_HTTP = [ + METHODS.DELETE, + METHODS.GET, + METHODS.HEAD, + METHODS.POST, + METHODS.PUT, + METHODS.CONNECT, + METHODS.OPTIONS, + METHODS.TRACE, + METHODS.COPY, + METHODS.LOCK, + METHODS.MKCOL, + METHODS.MOVE, + 
METHODS.PROPFIND, + METHODS.PROPPATCH, + METHODS.SEARCH, + METHODS.UNLOCK, + METHODS.BIND, + METHODS.REBIND, + METHODS.UNBIND, + METHODS.ACL, + METHODS.REPORT, + METHODS.MKACTIVITY, + METHODS.CHECKOUT, + METHODS.MERGE, + METHODS['M-SEARCH'], + METHODS.NOTIFY, + METHODS.SUBSCRIBE, + METHODS.UNSUBSCRIBE, + METHODS.PATCH, + METHODS.PURGE, + METHODS.MKCALENDAR, + METHODS.LINK, + METHODS.UNLINK, + METHODS.PRI, + + // TODO(indutny): should we allow it with HTTP? + METHODS.SOURCE, +]; + +export const METHODS_ICE = [ + METHODS.SOURCE, +]; + +export const METHODS_RTSP = [ + METHODS.OPTIONS, + METHODS.DESCRIBE, + METHODS.ANNOUNCE, + METHODS.SETUP, + METHODS.PLAY, + METHODS.PAUSE, + METHODS.TEARDOWN, + METHODS.GET_PARAMETER, + METHODS.SET_PARAMETER, + METHODS.REDIRECT, + METHODS.RECORD, + METHODS.FLUSH, + + // For AirPlay + METHODS.GET, + METHODS.POST, +]; + +export const METHOD_MAP = enumToMap(METHODS); +export const H_METHOD_MAP: IEnumMap = {}; + +for (const key of Object.keys(METHOD_MAP)) { + if (/^H/.test(key)) { + H_METHOD_MAP[key] = METHOD_MAP[key]; + } +} + +export enum STATUSES { + CONTINUE = 100, + SWITCHING_PROTOCOLS = 101, + PROCESSING = 102, + EARLY_HINTS = 103, + RESPONSE_IS_STALE = 110, // Unofficial + REVALIDATION_FAILED = 111, // Unofficial + DISCONNECTED_OPERATION = 112, // Unofficial + HEURISTIC_EXPIRATION = 113, // Unofficial + MISCELLANEOUS_WARNING = 199, // Unofficial + OK = 200, + CREATED = 201, + ACCEPTED = 202, + NON_AUTHORITATIVE_INFORMATION = 203, + NO_CONTENT = 204, + RESET_CONTENT = 205, + PARTIAL_CONTENT = 206, + MULTI_STATUS = 207, + ALREADY_REPORTED = 208, + TRANSFORMATION_APPLIED = 214, // Unofficial + IM_USED = 226, + MISCELLANEOUS_PERSISTENT_WARNING = 299, // Unofficial + MULTIPLE_CHOICES = 300, + MOVED_PERMANENTLY = 301, + FOUND = 302, + SEE_OTHER = 303, + NOT_MODIFIED = 304, + USE_PROXY = 305, + SWITCH_PROXY = 306, // No longer used + TEMPORARY_REDIRECT = 307, + PERMANENT_REDIRECT = 308, + BAD_REQUEST = 400, + UNAUTHORIZED = 401, + PAYMENT_REQUIRED = 402, + FORBIDDEN = 403, + NOT_FOUND = 404, + METHOD_NOT_ALLOWED = 405, + NOT_ACCEPTABLE = 406, + PROXY_AUTHENTICATION_REQUIRED = 407, + REQUEST_TIMEOUT = 408, + CONFLICT = 409, + GONE = 410, + LENGTH_REQUIRED = 411, + PRECONDITION_FAILED = 412, + PAYLOAD_TOO_LARGE = 413, + URI_TOO_LONG = 414, + UNSUPPORTED_MEDIA_TYPE = 415, + RANGE_NOT_SATISFIABLE = 416, + EXPECTATION_FAILED = 417, + IM_A_TEAPOT = 418, + PAGE_EXPIRED = 419, // Unofficial + ENHANCE_YOUR_CALM = 420, // Unofficial + MISDIRECTED_REQUEST = 421, + UNPROCESSABLE_ENTITY = 422, + LOCKED = 423, + FAILED_DEPENDENCY = 424, + TOO_EARLY = 425, + UPGRADE_REQUIRED = 426, + PRECONDITION_REQUIRED = 428, + TOO_MANY_REQUESTS = 429, + REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL = 430, // Unofficial + REQUEST_HEADER_FIELDS_TOO_LARGE = 431, + LOGIN_TIMEOUT = 440, // Unofficial + NO_RESPONSE = 444, // Unofficial + RETRY_WITH = 449, // Unofficial + BLOCKED_BY_PARENTAL_CONTROL = 450, // Unofficial + UNAVAILABLE_FOR_LEGAL_REASONS = 451, + CLIENT_CLOSED_LOAD_BALANCED_REQUEST = 460, // Unofficial + INVALID_X_FORWARDED_FOR = 463, // Unofficial + REQUEST_HEADER_TOO_LARGE = 494, // Unofficial + SSL_CERTIFICATE_ERROR = 495, // Unofficial + SSL_CERTIFICATE_REQUIRED = 496, // Unofficial + HTTP_REQUEST_SENT_TO_HTTPS_PORT = 497, // Unofficial + INVALID_TOKEN = 498, // Unofficial + CLIENT_CLOSED_REQUEST = 499, // Unofficial + INTERNAL_SERVER_ERROR = 500, + NOT_IMPLEMENTED = 501, + BAD_GATEWAY = 502, + SERVICE_UNAVAILABLE = 503, + GATEWAY_TIMEOUT = 504, + HTTP_VERSION_NOT_SUPPORTED = 505, + 
VARIANT_ALSO_NEGOTIATES = 506, + INSUFFICIENT_STORAGE = 507, + LOOP_DETECTED = 508, + BANDWIDTH_LIMIT_EXCEEDED = 509, + NOT_EXTENDED = 510, + NETWORK_AUTHENTICATION_REQUIRED = 511, + WEB_SERVER_UNKNOWN_ERROR = 520, // Unofficial + WEB_SERVER_IS_DOWN = 521, // Unofficial + CONNECTION_TIMEOUT = 522, // Unofficial + ORIGIN_IS_UNREACHABLE = 523, // Unofficial + TIMEOUT_OCCURED = 524, // Unofficial + SSL_HANDSHAKE_FAILED = 525, // Unofficial + INVALID_SSL_CERTIFICATE = 526, // Unofficial + RAILGUN_ERROR = 527, // Unofficial + SITE_IS_OVERLOADED = 529, // Unofficial + SITE_IS_FROZEN = 530, // Unofficial + IDENTITY_PROVIDER_AUTHENTICATION_ERROR = 561, // Unofficial + NETWORK_READ_TIMEOUT = 598, // Unofficial + NETWORK_CONNECT_TIMEOUT = 599, // Unofficial +} + +export const STATUSES_HTTP = [ + STATUSES.CONTINUE, + STATUSES.SWITCHING_PROTOCOLS, + STATUSES.PROCESSING, + STATUSES.EARLY_HINTS, + STATUSES.RESPONSE_IS_STALE, + STATUSES.REVALIDATION_FAILED, + STATUSES.DISCONNECTED_OPERATION, + STATUSES.HEURISTIC_EXPIRATION, + STATUSES.MISCELLANEOUS_WARNING, + STATUSES.OK, + STATUSES.CREATED, + STATUSES.ACCEPTED, + STATUSES.NON_AUTHORITATIVE_INFORMATION, + STATUSES.NO_CONTENT, + STATUSES.RESET_CONTENT, + STATUSES.PARTIAL_CONTENT, + STATUSES.MULTI_STATUS, + STATUSES.ALREADY_REPORTED, + STATUSES.TRANSFORMATION_APPLIED, + STATUSES.IM_USED, + STATUSES.MISCELLANEOUS_PERSISTENT_WARNING, + STATUSES.MULTIPLE_CHOICES, + STATUSES.MOVED_PERMANENTLY, + STATUSES.FOUND, + STATUSES.SEE_OTHER, + STATUSES.NOT_MODIFIED, + STATUSES.USE_PROXY, + STATUSES.SWITCH_PROXY, + STATUSES.TEMPORARY_REDIRECT, + STATUSES.PERMANENT_REDIRECT, + STATUSES.BAD_REQUEST, + STATUSES.UNAUTHORIZED, + STATUSES.PAYMENT_REQUIRED, + STATUSES.FORBIDDEN, + STATUSES.NOT_FOUND, + STATUSES.METHOD_NOT_ALLOWED, + STATUSES.NOT_ACCEPTABLE, + STATUSES.PROXY_AUTHENTICATION_REQUIRED, + STATUSES.REQUEST_TIMEOUT, + STATUSES.CONFLICT, + STATUSES.GONE, + STATUSES.LENGTH_REQUIRED, + STATUSES.PRECONDITION_FAILED, + STATUSES.PAYLOAD_TOO_LARGE, + STATUSES.URI_TOO_LONG, + STATUSES.UNSUPPORTED_MEDIA_TYPE, + STATUSES.RANGE_NOT_SATISFIABLE, + STATUSES.EXPECTATION_FAILED, + STATUSES.IM_A_TEAPOT, + STATUSES.PAGE_EXPIRED, + STATUSES.ENHANCE_YOUR_CALM, + STATUSES.MISDIRECTED_REQUEST, + STATUSES.UNPROCESSABLE_ENTITY, + STATUSES.LOCKED, + STATUSES.FAILED_DEPENDENCY, + STATUSES.TOO_EARLY, + STATUSES.UPGRADE_REQUIRED, + STATUSES.PRECONDITION_REQUIRED, + STATUSES.TOO_MANY_REQUESTS, + STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE_UNOFFICIAL, + STATUSES.REQUEST_HEADER_FIELDS_TOO_LARGE, + STATUSES.LOGIN_TIMEOUT, + STATUSES.NO_RESPONSE, + STATUSES.RETRY_WITH, + STATUSES.BLOCKED_BY_PARENTAL_CONTROL, + STATUSES.UNAVAILABLE_FOR_LEGAL_REASONS, + STATUSES.CLIENT_CLOSED_LOAD_BALANCED_REQUEST, + STATUSES.INVALID_X_FORWARDED_FOR, + STATUSES.REQUEST_HEADER_TOO_LARGE, + STATUSES.SSL_CERTIFICATE_ERROR, + STATUSES.SSL_CERTIFICATE_REQUIRED, + STATUSES.HTTP_REQUEST_SENT_TO_HTTPS_PORT, + STATUSES.INVALID_TOKEN, + STATUSES.CLIENT_CLOSED_REQUEST, + STATUSES.INTERNAL_SERVER_ERROR, + STATUSES.NOT_IMPLEMENTED, + STATUSES.BAD_GATEWAY, + STATUSES.SERVICE_UNAVAILABLE, + STATUSES.GATEWAY_TIMEOUT, + STATUSES.HTTP_VERSION_NOT_SUPPORTED, + STATUSES.VARIANT_ALSO_NEGOTIATES, + STATUSES.INSUFFICIENT_STORAGE, + STATUSES.LOOP_DETECTED, + STATUSES.BANDWIDTH_LIMIT_EXCEEDED, + STATUSES.NOT_EXTENDED, + STATUSES.NETWORK_AUTHENTICATION_REQUIRED, + STATUSES.WEB_SERVER_UNKNOWN_ERROR, + STATUSES.WEB_SERVER_IS_DOWN, + STATUSES.CONNECTION_TIMEOUT, + STATUSES.ORIGIN_IS_UNREACHABLE, + STATUSES.TIMEOUT_OCCURED, + 
STATUSES.SSL_HANDSHAKE_FAILED, + STATUSES.INVALID_SSL_CERTIFICATE, + STATUSES.RAILGUN_ERROR, + STATUSES.SITE_IS_OVERLOADED, + STATUSES.SITE_IS_FROZEN, + STATUSES.IDENTITY_PROVIDER_AUTHENTICATION_ERROR, + STATUSES.NETWORK_READ_TIMEOUT, + STATUSES.NETWORK_CONNECT_TIMEOUT, +]; + +export enum FINISH { + SAFE = 0, + SAFE_WITH_CB = 1, + UNSAFE = 2, +} + +// Internal + +export type CharList = Array<string | number>; + +export const ALPHA: CharList = []; + +for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { + // Upper case + ALPHA.push(String.fromCharCode(i)); + + // Lower case + ALPHA.push(String.fromCharCode(i + 0x20)); +} + +export const NUM_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, +}; + +export const HEX_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, + A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, + a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, +}; + +export const NUM: CharList = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', +]; + +export const ALPHANUM: CharList = ALPHA.concat(NUM); +export const MARK: CharList = [ '-', '_', '.', '!', '~', '*', '\'', '(', ')' ]; +export const USERINFO_CHARS: CharList = ALPHANUM + .concat(MARK) + .concat([ '%', ';', ':', '&', '=', '+', '$', ',' ]); + +// TODO(indutny): use RFC +export const URL_CHAR: CharList = ([ + '!', '"', '$', '%', '&', '\'', + '(', ')', '*', '+', ',', '-', '.', '/', + ':', ';', '<', '=', '>', + '@', '[', '\\', ']', '^', '_', + '`', + '{', '|', '}', '~', +] as CharList).concat(ALPHANUM); + +export const HEX: CharList = NUM.concat( + [ 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F' ]); + +/* Tokens as defined by rfc 2616. Also lowercases them. + * token = 1*<any CHAR except CTLs or separators> + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" 
| "=" + * | "{" | "}" | SP | HT + */ +export const TOKEN: CharList = ([ + '!', '#', '$', '%', '&', '\'', + '*', '+', '-', '.', + '^', '_', '`', + '|', '~', +] as CharList).concat(ALPHANUM); + +/* + * Verify that a char is a valid visible (printable) US-ASCII + * character or %x80-FF + */ +export const HEADER_CHARS: CharList = [ '\t' ]; +for (let i = 32; i <= 255; i++) { + if (i !== 127) { + HEADER_CHARS.push(i); + } +} + +// ',' = \x44 +export const CONNECTION_TOKEN_CHARS: CharList = + HEADER_CHARS.filter((c: string | number) => c !== 44); + +export const QUOTED_STRING: CharList = [ '\t', ' ' ]; +for (let i = 0x21; i <= 0xff; i++) { + if (i !== 0x22 && i !== 0x5c) { // All characters in ASCII except \ and " + QUOTED_STRING.push(i); + } +} + +export const HTAB_SP_VCHAR_OBS_TEXT: CharList = [ '\t', ' ' ]; + +// VCHAR: https://tools.ietf.org/html/rfc5234#appendix-B.1 +for (let i = 0x21; i <= 0x7E; i++) { + HTAB_SP_VCHAR_OBS_TEXT.push(i); +} +// OBS_TEXT: https://datatracker.ietf.org/doc/html/rfc9110#name-collected-abnf +for (let i = 0x80; i <= 0xff; i++) { + HTAB_SP_VCHAR_OBS_TEXT.push(i); +} + +export const MAJOR = NUM_MAP; +export const MINOR = MAJOR; + +export enum HEADER_STATE { + GENERAL = 0, + CONNECTION = 1, + CONTENT_LENGTH = 2, + TRANSFER_ENCODING = 3, + UPGRADE = 4, + + CONNECTION_KEEP_ALIVE = 5, + CONNECTION_CLOSE = 6, + CONNECTION_UPGRADE = 7, + TRANSFER_ENCODING_CHUNKED = 8, +} + +export const SPECIAL_HEADERS = { + 'connection': HEADER_STATE.CONNECTION, + 'content-length': HEADER_STATE.CONTENT_LENGTH, + 'proxy-connection': HEADER_STATE.CONNECTION, + 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, + 'upgrade': HEADER_STATE.UPGRADE, +}; diff --git a/llhttp/src/llhttp/http.ts b/llhttp/src/llhttp/http.ts new file mode 100644 index 0000000..6a201ff --- /dev/null +++ b/llhttp/src/llhttp/http.ts @@ -0,0 +1,1299 @@ +import * as assert from 'assert'; +import { LLParse, source } from 'llparse'; + +import Match = source.node.Match; +import Node = source.node.Node; + +import { + CharList, + CONNECTION_TOKEN_CHARS, ERROR, FINISH, FLAGS, H_METHOD_MAP, HEADER_CHARS, + HEADER_STATE, HEX_MAP, HTAB_SP_VCHAR_OBS_TEXT, + LENIENT_FLAGS, + MAJOR, METHOD_MAP, METHODS, METHODS_HTTP, METHODS_ICE, METHODS_RTSP, + MINOR, NUM_MAP, QUOTED_STRING, SPECIAL_HEADERS, + TOKEN, TYPE, +} from './constants'; +import { URL } from './url'; + +type MaybeNode = string | Match | Node; + +const NODES: ReadonlyArray = [ + 'start', + 'after_start', + 'start_req', + 'after_start_req', + 'start_res', + 'start_req_or_res', + + 'req_or_res_method', + + 'res_http_major', + 'res_http_dot', + 'res_http_minor', + 'res_http_end', + 'res_after_version', + 'res_status_code_digit_1', + 'res_status_code_digit_2', + 'res_status_code_digit_3', + 'res_status_code_otherwise', + 'res_status_start', + 'res_status', + 'res_line_almost_done', + + 'req_first_space_before_url', + 'req_spaces_before_url', + 'req_http_start', + 'req_http_version', + 'req_http_major', + 'req_http_dot', + 'req_http_minor', + 'req_http_end', + 'req_http_complete', + 'req_http_complete_crlf', + + 'req_pri_upgrade', + + 'headers_start', + 'header_field_start', + 'header_field', + 'header_field_colon', + 'header_field_colon_discard_ws', + 'header_field_general', + 'header_field_general_otherwise', + 'header_value_discard_ws', + 'header_value_discard_ws_almost_done', + 'header_value_discard_lws', + 'header_value_start', + 'header_value', + 'header_value_otherwise', + 'header_value_lenient', + 'header_value_lenient_failed', + 'header_value_lws', + 
'header_value_te_chunked', + 'header_value_te_chunked_last', + 'header_value_te_token', + 'header_value_te_token_ows', + 'header_value_content_length_once', + 'header_value_content_length', + 'header_value_content_length_ws', + 'header_value_connection', + 'header_value_connection_ws', + 'header_value_connection_token', + 'header_value_almost_done', + + 'headers_almost_done', + 'headers_done', + + 'chunk_size_start', + 'chunk_size_digit', + 'chunk_size', + 'chunk_size_otherwise', + 'chunk_size_almost_done', + 'chunk_size_almost_done_lf', + 'chunk_extensions', + 'chunk_extension_name', + 'chunk_extension_value', + 'chunk_extension_quoted_value', + 'chunk_extension_quoted_value_quoted_pair', + 'chunk_extension_quoted_value_done', + 'chunk_data', + 'chunk_data_almost_done', + 'chunk_complete', + 'body_identity', + 'body_identity_eof', + + 'message_done', + + 'eof', + 'cleanup', + 'closed', + 'restart', +]; + +interface ISpanMap { + readonly status: source.Span; + readonly method: source.Span; + readonly version: source.Span; + readonly headerField: source.Span; + readonly headerValue: source.Span; + readonly chunkExtensionName: source.Span; + readonly chunkExtensionValue: source.Span; + readonly body: source.Span; +} + +interface ICallbackMap { + readonly onMessageBegin: source.code.Code; + readonly onUrlComplete: source.code.Code; + readonly onMethodComplete: source.code.Code; + readonly onVersionComplete: source.code.Code; + readonly onStatusComplete: source.code.Code; + readonly beforeHeadersComplete: source.code.Code; + readonly onHeaderFieldComplete: source.code.Code; + readonly onHeaderValueComplete: source.code.Code; + readonly onHeadersComplete: source.code.Code; + readonly afterHeadersComplete: source.code.Code; + readonly onChunkHeader: source.code.Code; + readonly onChunkExtensionName: source.code.Code; + readonly onChunkExtensionValue: source.code.Code; + readonly onChunkComplete: source.code.Code; + readonly onMessageComplete: source.code.Code; + readonly afterMessageComplete: source.code.Code; + readonly onReset: source.code.Code; +} + +interface IMulTargets { + readonly overflow: string | Node; + readonly success: string | Node; +} + +interface IMulOptions { + readonly base: number; + readonly max?: number; + readonly signed: boolean; +} + +interface IIsEqualTargets { + readonly equal: string | Node; + readonly notEqual: string | Node; +} + +export interface IHTTPResult { + readonly entry: Node; +} + +export class HTTP { + private readonly url: URL; + private readonly TOKEN: CharList; + private readonly span: ISpanMap; + private readonly callback: ICallbackMap; + private readonly nodes: Map = new Map(); + + constructor(private readonly llparse: LLParse) { + const p = llparse; + + this.url = new URL(p); + this.TOKEN = TOKEN; + + this.span = { + body: p.span(p.code.span('llhttp__on_body')), + chunkExtensionName: p.span(p.code.span('llhttp__on_chunk_extension_name')), + chunkExtensionValue: p.span(p.code.span('llhttp__on_chunk_extension_value')), + headerField: p.span(p.code.span('llhttp__on_header_field')), + headerValue: p.span(p.code.span('llhttp__on_header_value')), + method: p.span(p.code.span('llhttp__on_method')), + status: p.span(p.code.span('llhttp__on_status')), + version: p.span(p.code.span('llhttp__on_version')), + }; + + /* tslint:disable:object-literal-sort-keys */ + this.callback = { + // User callbacks + onUrlComplete: p.code.match('llhttp__on_url_complete'), + onStatusComplete: p.code.match('llhttp__on_status_complete'), + onMethodComplete: 
p.code.match('llhttp__on_method_complete'), + onVersionComplete: p.code.match('llhttp__on_version_complete'), + onHeaderFieldComplete: p.code.match('llhttp__on_header_field_complete'), + onHeaderValueComplete: p.code.match('llhttp__on_header_value_complete'), + onHeadersComplete: p.code.match('llhttp__on_headers_complete'), + onMessageBegin: p.code.match('llhttp__on_message_begin'), + onMessageComplete: p.code.match('llhttp__on_message_complete'), + onChunkHeader: p.code.match('llhttp__on_chunk_header'), + onChunkExtensionName: p.code.match('llhttp__on_chunk_extension_name_complete'), + onChunkExtensionValue: p.code.match('llhttp__on_chunk_extension_value_complete'), + onChunkComplete: p.code.match('llhttp__on_chunk_complete'), + onReset: p.code.match('llhttp__on_reset'), + + // Internal callbacks `src/http.c` + beforeHeadersComplete: + p.code.match('llhttp__before_headers_complete'), + afterHeadersComplete: p.code.match('llhttp__after_headers_complete'), + afterMessageComplete: p.code.match('llhttp__after_message_complete'), + }; + /* tslint:enable:object-literal-sort-keys */ + + for (const name of NODES) { + this.nodes.set(name, p.node(name) as Match); + } + } + + public build(): IHTTPResult { + const p = this.llparse; + + p.property('i64', 'content_length'); + p.property('i8', 'type'); + p.property('i8', 'method'); + p.property('i8', 'http_major'); + p.property('i8', 'http_minor'); + p.property('i8', 'header_state'); + p.property('i16', 'lenient_flags'); + p.property('i8', 'upgrade'); + p.property('i8', 'finish'); + p.property('i16', 'flags'); + p.property('i16', 'status_code'); + p.property('i8', 'initial_message_completed'); + + // Verify defaults + assert.strictEqual(FINISH.SAFE, 0); + assert.strictEqual(TYPE.BOTH, 0); + + // Shared settings (to be used in C wrapper) + p.property('ptr', 'settings'); + + this.buildLine(); + this.buildHeaders(); + + return { + entry: this.node('start'), + }; + } + + private buildLine(): void { + const p = this.llparse; + const span = this.span; + const n = (name: string): Match => this.node(name); + + const url = this.url.build(); + + const switchType = this.load('type', { + [TYPE.REQUEST]: n('start_req'), + [TYPE.RESPONSE]: n('start_res'), + }, n('start_req_or_res')); + + n('start') + .match([ '\r', '\n' ], n('start')) + .otherwise( + this.load('initial_message_completed', { + 1: this.invokePausable('on_reset', ERROR.CB_RESET, n('after_start')), + }, n('after_start')), + ); + + n('after_start').otherwise( + this.update( + 'finish', + FINISH.UNSAFE, + this.invokePausable('on_message_begin', ERROR.CB_MESSAGE_BEGIN, switchType), + ), + ); + + n('start_req_or_res') + .peek('H', this.span.method.start(n('req_or_res_method'))) + .otherwise(this.update('type', TYPE.REQUEST, 'start_req')); + + n('req_or_res_method') + .select(H_METHOD_MAP, this.store('method', + this.update('type', TYPE.REQUEST, this.span.method.end( + this.invokePausable('on_method_complete', ERROR.CB_METHOD_COMPLETE, n('req_first_space_before_url')), + )), + )) + .match('HTTP/', this.span.method.end(this.update('type', TYPE.RESPONSE, + this.span.version.start(n('res_http_major'))))) + .otherwise(p.error(ERROR.INVALID_CONSTANT, 'Invalid word encountered')); + + const checkVersion = (destination: string): Node => { + const node = n(destination); + const errorNode = this.span.version.end(p.error(ERROR.INVALID_VERSION, 'Invalid HTTP version')); + + return this.testLenientFlags(LENIENT_FLAGS.VERSION, + { + 1: node, + }, + this.load('http_major', { + 0: this.load('http_minor', { + 9: node, + }, 
errorNode), + 1: this.load('http_minor', { + 0: node, + 1: node, + }, errorNode), + 2: this.load('http_minor', { + 0: node, + }, errorNode), + }, errorNode), + ); + }; + + const checkIfAllowLFWithoutCR = (success: Node, failure: Node) => { + return this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_CR_BEFORE_LF, { 1: success }, failure); + }; + + // Response + n('start_res') + .match('HTTP/', span.version.start(n('res_http_major'))) + .otherwise(p.error(ERROR.INVALID_CONSTANT, 'Expected HTTP/')); + + n('res_http_major') + .select(MAJOR, this.store('http_major', 'res_http_dot')) + .otherwise(this.span.version.end(p.error(ERROR.INVALID_VERSION, 'Invalid major version'))); + + n('res_http_dot') + .match('.', n('res_http_minor')) + .otherwise(this.span.version.end(p.error(ERROR.INVALID_VERSION, 'Expected dot'))); + + n('res_http_minor') + .select(MINOR, this.store('http_minor', checkVersion('res_http_end'))) + .otherwise(this.span.version.end(p.error(ERROR.INVALID_VERSION, 'Invalid minor version'))); + + n('res_http_end') + .otherwise(this.span.version.end( + this.invokePausable('on_version_complete', ERROR.CB_VERSION_COMPLETE, 'res_after_version'), + )); + + n('res_after_version') + .match(' ', this.update('status_code', 0, 'res_status_code_digit_1')) + .otherwise(p.error(ERROR.INVALID_VERSION, + 'Expected space after version')); + + n('res_status_code_digit_1') + .select(NUM_MAP, this.mulAdd('status_code', { + overflow: p.error(ERROR.INVALID_STATUS, 'Invalid status code'), + success: 'res_status_code_digit_2', + })) + .otherwise(p.error(ERROR.INVALID_STATUS, 'Invalid status code')); + + n('res_status_code_digit_2') + .select(NUM_MAP, this.mulAdd('status_code', { + overflow: p.error(ERROR.INVALID_STATUS, 'Invalid status code'), + success: 'res_status_code_digit_3', + })) + .otherwise(p.error(ERROR.INVALID_STATUS, 'Invalid status code')); + + n('res_status_code_digit_3') + .select(NUM_MAP, this.mulAdd('status_code', { + overflow: p.error(ERROR.INVALID_STATUS, 'Invalid status code'), + success: 'res_status_code_otherwise', + })) + .otherwise(p.error(ERROR.INVALID_STATUS, 'Invalid status code')); + + const onStatusComplete = this.invokePausable( + 'on_status_complete', ERROR.CB_STATUS_COMPLETE, n('headers_start'), + ); + + n('res_status_code_otherwise') + .match(' ', n('res_status_start')) + .match('\r', n('res_line_almost_done')) + .match( + '\n', + checkIfAllowLFWithoutCR( + onStatusComplete, + p.error(ERROR.INVALID_STATUS, 'Invalid response status'), + ), + ) + .otherwise(p.error(ERROR.INVALID_STATUS, 'Invalid response status')); + + n('res_status_start') + .otherwise(span.status.start(n('res_status'))); + + n('res_status') + .peek('\r', span.status.end().skipTo(n('res_line_almost_done'))) + .peek( + '\n', + span.status.end().skipTo( + checkIfAllowLFWithoutCR( + onStatusComplete, + p.error(ERROR.CR_EXPECTED, 'Missing expected CR after response line'), + ), + ), + ) + .skipTo(n('res_status')); + + n('res_line_almost_done') + .match(['\r', '\n'], onStatusComplete) + .otherwise(this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_LF_AFTER_CR, { + 1: onStatusComplete, + }, p.error(ERROR.STRICT, 'Expected LF after CR'))); + + // Request + n('start_req').otherwise(this.span.method.start(n('after_start_req'))); + + n('after_start_req') + .select(METHOD_MAP, this.store('method', this.span.method.end( + this.invokePausable('on_method_complete', ERROR.CB_METHOD_COMPLETE, n('req_first_space_before_url'), + )))) + .otherwise(p.error(ERROR.INVALID_METHOD, 'Invalid method encountered')); + + 
n('req_first_space_before_url') + .match(' ', n('req_spaces_before_url')) + .otherwise(p.error(ERROR.INVALID_METHOD, 'Expected space after method')); + + n('req_spaces_before_url') + .match(' ', n('req_spaces_before_url')) + .otherwise(this.isEqual('method', METHODS.CONNECT, { + equal: url.entry.connect, + notEqual: url.entry.normal, + })); + + const onUrlCompleteHTTP = this.invokePausable( + 'on_url_complete', ERROR.CB_URL_COMPLETE, n('req_http_start'), + ); + + url.exit.toHTTP + .otherwise(onUrlCompleteHTTP); + + const onUrlCompleteHTTP09 = this.invokePausable( + 'on_url_complete', ERROR.CB_URL_COMPLETE, n('headers_start'), + ); + + url.exit.toHTTP09 + .otherwise( + this.update('http_major', 0, + this.update('http_minor', 9, onUrlCompleteHTTP09)), + ); + + const checkMethod = (methods: METHODS[], error: string): Node => { + const success = n('req_http_version'); + const failure = p.error(ERROR.INVALID_CONSTANT, error); + + const map: { [key: number]: Node } = {}; + for (const method of methods) { + map[method] = success; + } + + return this.load('method', map, failure); + }; + + n('req_http_start') + .match('HTTP/', checkMethod(METHODS_HTTP, + 'Invalid method for HTTP/x.x request')) + .match('RTSP/', checkMethod(METHODS_RTSP, + 'Invalid method for RTSP/x.x request')) + .match('ICE/', checkMethod(METHODS_ICE, + 'Expected SOURCE method for ICE/x.x request')) + .match(' ', n('req_http_start')) + .otherwise(p.error(ERROR.INVALID_CONSTANT, 'Expected HTTP/')); + + n('req_http_version').otherwise(span.version.start(n('req_http_major'))); + + n('req_http_major') + .select(MAJOR, this.store('http_major', 'req_http_dot')) + .otherwise(this.span.version.end(p.error(ERROR.INVALID_VERSION, 'Invalid major version'))); + + n('req_http_dot') + .match('.', n('req_http_minor')) + .otherwise(this.span.version.end(p.error(ERROR.INVALID_VERSION, 'Expected dot'))); + + n('req_http_minor') + .select(MINOR, this.store('http_minor', checkVersion('req_http_end'))) + .otherwise(this.span.version.end(p.error(ERROR.INVALID_VERSION, 'Invalid minor version'))); + + n('req_http_end').otherwise( + span.version.end( + this.invokePausable( + 'on_version_complete', + ERROR.CB_VERSION_COMPLETE, + this.load('method', { + [METHODS.PRI]: n('req_pri_upgrade'), + }, n('req_http_complete')), + ), + ), + ); + + n('req_http_complete') + .match('\r', n('req_http_complete_crlf')) + .match( + '\n', + checkIfAllowLFWithoutCR( + n('req_http_complete_crlf'), + p.error(ERROR.INVALID_VERSION, 'Expected CRLF after version'), + ), + ) + .otherwise(p.error(ERROR.INVALID_VERSION, 'Expected CRLF after version')); + + n('req_http_complete_crlf') + .match('\n', n('headers_start')) + .otherwise(this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_LF_AFTER_CR, { + 1: n('headers_start'), + }, p.error(ERROR.STRICT, 'Expected CRLF after version'))); + + n('req_pri_upgrade') + .match('\r\n\r\nSM\r\n\r\n', + p.error(ERROR.PAUSED_H2_UPGRADE, 'Pause on PRI/Upgrade')) + .otherwise( + p.error(ERROR.INVALID_VERSION, 'Expected HTTP/2 Connection Preface')); + } + + private buildHeaders(): void { + this.buildHeaderField(); + this.buildHeaderValue(); + } + + private buildHeaderField(): void { + const p = this.llparse; + const span = this.span; + const n = (name: string): Match => this.node(name); + + const onInvalidHeaderFieldChar = + p.error(ERROR.INVALID_HEADER_TOKEN, 'Invalid header field char'); + + n('headers_start') + .match(' ', + this.testLenientFlags(LENIENT_FLAGS.HEADERS, { + 1: n('header_field_start'), + }, p.error(ERROR.UNEXPECTED_SPACE, 'Unexpected space 
after start line')), + ) + .otherwise(n('header_field_start')); + + n('header_field_start') + .match('\r', n('headers_almost_done')) + .match('\n', + this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_CR_BEFORE_LF, { + 1: this.testFlags(FLAGS.TRAILING, { + 1: this.invokePausable('on_chunk_complete', + ERROR.CB_CHUNK_COMPLETE, 'message_done'), + }).otherwise(this.headersCompleted()), + }, onInvalidHeaderFieldChar), + ) + .peek(':', p.error(ERROR.INVALID_HEADER_TOKEN, 'Invalid header token')) + .otherwise(span.headerField.start(n('header_field'))); + + n('header_field') + .transform(p.transform.toLower()) + // Match headers that need special treatment + .select(SPECIAL_HEADERS, this.store('header_state', 'header_field_colon')) + .otherwise(this.resetHeaderState('header_field_general')); + + /* https://www.rfc-editor.org/rfc/rfc7230.html#section-3.3.3, paragraph 3. + * + * If a message is received with both a Transfer-Encoding and a + * Content-Length header field, the Transfer-Encoding overrides the + * Content-Length. Such a message might indicate an attempt to + * perform request smuggling (Section 9.5) or response splitting + * (Section 9.4) and **ought to be handled as an error**. A sender MUST + * remove the received Content-Length field prior to forwarding such + * a message downstream. + * + * Since llhttp 9, we go for the stricter approach and treat this as an error. + */ + const checkInvalidTransferEncoding = (otherwise: Node) => { + return this.testFlags(FLAGS.CONTENT_LENGTH, { + 1: this.testLenientFlags(LENIENT_FLAGS.CHUNKED_LENGTH, { + 0: p.error(ERROR.INVALID_TRANSFER_ENCODING, "Transfer-Encoding can't be present with Content-Length"), + }).otherwise(otherwise), + }).otherwise(otherwise); + }; + + const checkInvalidContentLength = (otherwise: Node) => { + return this.testFlags(FLAGS.TRANSFER_ENCODING, { + 1: this.testLenientFlags(LENIENT_FLAGS.CHUNKED_LENGTH, { + 0: p.error(ERROR.INVALID_CONTENT_LENGTH, "Content-Length can't be present with Transfer-Encoding"), + }).otherwise(otherwise), + }).otherwise(otherwise); + }; + + const onHeaderFieldComplete = this.invokePausable( + 'on_header_field_complete', ERROR.CB_HEADER_FIELD_COMPLETE, + this.load('header_state', { + [HEADER_STATE.TRANSFER_ENCODING]: checkInvalidTransferEncoding(n('header_value_discard_ws')), + [HEADER_STATE.CONTENT_LENGTH]: checkInvalidContentLength(n('header_value_discard_ws')), + }, 'header_value_discard_ws'), + ); + + const checkLenientFlagsOnColon = + this.testLenientFlags(LENIENT_FLAGS.HEADERS, { + 1: n('header_field_colon_discard_ws'), + }, span.headerField.end().skipTo(onInvalidHeaderFieldChar)); + + n('header_field_colon') + // https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.4 + // Whitespace character is not allowed between the header field-name + // and colon. If the next token matches whitespace then throw an error. + // + // Add a check for the lenient flag. If the lenient flag is set, the + // whitespace token is allowed to support legacy code not following + // http specs. + .peek(' ', checkLenientFlagsOnColon) + .peek(':', span.headerField.end().skipTo(onHeaderFieldComplete)) + // Fallback to general header, there're additional characters: + // `Connection-Duration` instead of `Connection` and so on. 
+ .otherwise(this.resetHeaderState('header_field_general')); + + n('header_field_colon_discard_ws') + .match(' ', n('header_field_colon_discard_ws')) + .otherwise(n('header_field_colon')); + + n('header_field_general') + .match(this.TOKEN, n('header_field_general')) + .otherwise(n('header_field_general_otherwise')); + + // Just a performance optimization, split the node so that the fast case + // remains in `header_field_general` + n('header_field_general_otherwise') + .peek(':', span.headerField.end().skipTo(onHeaderFieldComplete)) + .otherwise(p.error(ERROR.INVALID_HEADER_TOKEN, 'Invalid header token')); + } + + private buildHeaderValue(): void { + const p = this.llparse; + const span = this.span; + const callback = this.callback; + const n = (name: string): Match => this.node(name); + + const fallback = this.resetHeaderState('header_value'); + + n('header_value_discard_ws') + .match([ ' ', '\t' ], n('header_value_discard_ws')) + .match('\r', n('header_value_discard_ws_almost_done')) + .match('\n', this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_CR_BEFORE_LF, { + 1: n('header_value_discard_lws'), + }, p.error(ERROR.INVALID_HEADER_TOKEN, 'Invalid header value char'))) + .otherwise(span.headerValue.start(n('header_value_start'))); + + n('header_value_discard_ws_almost_done') + .match('\n', n('header_value_discard_lws')) + .otherwise( + this.testLenientFlags(LENIENT_FLAGS.HEADERS, { + 1: n('header_value_discard_lws'), + }, p.error(ERROR.STRICT, 'Expected LF after CR')), + ); + + const onHeaderValueComplete = this.invokePausable( + 'on_header_value_complete', ERROR.CB_HEADER_VALUE_COMPLETE, n('header_field_start'), + ); + + const emptyContentLengthError = p.error( + ERROR.INVALID_CONTENT_LENGTH, 'Empty Content-Length'); + const checkContentLengthEmptiness = this.load('header_state', { + [HEADER_STATE.CONTENT_LENGTH]: emptyContentLengthError, + }, this.setHeaderFlags( + this.emptySpan(span.headerValue, onHeaderValueComplete))); + + n('header_value_discard_lws') + .match([ ' ', '\t' ], this.testLenientFlags(LENIENT_FLAGS.HEADERS, { + 1: n('header_value_discard_ws'), + }, p.error(ERROR.INVALID_HEADER_TOKEN, 'Invalid header value char'))) + .otherwise(checkContentLengthEmptiness); + + // Multiple `Transfer-Encoding` headers should be treated as one, but with + // values separate by a comma. 
+ // + // See: https://tools.ietf.org/html/rfc7230#section-3.2.2 + const toTransferEncoding = this.unsetFlag( + FLAGS.CHUNKED, + 'header_value_te_chunked'); + + // Once chunked has been selected, no other encoding is possible in requests + // https://datatracker.ietf.org/doc/html/rfc7230#section-3.3.1 + const forbidAfterChunkedInRequest = (otherwise: Node) => { + return this.load('type', { + [TYPE.REQUEST]: this.testLenientFlags(LENIENT_FLAGS.TRANSFER_ENCODING, { + 0: span.headerValue.end().skipTo( + p.error(ERROR.INVALID_TRANSFER_ENCODING, 'Invalid `Transfer-Encoding` header value'), + ), + }).otherwise(otherwise), + }, otherwise); + }; + + n('header_value_start') + .otherwise(this.load('header_state', { + [HEADER_STATE.UPGRADE]: this.setFlag(FLAGS.UPGRADE, fallback), + [HEADER_STATE.TRANSFER_ENCODING]: this.testFlags( + FLAGS.CHUNKED, + { + 1: forbidAfterChunkedInRequest(this.setFlag(FLAGS.TRANSFER_ENCODING, toTransferEncoding)), + }, + this.setFlag(FLAGS.TRANSFER_ENCODING, toTransferEncoding)), + [HEADER_STATE.CONTENT_LENGTH]: n('header_value_content_length_once'), + [HEADER_STATE.CONNECTION]: n('header_value_connection'), + }, 'header_value')); + + // + // Transfer-Encoding + // + + n('header_value_te_chunked') + .transform(p.transform.toLowerUnsafe()) + .match( + 'chunked', + n('header_value_te_chunked_last'), + ) + .otherwise(n('header_value_te_token')); + + n('header_value_te_chunked_last') + .match(' ', n('header_value_te_chunked_last')) + .peek([ '\r', '\n' ], this.update('header_state', + HEADER_STATE.TRANSFER_ENCODING_CHUNKED, + 'header_value_otherwise')) + .peek(',', forbidAfterChunkedInRequest(n('header_value_te_chunked'))) + .otherwise(n('header_value_te_token')); + + n('header_value_te_token') + .match(',', n('header_value_te_token_ows')) + .match(CONNECTION_TOKEN_CHARS, n('header_value_te_token')) + .otherwise(fallback); + + n('header_value_te_token_ows') + .match([ ' ', '\t' ], n('header_value_te_token_ows')) + .otherwise(n('header_value_te_chunked')); + + // + // Content-Length + // + + const invalidContentLength = (reason: string): Node => { + // End span for easier testing + // TODO(indutny): minimize code size + return span.headerValue.end() + .otherwise(p.error(ERROR.INVALID_CONTENT_LENGTH, reason)); + }; + + n('header_value_content_length_once') + .otherwise(this.testFlags(FLAGS.CONTENT_LENGTH, { + 0: n('header_value_content_length'), + }, p.error(ERROR.UNEXPECTED_CONTENT_LENGTH, 'Duplicate Content-Length'))); + + n('header_value_content_length') + .select(NUM_MAP, this.mulAdd('content_length', { + overflow: invalidContentLength('Content-Length overflow'), + success: 'header_value_content_length', + })) + .otherwise(n('header_value_content_length_ws')); + + n('header_value_content_length_ws') + .match(' ', n('header_value_content_length_ws')) + .peek([ '\r', '\n' ], + this.setFlag(FLAGS.CONTENT_LENGTH, 'header_value_otherwise')) + .otherwise(invalidContentLength('Invalid character in Content-Length')); + + // + // Connection + // + + n('header_value_connection') + .transform(p.transform.toLower()) + // TODO(indutny): extra node for token back-edge? 
+ // Skip lws + .match([ ' ', '\t' ], n('header_value_connection')) + .match( + 'close', + this.update('header_state', HEADER_STATE.CONNECTION_CLOSE, + 'header_value_connection_ws'), + ) + .match( + 'upgrade', + this.update('header_state', HEADER_STATE.CONNECTION_UPGRADE, + 'header_value_connection_ws'), + ) + .match( + 'keep-alive', + this.update('header_state', HEADER_STATE.CONNECTION_KEEP_ALIVE, + 'header_value_connection_ws'), + ) + .otherwise(n('header_value_connection_token')); + + n('header_value_connection_ws') + .match(',', this.setHeaderFlags('header_value_connection')) + .match(' ', n('header_value_connection_ws')) + .peek([ '\r', '\n' ], n('header_value_otherwise')) + .otherwise(this.resetHeaderState('header_value_connection_token')); + + n('header_value_connection_token') + .match(',', n('header_value_connection')) + .match(CONNECTION_TOKEN_CHARS, + n('header_value_connection_token')) + .otherwise(n('header_value_otherwise')); + + // Split for performance reasons + n('header_value') + .match(HEADER_CHARS, n('header_value')) + .otherwise(n('header_value_otherwise')); + + const checkIfAllowLFWithoutCR = (success: Node, failure: Node) => { + return this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_CR_BEFORE_LF, { 1: success }, failure); + }; + + const checkLenient = this.testLenientFlags(LENIENT_FLAGS.HEADERS, { + 1: n('header_value_lenient'), + }, span.headerValue.end(p.error(ERROR.INVALID_HEADER_TOKEN, 'Invalid header value char'))); + + n('header_value_otherwise') + .peek('\r', span.headerValue.end().skipTo(n('header_value_almost_done'))) + .peek( + '\n', + span.headerValue.end( + checkIfAllowLFWithoutCR( + n('header_value_almost_done'), + p.error(ERROR.CR_EXPECTED, 'Missing expected CR after header value'), + ), + ), + ) + .otherwise(checkLenient); + + n('header_value_lenient') + .peek('\r', span.headerValue.end().skipTo(n('header_value_almost_done'))) + .peek('\n', span.headerValue.end(n('header_value_almost_done'))) + .skipTo(n('header_value_lenient')); + + n('header_value_almost_done') + .match('\n', n('header_value_lws')) + .otherwise(p.error(ERROR.LF_EXPECTED, + 'Missing expected LF after header value')); + + n('header_value_lws') + .peek([ ' ', '\t' ], + this.load('header_state', { + [HEADER_STATE.TRANSFER_ENCODING_CHUNKED]: + this.resetHeaderState(span.headerValue.start(n('header_value_start'))), + }, span.headerValue.start(n('header_value_start')))) + .otherwise(this.setHeaderFlags(onHeaderValueComplete)); + + const checkTrailing = this.testFlags(FLAGS.TRAILING, { + 1: this.invokePausable('on_chunk_complete', + ERROR.CB_CHUNK_COMPLETE, 'message_done'), + }).otherwise(this.headersCompleted()); + + n('headers_almost_done') + .match('\n', checkTrailing) + .otherwise( + this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_LF_AFTER_CR, { + 1: checkTrailing, + }, p.error(ERROR.STRICT, 'Expected LF after headers'))); + + const upgradePause = p.pause(ERROR.PAUSED_UPGRADE, + 'Pause on CONNECT/Upgrade'); + + const afterHeadersComplete = p.invoke(callback.afterHeadersComplete, { + 1: this.invokePausable('on_message_complete', + ERROR.CB_MESSAGE_COMPLETE, upgradePause), + 2: n('chunk_size_start'), + 3: n('body_identity'), + 4: n('body_identity_eof'), + + // non-chunked `Transfer-Encoding` for request, see `src/native/http.c` + 5: p.error(ERROR.INVALID_TRANSFER_ENCODING, + 'Request has invalid `Transfer-Encoding`'), + }); + + n('headers_done') + .otherwise(afterHeadersComplete); + + upgradePause + .otherwise(n('cleanup')); + + afterHeadersComplete + 
.otherwise(this.invokePausable('on_message_complete', + ERROR.CB_MESSAGE_COMPLETE, 'cleanup')); + + n('body_identity') + .otherwise(span.body.start() + .otherwise(p.consume('content_length').otherwise( + span.body.end(n('message_done'))))); + + n('body_identity_eof') + .otherwise( + this.update('finish', FINISH.SAFE_WITH_CB, span.body.start(n('eof')))); + + // Just read everything until EOF + n('eof') + .skipTo(n('eof')); + + n('chunk_size_start') + .otherwise(this.update('content_length', 0, 'chunk_size_digit')); + + const addContentLength = this.mulAdd('content_length', { + overflow: p.error(ERROR.INVALID_CHUNK_SIZE, 'Chunk size overflow'), + success: 'chunk_size', + }, { signed: false, base: 0x10 }); + + n('chunk_size_digit') + .select(HEX_MAP, addContentLength) + .otherwise(p.error(ERROR.INVALID_CHUNK_SIZE, + 'Invalid character in chunk size')); + + n('chunk_size') + .select(HEX_MAP, addContentLength) + .otherwise(n('chunk_size_otherwise')); + + n('chunk_size_otherwise') + .match( + [ ' ', '\t' ], + this.testLenientFlags( + LENIENT_FLAGS.SPACES_AFTER_CHUNK_SIZE, + { + 1: n('chunk_size_otherwise'), + }, + p.error(ERROR.INVALID_CHUNK_SIZE, 'Invalid character in chunk size'), + ), + ) + .match('\r', n('chunk_size_almost_done')) + .match( + '\n', + checkIfAllowLFWithoutCR( + n('chunk_size_almost_done'), + p.error(ERROR.CR_EXPECTED, 'Missing expected CR after chunk size'), + ), + ) + .match(';', n('chunk_extensions')) + .otherwise(p.error(ERROR.INVALID_CHUNK_SIZE, + 'Invalid character in chunk size')); + + const onChunkExtensionNameCompleted = (destination: Node) => { + return this.invokePausable( + 'on_chunk_extension_name', ERROR.CB_CHUNK_EXTENSION_NAME_COMPLETE, destination); + }; + + const onChunkExtensionValueCompleted = (destination: Node) => { + return this.invokePausable( + 'on_chunk_extension_value', ERROR.CB_CHUNK_EXTENSION_VALUE_COMPLETE, destination); + }; + + n('chunk_extensions') + .match(' ', p.error(ERROR.STRICT, 'Invalid character in chunk extensions')) + .match('\r', p.error(ERROR.STRICT, 'Invalid character in chunk extensions')) + .otherwise(this.span.chunkExtensionName.start(n('chunk_extension_name'))); + + n('chunk_extension_name') + .match(TOKEN, n('chunk_extension_name')) + .peek('=', this.span.chunkExtensionName.end().skipTo( + this.span.chunkExtensionValue.start( + onChunkExtensionNameCompleted(n('chunk_extension_value')), + ), + )) + .peek(';', this.span.chunkExtensionName.end().skipTo( + onChunkExtensionNameCompleted(n('chunk_extensions')), + )) + .peek('\r', this.span.chunkExtensionName.end().skipTo( + onChunkExtensionNameCompleted(n('chunk_size_almost_done')), + )) + .peek('\n', this.span.chunkExtensionName.end( + onChunkExtensionNameCompleted( + checkIfAllowLFWithoutCR( + n('chunk_size_almost_done'), + p.error(ERROR.CR_EXPECTED, 'Missing expected CR after chunk extension name'), + ), + ), + )) + .otherwise(this.span.chunkExtensionName.end().skipTo( + p.error(ERROR.STRICT, 'Invalid character in chunk extensions name'), + )); + + n('chunk_extension_value') + .match('"', n('chunk_extension_quoted_value')) + .match(TOKEN, n('chunk_extension_value')) + .peek(';', this.span.chunkExtensionValue.end().skipTo( + onChunkExtensionValueCompleted(n('chunk_extensions')), + )) + .peek('\r', this.span.chunkExtensionValue.end().skipTo( + onChunkExtensionValueCompleted(n('chunk_size_almost_done')), + )) + .peek('\n', this.span.chunkExtensionValue.end( + onChunkExtensionValueCompleted( + checkIfAllowLFWithoutCR( + n('chunk_size_almost_done'), + p.error(ERROR.CR_EXPECTED, 'Missing 
expected CR after chunk extension value'), + ), + ), + )) + .otherwise(this.span.chunkExtensionValue.end().skipTo( + p.error(ERROR.STRICT, 'Invalid character in chunk extensions value'), + )); + + n('chunk_extension_quoted_value') + .match(QUOTED_STRING, n('chunk_extension_quoted_value')) + .match('"', this.span.chunkExtensionValue.end( + onChunkExtensionValueCompleted(n('chunk_extension_quoted_value_done')), + )) + .match('\\', n('chunk_extension_quoted_value_quoted_pair')) + .otherwise(this.span.chunkExtensionValue.end().skipTo( + p.error(ERROR.STRICT, 'Invalid character in chunk extensions quoted value'), + )); + + n('chunk_extension_quoted_value_quoted_pair') + .match(HTAB_SP_VCHAR_OBS_TEXT, n('chunk_extension_quoted_value')) + .otherwise(this.span.chunkExtensionValue.end().skipTo( + p.error(ERROR.STRICT, 'Invalid quoted-pair in chunk extensions quoted value'), + )); + + n('chunk_extension_quoted_value_done') + .match(';', n('chunk_extensions')) + .match('\r', n('chunk_size_almost_done')) + .peek( + '\n', + checkIfAllowLFWithoutCR( + n('chunk_size_almost_done'), + p.error(ERROR.CR_EXPECTED, 'Missing expected CR after chunk extension value'), + ), + ) + .otherwise(p.error(ERROR.STRICT, + 'Invalid character in chunk extensions quote value')); + + n('chunk_size_almost_done') + .match('\n', n('chunk_size_almost_done_lf')) + .otherwise( + this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_LF_AFTER_CR, { + 1: n('chunk_size_almost_done_lf'), + }).otherwise(p.error(ERROR.STRICT, 'Expected LF after chunk size')), + ); + + const toChunk = this.isEqual('content_length', 0, { + equal: this.setFlag(FLAGS.TRAILING, 'header_field_start'), + notEqual: 'chunk_data', + }); + + n('chunk_size_almost_done_lf') + .otherwise(this.invokePausable('on_chunk_header', + ERROR.CB_CHUNK_HEADER, toChunk)); + + n('chunk_data') + .otherwise(span.body.start() + .otherwise(p.consume('content_length').otherwise( + span.body.end(n('chunk_data_almost_done'))))); + + n('chunk_data_almost_done') + .match('\r\n', n('chunk_complete')) + .match( + '\n', + checkIfAllowLFWithoutCR( + n('chunk_complete'), + p.error(ERROR.CR_EXPECTED, 'Missing expected CR after chunk data'), + ), + ) + .otherwise( + this.testLenientFlags(LENIENT_FLAGS.OPTIONAL_CRLF_AFTER_CHUNK, { + 1: n('chunk_complete'), + }).otherwise(p.error(ERROR.STRICT, 'Expected LF after chunk data')), + ); + + n('chunk_complete') + .otherwise(this.invokePausable('on_chunk_complete', + ERROR.CB_CHUNK_COMPLETE, 'chunk_size_start')); + + const upgradeAfterDone = this.isEqual('upgrade', 1, { + // Exit, the rest of the message is in a different protocol. 
+ equal: upgradePause, + + // Restart + notEqual: 'cleanup', + }); + + n('message_done') + .otherwise(this.invokePausable('on_message_complete', + ERROR.CB_MESSAGE_COMPLETE, upgradeAfterDone)); + + const lenientClose = this.testLenientFlags(LENIENT_FLAGS.KEEP_ALIVE, { + 1: n('restart'), + }, n('closed')); + + // Check if we'd like to keep-alive + n('cleanup') + .otherwise(p.invoke(callback.afterMessageComplete, { + 1: this.update('content_length', 0, n('restart')), + }, this.update('finish', FINISH.SAFE, lenientClose))); + + const lenientDiscardAfterClose = this.testLenientFlags(LENIENT_FLAGS.DATA_AFTER_CLOSE, { + 1: n('closed'), + }, p.error(ERROR.CLOSED_CONNECTION, 'Data after `Connection: close`')); + + n('closed') + .match([ '\r', '\n' ], n('closed')) + .skipTo(lenientDiscardAfterClose); + + n('restart') + .otherwise( + this.update('initial_message_completed', 1, this.update('finish', FINISH.SAFE, n('start')), + )); + } + + private headersCompleted(): Node { + const p = this.llparse; + const callback = this.callback; + const n = (name: string): Match => this.node(name); + + // Set `upgrade` if needed + const beforeHeadersComplete = p.invoke(callback.beforeHeadersComplete); + + /* Here we call the headers_complete callback. This is somewhat + * different than other callbacks because if the user returns 1, we + * will interpret that as saying that this message has no body. This + * is needed for the annoying case of receiving a response to a HEAD + * request. + * + * We'd like to use CALLBACK_NOTIFY_NOADVANCE() here but we cannot, so + * we have to simulate it by handling a change in errno below. + */ + const onHeadersComplete = p.invoke(callback.onHeadersComplete, { + 0: n('headers_done'), + 1: this.setFlag(FLAGS.SKIPBODY, 'headers_done'), + 2: this.update('upgrade', 1, + this.setFlag(FLAGS.SKIPBODY, 'headers_done')), + [ERROR.PAUSED]: this.pause('Paused by on_headers_complete', + 'headers_done'), + }, p.error(ERROR.CB_HEADERS_COMPLETE, 'User callback error')); + + beforeHeadersComplete.otherwise(onHeadersComplete); + + return beforeHeadersComplete; + } + + private node(name: string | T): T { + if (name instanceof Node) { + return name; + } + + assert(this.nodes.has(name), `Unknown node with name "${name}"`); + return this.nodes.get(name)! 
as any; + } + + private load(field: string, map: { [key: number]: Node }, + next?: string | Node): Node { + const p = this.llparse; + + const res = p.invoke(p.code.load(field), map); + if (next !== undefined) { + res.otherwise(this.node(next)); + } + return res; + } + + private store(field: string, next?: string | Node): Node { + const p = this.llparse; + + const res = p.invoke(p.code.store(field)); + if (next !== undefined) { + res.otherwise(this.node(next)); + } + return res; + } + + private update(field: string, value: number, next?: string | Node): Node { + const p = this.llparse; + + const res = p.invoke(p.code.update(field, value)); + if (next !== undefined) { + res.otherwise(this.node(next)); + } + return res; + } + + private resetHeaderState(next: string | Node): Node { + return this.update('header_state', HEADER_STATE.GENERAL, next); + } + + private emptySpan(span: source.Span, next: string | Node): Node { + return span.start(span.end(this.node(next))); + } + + private unsetFlag(flag: FLAGS, next: string | Node): Node { + const p = this.llparse; + return p.invoke(p.code.and('flags', ~flag), this.node(next)); + } + + private setFlag(flag: FLAGS, next: string | Node): Node { + const p = this.llparse; + return p.invoke(p.code.or('flags', flag), this.node(next)); + } + + private testFlags(flag: FLAGS, map: { [key: number]: Node }, + next?: string | Node): Node { + const p = this.llparse; + const res = p.invoke(p.code.test('flags', flag), map); + if (next !== undefined) { + res.otherwise(this.node(next)); + } + return res; + } + + private testLenientFlags(flag: LENIENT_FLAGS, map: { [key: number]: Node }, + next?: string | Node): Node { + const p = this.llparse; + const res = p.invoke(p.code.test('lenient_flags', flag), map); + if (next !== undefined) { + res.otherwise(this.node(next)); + } + return res; + } + + private setHeaderFlags(next: string | Node): Node { + const HS = HEADER_STATE; + const F = FLAGS; + + const toConnection = + this.update('header_state', HEADER_STATE.CONNECTION, next); + + return this.load('header_state', { + [HS.CONNECTION_KEEP_ALIVE]: + this.setFlag(F.CONNECTION_KEEP_ALIVE, toConnection), + [HS.CONNECTION_CLOSE]: this.setFlag(F.CONNECTION_CLOSE, toConnection), + [HS.CONNECTION_UPGRADE]: this.setFlag(F.CONNECTION_UPGRADE, toConnection), + [HS.TRANSFER_ENCODING_CHUNKED]: this.setFlag(F.CHUNKED, next), + }, this.node(next)); + } + + private mulAdd(field: string, targets: IMulTargets, + options: IMulOptions = { base: 10, signed: false }): Node { + const p = this.llparse; + + return p.invoke(p.code.mulAdd(field, options), { + 1: this.node(targets.overflow), + }, this.node(targets.success)); + } + + private isEqual(field: string, value: number, map: IIsEqualTargets) { + const p = this.llparse; + return p.invoke(p.code.isEqual(field, value), { + 0: this.node(map.notEqual), + }, this.node(map.equal)); + } + + private pause(msg: string, next?: string | Node) { + const p = this.llparse; + const res = p.pause(ERROR.PAUSED, msg); + if (next !== undefined) { + res.otherwise(this.node(next)); + } + return res; + } + + private invokePausable(name: string, errorCode: ERROR, next: string | Node) + : Node { + let cb; + + switch (name) { + case 'on_message_begin': + cb = this.callback.onMessageBegin; + break; + case 'on_url_complete': + cb = this.callback.onUrlComplete; + break; + case 'on_status_complete': + cb = this.callback.onStatusComplete; + break; + case 'on_method_complete': + cb = this.callback.onMethodComplete; + break; + case 'on_version_complete': + cb = 
this.callback.onVersionComplete; + break; + case 'on_header_field_complete': + cb = this.callback.onHeaderFieldComplete; + break; + case 'on_header_value_complete': + cb = this.callback.onHeaderValueComplete; + break; + case 'on_message_complete': + cb = this.callback.onMessageComplete; + break; + case 'on_chunk_header': + cb = this.callback.onChunkHeader; + break; + case 'on_chunk_extension_name': + cb = this.callback.onChunkExtensionName; + break; + case 'on_chunk_extension_value': + cb = this.callback.onChunkExtensionValue; + break; + case 'on_chunk_complete': + cb = this.callback.onChunkComplete; + break; + case 'on_reset': + cb = this.callback.onReset; + break; + default: + throw new Error('Unknown callback: ' + name); + } + + const p = this.llparse; + return p.invoke(cb, { + 0: this.node(next), + [ERROR.PAUSED]: this.pause(`${name} pause`, next), + }, p.error(errorCode, `\`${name}\` callback error`)); + } +} diff --git a/llhttp/src/llhttp/url.ts b/llhttp/src/llhttp/url.ts new file mode 100644 index 0000000..c5fced9 --- /dev/null +++ b/llhttp/src/llhttp/url.ts @@ -0,0 +1,220 @@ +import { LLParse, source } from 'llparse'; + +import Match = source.node.Match; +import Node = source.node.Node; + +import { + ALPHA, + CharList, + ERROR, + URL_CHAR, + USERINFO_CHARS, +} from './constants'; + +type SpanName = 'schema' | 'host' | 'path' | 'query' | 'fragment' | 'url'; + +export interface IURLResult { + readonly entry: { + readonly normal: Node; + readonly connect: Node; + }; + readonly exit: { + readonly toHTTP: Node; + readonly toHTTP09: Node; + }; +} + +type SpanTable = Map; + +export class URL { + private readonly spanTable: SpanTable = new Map(); + private readonly errorInvalid: Node; + private readonly URL_CHAR: CharList; + + constructor(private readonly llparse: LLParse, separateSpans: boolean = false) { + const p = this.llparse; + + this.errorInvalid = p.error(ERROR.INVALID_URL, 'Invalid characters in url'); + + this.URL_CHAR = URL_CHAR; + + const table = this.spanTable; + if (separateSpans) { + table.set('schema', p.span(p.code.span('llhttp__on_url_schema'))); + table.set('host', p.span(p.code.span('llhttp__on_url_host'))); + table.set('path', p.span(p.code.span('llhttp__on_url_path'))); + table.set('query', p.span(p.code.span('llhttp__on_url_query'))); + table.set('fragment', + p.span(p.code.span('llhttp__on_url_fragment'))); + } else { + table.set('url', p.span(p.code.span('llhttp__on_url'))); + } + } + + public build(): IURLResult { + const p = this.llparse; + + const entry = { + connect: this.node('entry_connect'), + normal: this.node('entry_normal'), + }; + + const start = this.node('start'); + const path = this.node('path'); + const queryOrFragment = this.node('query_or_fragment'); + const schema = this.node('schema'); + const schemaDelim = this.node('schema_delim'); + const server = this.node('server'); + const queryStart = this.node('query_start'); + const query = this.node('query'); + const fragment = this.node('fragment'); + const serverWithAt = this.node('server_with_at'); + + entry.normal + .otherwise(this.spanStart('url', start)); + + entry.connect + .otherwise(this.spanStart('url', this.spanStart('host', server))); + + start + .peek([ '/', '*' ], this.spanStart('path').skipTo(path)) + .peek(ALPHA, this.spanStart('schema', schema)) + .otherwise(p.error(ERROR.INVALID_URL, 'Unexpected start char in url')); + + schema + .match(ALPHA, schema) + .peek(':', this.spanEnd('schema').skipTo(schemaDelim)) + .otherwise(p.error(ERROR.INVALID_URL, 'Unexpected char in url schema')); + + 
schemaDelim + .match('//', this.spanStart('host', server)) + .otherwise(p.error(ERROR.INVALID_URL, 'Unexpected char in url schema')); + + for (const node of [server, serverWithAt]) { + node + .peek('/', this.spanEnd('host', this.spanStart('path').skipTo(path))) + .match('?', this.spanEnd('host', this.spanStart('query', query))) + .match(USERINFO_CHARS, server) + .match([ '[', ']' ], server) + .otherwise(p.error(ERROR.INVALID_URL, 'Unexpected char in url server')); + + if (node !== serverWithAt) { + node.match('@', serverWithAt); + } + } + + serverWithAt + .match('@', p.error(ERROR.INVALID_URL, 'Double @ in url')); + + path + .match(this.URL_CHAR, path) + .otherwise(this.spanEnd('path', queryOrFragment)); + + // Performance optimization, split `path` so that the fast case remains + // there + queryOrFragment + .match('?', this.spanStart('query', query)) + .match('#', this.spanStart('fragment', fragment)) + .otherwise(p.error(ERROR.INVALID_URL, 'Invalid char in url path')); + + query + .match(this.URL_CHAR, query) + // Allow extra '?' in query string + .match('?', query) + .peek('#', this.spanEnd('query') + .skipTo(this.spanStart('fragment', fragment))) + .otherwise(p.error(ERROR.INVALID_URL, 'Invalid char in url query')); + + fragment + .match(this.URL_CHAR, fragment) + .match([ '?', '#' ], fragment) + .otherwise( + p.error(ERROR.INVALID_URL, 'Invalid char in url fragment start')); + + for (const node of [ start, schema, schemaDelim ]) { + /* No whitespace allowed here */ + node.match([ ' ', '\r', '\n' ], this.errorInvalid); + } + + // Adaptors + const toHTTP = this.node('to_http'); + const toHTTP09 = this.node('to_http_09'); + + const skipToHTTP = this.node('skip_to_http') + .skipTo(toHTTP); + + const skipToHTTP09 = this.node('skip_to_http09') + .skipTo(toHTTP09); + + const skipCRLF = this.node('skip_lf_to_http09') + .match('\r\n', toHTTP09) + .otherwise(p.error(ERROR.INVALID_URL, 'Expected CRLF')); + + for (const node of [server, serverWithAt, queryOrFragment, queryStart, query, fragment]) { + let spanName: SpanName | undefined; + + if (node === server || node === serverWithAt) { + spanName = 'host'; + } else if (node === queryStart || node === query) { + spanName = 'query'; + } else if (node === fragment) { + spanName = 'fragment'; + } + + const endTo = (target: Node): Node => { + let res: Node = this.spanEnd('url', target); + if (spanName !== undefined) { + res = this.spanEnd(spanName, res); + } + return res; + }; + + node.peek(' ', endTo(skipToHTTP)); + + node.peek('\r', endTo(skipCRLF)); + node.peek('\n', endTo(skipToHTTP09)); + } + + return { + entry, + exit: { + toHTTP, + toHTTP09, + }, + }; + } + + private spanStart(name: SpanName, otherwise?: Node): Node { + let res: Node; + if (this.spanTable.has(name)) { + res = this.spanTable.get(name)!.start(); + } else { + res = this.llparse.node('span_start_stub_' + name); + } + if (otherwise !== undefined) { + res.otherwise(otherwise); + } + return res; + } + + private spanEnd(name: SpanName, otherwise?: Node): Node { + let res: Node; + if (this.spanTable.has(name)) { + res = this.spanTable.get(name)!.end(); + } else { + res = this.llparse.node('span_end_stub_' + name); + } + if (otherwise !== undefined) { + res.otherwise(otherwise); + } + return res; + } + + private node(name: string): Match { + const res = this.llparse.node('url_' + name); + + res.match([ '\t', '\f' ], this.errorInvalid); + + return res; + } +} diff --git a/llhttp/src/llhttp/utils.ts b/llhttp/src/llhttp/utils.ts new file mode 100644 index 0000000..7c01d66 --- /dev/null 
+++ b/llhttp/src/llhttp/utils.ts @@ -0,0 +1,27 @@ +export interface IEnumMap { + [key: string]: number; +} + +export function enumToMap( + obj: any, + filter?: ReadonlyArray, + exceptions?: ReadonlyArray, +): IEnumMap { + const res: IEnumMap = {}; + + for (const key of Object.keys(obj)) { + const value = obj[key]; + if (typeof value !== 'number') { + continue; + } + if (filter && !filter.includes(value)) { + continue; + } + if (exceptions && exceptions.includes(value)) { + continue; + } + res[key] = value; + } + + return res; +} diff --git a/llhttp/src/native/api.c b/llhttp/src/native/api.c new file mode 100644 index 0000000..8c2ce3d --- /dev/null +++ b/llhttp/src/native/api.c @@ -0,0 +1,510 @@ +#include +#include +#include + +#include "llhttp.h" + +#define CALLBACK_MAYBE(PARSER, NAME) \ + do { \ + const llhttp_settings_t* settings; \ + settings = (const llhttp_settings_t*) (PARSER)->settings; \ + if (settings == NULL || settings->NAME == NULL) { \ + err = 0; \ + break; \ + } \ + err = settings->NAME((PARSER)); \ + } while (0) + +#define SPAN_CALLBACK_MAYBE(PARSER, NAME, START, LEN) \ + do { \ + const llhttp_settings_t* settings; \ + settings = (const llhttp_settings_t*) (PARSER)->settings; \ + if (settings == NULL || settings->NAME == NULL) { \ + err = 0; \ + break; \ + } \ + err = settings->NAME((PARSER), (START), (LEN)); \ + if (err == -1) { \ + err = HPE_USER; \ + llhttp_set_error_reason((PARSER), "Span callback error in " #NAME); \ + } \ + } while (0) + +void llhttp_init(llhttp_t* parser, llhttp_type_t type, + const llhttp_settings_t* settings) { + llhttp__internal_init(parser); + + parser->type = type; + parser->settings = (void*) settings; +} + + +#if defined(__wasm__) + +extern int wasm_on_message_begin(llhttp_t * p); +extern int wasm_on_url(llhttp_t* p, const char* at, size_t length); +extern int wasm_on_status(llhttp_t* p, const char* at, size_t length); +extern int wasm_on_header_field(llhttp_t* p, const char* at, size_t length); +extern int wasm_on_header_value(llhttp_t* p, const char* at, size_t length); +extern int wasm_on_headers_complete(llhttp_t * p, int status_code, + uint8_t upgrade, int should_keep_alive); +extern int wasm_on_body(llhttp_t* p, const char* at, size_t length); +extern int wasm_on_message_complete(llhttp_t * p); + +static int wasm_on_headers_complete_wrap(llhttp_t* p) { + return wasm_on_headers_complete(p, p->status_code, p->upgrade, + llhttp_should_keep_alive(p)); +} + +const llhttp_settings_t wasm_settings = { + wasm_on_message_begin, + wasm_on_url, + wasm_on_status, + NULL, + NULL, + wasm_on_header_field, + wasm_on_header_value, + NULL, + NULL, + wasm_on_headers_complete_wrap, + wasm_on_body, + wasm_on_message_complete, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, + NULL, +}; + + +llhttp_t* llhttp_alloc(llhttp_type_t type) { + llhttp_t* parser = malloc(sizeof(llhttp_t)); + llhttp_init(parser, type, &wasm_settings); + return parser; +} + +void llhttp_free(llhttp_t* parser) { + free(parser); +} + +#endif // defined(__wasm__) + +/* Some getters required to get stuff from the parser */ + +uint8_t llhttp_get_type(llhttp_t* parser) { + return parser->type; +} + +uint8_t llhttp_get_http_major(llhttp_t* parser) { + return parser->http_major; +} + +uint8_t llhttp_get_http_minor(llhttp_t* parser) { + return parser->http_minor; +} + +uint8_t llhttp_get_method(llhttp_t* parser) { + return parser->method; +} + +int llhttp_get_status_code(llhttp_t* parser) { + return parser->status_code; +} + +uint8_t llhttp_get_upgrade(llhttp_t* 
parser) { + return parser->upgrade; +} + + +void llhttp_reset(llhttp_t* parser) { + llhttp_type_t type = parser->type; + const llhttp_settings_t* settings = parser->settings; + void* data = parser->data; + uint16_t lenient_flags = parser->lenient_flags; + + llhttp__internal_init(parser); + + parser->type = type; + parser->settings = (void*) settings; + parser->data = data; + parser->lenient_flags = lenient_flags; +} + + +llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len) { + return llhttp__internal_execute(parser, data, data + len); +} + + +void llhttp_settings_init(llhttp_settings_t* settings) { + memset(settings, 0, sizeof(*settings)); +} + + +llhttp_errno_t llhttp_finish(llhttp_t* parser) { + int err; + + /* We're in an error state. Don't bother doing anything. */ + if (parser->error != 0) { + return 0; + } + + switch (parser->finish) { + case HTTP_FINISH_SAFE_WITH_CB: + CALLBACK_MAYBE(parser, on_message_complete); + if (err != HPE_OK) return err; + + /* FALLTHROUGH */ + case HTTP_FINISH_SAFE: + return HPE_OK; + case HTTP_FINISH_UNSAFE: + parser->reason = "Invalid EOF state"; + return HPE_INVALID_EOF_STATE; + default: + abort(); + } +} + + +void llhttp_pause(llhttp_t* parser) { + if (parser->error != HPE_OK) { + return; + } + + parser->error = HPE_PAUSED; + parser->reason = "Paused"; +} + + +void llhttp_resume(llhttp_t* parser) { + if (parser->error != HPE_PAUSED) { + return; + } + + parser->error = 0; +} + + +void llhttp_resume_after_upgrade(llhttp_t* parser) { + if (parser->error != HPE_PAUSED_UPGRADE) { + return; + } + + parser->error = 0; +} + + +llhttp_errno_t llhttp_get_errno(const llhttp_t* parser) { + return parser->error; +} + + +const char* llhttp_get_error_reason(const llhttp_t* parser) { + return parser->reason; +} + + +void llhttp_set_error_reason(llhttp_t* parser, const char* reason) { + parser->reason = reason; +} + + +const char* llhttp_get_error_pos(const llhttp_t* parser) { + return parser->error_pos; +} + + +const char* llhttp_errno_name(llhttp_errno_t err) { +#define HTTP_ERRNO_GEN(CODE, NAME, _) case HPE_##NAME: return "HPE_" #NAME; + switch (err) { + HTTP_ERRNO_MAP(HTTP_ERRNO_GEN) + default: abort(); + } +#undef HTTP_ERRNO_GEN +} + + +const char* llhttp_method_name(llhttp_method_t method) { +#define HTTP_METHOD_GEN(NUM, NAME, STRING) case HTTP_##NAME: return #STRING; + switch (method) { + HTTP_ALL_METHOD_MAP(HTTP_METHOD_GEN) + default: abort(); + } +#undef HTTP_METHOD_GEN +} + +const char* llhttp_status_name(llhttp_status_t status) { +#define HTTP_STATUS_GEN(NUM, NAME, STRING) case HTTP_STATUS_##NAME: return #STRING; + switch (status) { + HTTP_STATUS_MAP(HTTP_STATUS_GEN) + default: abort(); + } +#undef HTTP_STATUS_GEN +} + + +void llhttp_set_lenient_headers(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_HEADERS; + } else { + parser->lenient_flags &= ~LENIENT_HEADERS; + } +} + + +void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_CHUNKED_LENGTH; + } else { + parser->lenient_flags &= ~LENIENT_CHUNKED_LENGTH; + } +} + + +void llhttp_set_lenient_keep_alive(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_KEEP_ALIVE; + } else { + parser->lenient_flags &= ~LENIENT_KEEP_ALIVE; + } +} + +void llhttp_set_lenient_transfer_encoding(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_TRANSFER_ENCODING; + } else { + parser->lenient_flags &= ~LENIENT_TRANSFER_ENCODING; + } +} 
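+
+/* A minimal, illustrative usage sketch of the lenient setters defined in this
+ * file, kept under `#if 0` so it is not compiled into the library. Every type
+ * and function used (llhttp_t, llhttp_settings_t, llhttp_settings_init,
+ * llhttp_init, llhttp_set_lenient_optional_lf_after_cr, llhttp_execute,
+ * HTTP_REQUEST, HPE_OK) is part of the public API declared by llhttp.h/api.h
+ * elsewhere in this patch; only the example function name and the request
+ * buffer are made up for illustration.
+ */
+#if 0
+static void example_lenient_usage(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+  llhttp_errno_t err;
+  static const char req[] = "GET / HTTP/1.1\r\nHost: example.com\r\n\r\n";
+
+  llhttp_settings_init(&settings);              /* zero out all callbacks */
+  llhttp_init(&parser, HTTP_REQUEST, &settings);
+
+  /* Tolerate a bare CR (no LF) terminating request line / headers; see the
+   * security caveats on llhttp_set_lenient_optional_lf_after_cr() in api.h. */
+  llhttp_set_lenient_optional_lf_after_cr(&parser, 1);
+
+  err = llhttp_execute(&parser, req, sizeof(req) - 1);
+  if (err != HPE_OK) {
+    /* llhttp_errno_name(err) and llhttp_get_error_reason(&parser)
+     * describe the failure. */
+  }
+}
+#endif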
+ +void llhttp_set_lenient_version(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_VERSION; + } else { + parser->lenient_flags &= ~LENIENT_VERSION; + } +} + +void llhttp_set_lenient_data_after_close(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_DATA_AFTER_CLOSE; + } else { + parser->lenient_flags &= ~LENIENT_DATA_AFTER_CLOSE; + } +} + +void llhttp_set_lenient_optional_lf_after_cr(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_OPTIONAL_LF_AFTER_CR; + } else { + parser->lenient_flags &= ~LENIENT_OPTIONAL_LF_AFTER_CR; + } +} + +void llhttp_set_lenient_optional_crlf_after_chunk(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_OPTIONAL_CRLF_AFTER_CHUNK; + } else { + parser->lenient_flags &= ~LENIENT_OPTIONAL_CRLF_AFTER_CHUNK; + } +} + +void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_OPTIONAL_CR_BEFORE_LF; + } else { + parser->lenient_flags &= ~LENIENT_OPTIONAL_CR_BEFORE_LF; + } +} + +void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled) { + if (enabled) { + parser->lenient_flags |= LENIENT_SPACES_AFTER_CHUNK_SIZE; + } else { + parser->lenient_flags &= ~LENIENT_SPACES_AFTER_CHUNK_SIZE; + } +} + +/* Callbacks */ + + +int llhttp__on_message_begin(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_message_begin); + return err; +} + + +int llhttp__on_url(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_url, p, endp - p); + return err; +} + + +int llhttp__on_url_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_url_complete); + return err; +} + + +int llhttp__on_status(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_status, p, endp - p); + return err; +} + + +int llhttp__on_status_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_status_complete); + return err; +} + + +int llhttp__on_method(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_method, p, endp - p); + return err; +} + + +int llhttp__on_method_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_method_complete); + return err; +} + + +int llhttp__on_version(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_version, p, endp - p); + return err; +} + + +int llhttp__on_version_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_version_complete); + return err; +} + + +int llhttp__on_header_field(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_header_field, p, endp - p); + return err; +} + + +int llhttp__on_header_field_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_header_field_complete); + return err; +} + + +int llhttp__on_header_value(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_header_value, p, endp - p); + return err; +} + + +int llhttp__on_header_value_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_header_value_complete); + return err; +} + + +int llhttp__on_headers_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_headers_complete); + return 
err; +} + + +int llhttp__on_message_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_message_complete); + return err; +} + + +int llhttp__on_body(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_body, p, endp - p); + return err; +} + + +int llhttp__on_chunk_header(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_chunk_header); + return err; +} + + +int llhttp__on_chunk_extension_name(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_chunk_extension_name, p, endp - p); + return err; +} + + +int llhttp__on_chunk_extension_name_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_chunk_extension_name_complete); + return err; +} + + +int llhttp__on_chunk_extension_value(llhttp_t* s, const char* p, const char* endp) { + int err; + SPAN_CALLBACK_MAYBE(s, on_chunk_extension_value, p, endp - p); + return err; +} + + +int llhttp__on_chunk_extension_value_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_chunk_extension_value_complete); + return err; +} + + +int llhttp__on_chunk_complete(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_chunk_complete); + return err; +} + + +int llhttp__on_reset(llhttp_t* s, const char* p, const char* endp) { + int err; + CALLBACK_MAYBE(s, on_reset); + return err; +} + + +/* Private */ + + +void llhttp__debug(llhttp_t* s, const char* p, const char* endp, + const char* msg) { + if (p == endp) { + fprintf(stderr, "p=%p type=%d flags=%02x next=null debug=%s\n", s, s->type, + s->flags, msg); + } else { + fprintf(stderr, "p=%p type=%d flags=%02x next=%02x debug=%s\n", s, + s->type, s->flags, *p, msg); + } +} diff --git a/llhttp/src/native/api.h b/llhttp/src/native/api.h new file mode 100644 index 0000000..321879c --- /dev/null +++ b/llhttp/src/native/api.h @@ -0,0 +1,355 @@ +#ifndef INCLUDE_LLHTTP_API_H_ +#define INCLUDE_LLHTTP_API_H_ +#ifdef __cplusplus +extern "C" { +#endif +#include + +#if defined(__wasm__) +#define LLHTTP_EXPORT __attribute__((visibility("default"))) +#else +#define LLHTTP_EXPORT +#endif + +typedef llhttp__internal_t llhttp_t; +typedef struct llhttp_settings_s llhttp_settings_t; + +typedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length); +typedef int (*llhttp_cb)(llhttp_t*); + +struct llhttp_settings_s { + /* Possible return values 0, -1, `HPE_PAUSED` */ + llhttp_cb on_message_begin; + + /* Possible return values 0, -1, HPE_USER */ + llhttp_data_cb on_url; + llhttp_data_cb on_status; + llhttp_data_cb on_method; + llhttp_data_cb on_version; + llhttp_data_cb on_header_field; + llhttp_data_cb on_header_value; + llhttp_data_cb on_chunk_extension_name; + llhttp_data_cb on_chunk_extension_value; + + /* Possible return values: + * 0 - Proceed normally + * 1 - Assume that request/response has no body, and proceed to parsing the + * next message + * 2 - Assume absence of body (as above) and make `llhttp_execute()` return + * `HPE_PAUSED_UPGRADE` + * -1 - Error + * `HPE_PAUSED` + */ + llhttp_cb on_headers_complete; + + /* Possible return values 0, -1, HPE_USER */ + llhttp_data_cb on_body; + + /* Possible return values 0, -1, `HPE_PAUSED` */ + llhttp_cb on_message_complete; + llhttp_cb on_url_complete; + llhttp_cb on_status_complete; + llhttp_cb on_method_complete; + llhttp_cb on_version_complete; + llhttp_cb on_header_field_complete; + llhttp_cb on_header_value_complete; + llhttp_cb 
on_chunk_extension_name_complete; + llhttp_cb on_chunk_extension_value_complete; + + /* When on_chunk_header is called, the current chunk length is stored + * in parser->content_length. + * Possible return values 0, -1, `HPE_PAUSED` + */ + llhttp_cb on_chunk_header; + llhttp_cb on_chunk_complete; + llhttp_cb on_reset; +}; + +/* Initialize the parser with specific type and user settings. + * + * NOTE: lifetime of `settings` has to be at least the same as the lifetime of + * the `parser` here. In practice, `settings` has to be either a static + * variable or be allocated with `malloc`, `new`, etc. + */ +LLHTTP_EXPORT +void llhttp_init(llhttp_t* parser, llhttp_type_t type, + const llhttp_settings_t* settings); + +LLHTTP_EXPORT +llhttp_t* llhttp_alloc(llhttp_type_t type); + +LLHTTP_EXPORT +void llhttp_free(llhttp_t* parser); + +LLHTTP_EXPORT +uint8_t llhttp_get_type(llhttp_t* parser); + +LLHTTP_EXPORT +uint8_t llhttp_get_http_major(llhttp_t* parser); + +LLHTTP_EXPORT +uint8_t llhttp_get_http_minor(llhttp_t* parser); + +LLHTTP_EXPORT +uint8_t llhttp_get_method(llhttp_t* parser); + +LLHTTP_EXPORT +int llhttp_get_status_code(llhttp_t* parser); + +LLHTTP_EXPORT +uint8_t llhttp_get_upgrade(llhttp_t* parser); + +/* Reset an already initialized parser back to the start state, preserving the + * existing parser type, callback settings, user data, and lenient flags. + */ +LLHTTP_EXPORT +void llhttp_reset(llhttp_t* parser); + +/* Initialize the settings object */ +LLHTTP_EXPORT +void llhttp_settings_init(llhttp_settings_t* settings); + +/* Parse full or partial request/response, invoking user callbacks along the + * way. + * + * If any of `llhttp_data_cb` returns errno not equal to `HPE_OK` - the parsing + * interrupts, and such errno is returned from `llhttp_execute()`. If + * `HPE_PAUSED` was used as a errno, the execution can be resumed with + * `llhttp_resume()` call. + * + * In a special case of CONNECT/Upgrade request/response `HPE_PAUSED_UPGRADE` + * is returned after fully parsing the request/response. If the user wishes to + * continue parsing, they need to invoke `llhttp_resume_after_upgrade()`. + * + * NOTE: if this function ever returns a non-pause type error, it will continue + * to return the same error upon each successive call up until `llhttp_init()` + * is called. + */ +LLHTTP_EXPORT +llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len); + +/* This method should be called when the other side has no further bytes to + * send (e.g. shutdown of readable side of the TCP connection.) + * + * Requests without `Content-Length` and other messages might require treating + * all incoming bytes as the part of the body, up to the last byte of the + * connection. This method will invoke `on_message_complete()` callback if the + * request was terminated safely. Otherwise a error code would be returned. + */ +LLHTTP_EXPORT +llhttp_errno_t llhttp_finish(llhttp_t* parser); + +/* Returns `1` if the incoming message is parsed until the last byte, and has + * to be completed by calling `llhttp_finish()` on EOF + */ +LLHTTP_EXPORT +int llhttp_message_needs_eof(const llhttp_t* parser); + +/* Returns `1` if there might be any other messages following the last that was + * successfully parsed. + */ +LLHTTP_EXPORT +int llhttp_should_keep_alive(const llhttp_t* parser); + +/* Make further calls of `llhttp_execute()` return `HPE_PAUSED` and set + * appropriate error reason. + * + * Important: do not call this from user callbacks! 
User callbacks must return + * `HPE_PAUSED` if pausing is required. + */ +LLHTTP_EXPORT +void llhttp_pause(llhttp_t* parser); + +/* Might be called to resume the execution after the pause in user's callback. + * See `llhttp_execute()` above for details. + * + * Call this only if `llhttp_execute()` returns `HPE_PAUSED`. + */ +LLHTTP_EXPORT +void llhttp_resume(llhttp_t* parser); + +/* Might be called to resume the execution after the pause in user's callback. + * See `llhttp_execute()` above for details. + * + * Call this only if `llhttp_execute()` returns `HPE_PAUSED_UPGRADE` + */ +LLHTTP_EXPORT +void llhttp_resume_after_upgrade(llhttp_t* parser); + +/* Returns the latest return error */ +LLHTTP_EXPORT +llhttp_errno_t llhttp_get_errno(const llhttp_t* parser); + +/* Returns the verbal explanation of the latest returned error. + * + * Note: User callback should set error reason when returning the error. See + * `llhttp_set_error_reason()` for details. + */ +LLHTTP_EXPORT +const char* llhttp_get_error_reason(const llhttp_t* parser); + +/* Assign verbal description to the returned error. Must be called in user + * callbacks right before returning the errno. + * + * Note: `HPE_USER` error code might be useful in user callbacks. + */ +LLHTTP_EXPORT +void llhttp_set_error_reason(llhttp_t* parser, const char* reason); + +/* Returns the pointer to the last parsed byte before the returned error. The + * pointer is relative to the `data` argument of `llhttp_execute()`. + * + * Note: this method might be useful for counting the number of parsed bytes. + */ +LLHTTP_EXPORT +const char* llhttp_get_error_pos(const llhttp_t* parser); + +/* Returns textual name of error code */ +LLHTTP_EXPORT +const char* llhttp_errno_name(llhttp_errno_t err); + +/* Returns textual name of HTTP method */ +LLHTTP_EXPORT +const char* llhttp_method_name(llhttp_method_t method); + +/* Returns textual name of HTTP status */ +LLHTTP_EXPORT +const char* llhttp_status_name(llhttp_status_t status); + +/* Enables/disables lenient header value parsing (disabled by default). + * + * Lenient parsing disables header value token checks, extending llhttp's + * protocol support to highly non-compliant clients/server. No + * `HPE_INVALID_HEADER_TOKEN` will be raised for incorrect header values when + * lenient parsing is "on". + * + * **Enabling this flag can pose a security issue since you will be exposed to + * request smuggling attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_headers(llhttp_t* parser, int enabled); + + +/* Enables/disables lenient handling of conflicting `Transfer-Encoding` and + * `Content-Length` headers (disabled by default). + * + * Normally `llhttp` would error when `Transfer-Encoding` is present in + * conjunction with `Content-Length`. This error is important to prevent HTTP + * request smuggling, but may be less desirable for small number of cases + * involving legacy servers. + * + * **Enabling this flag can pose a security issue since you will be exposed to + * request smuggling attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled); + + +/* Enables/disables lenient handling of `Connection: close` and HTTP/1.0 + * requests responses. + * + * Normally `llhttp` would error on (in strict mode) or discard (in loose mode) + * the HTTP request/response after the request/response with `Connection: close` + * and `Content-Length`. 
This is important to prevent cache poisoning attacks, + * but might interact badly with outdated and insecure clients. With this flag + * the extra request/response will be parsed normally. + * + * **Enabling this flag can pose a security issue since you will be exposed to + * poisoning attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_keep_alive(llhttp_t* parser, int enabled); + +/* Enables/disables lenient handling of `Transfer-Encoding` header. + * + * Normally `llhttp` would error when a `Transfer-Encoding` has `chunked` value + * and another value after it (either in a single header or in multiple + * headers whose value are internally joined using `, `). + * This is mandated by the spec to reliably determine request body size and thus + * avoid request smuggling. + * With this flag the extra value will be parsed normally. + * + * **Enabling this flag can pose a security issue since you will be exposed to + * request smuggling attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_transfer_encoding(llhttp_t* parser, int enabled); + +/* Enables/disables lenient handling of HTTP version. + * + * Normally `llhttp` would error when the HTTP version in the request or status line + * is not `0.9`, `1.0`, `1.1` or `2.0`. + * With this flag the invalid value will be parsed normally. + * + * **Enabling this flag can pose a security issue since you will allow unsupported + * HTTP versions. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_version(llhttp_t* parser, int enabled); + +/* Enables/disables lenient handling of additional data received after a message ends + * and keep-alive is disabled. + * + * Normally `llhttp` would error when additional unexpected data is received if the message + * contains the `Connection` header with `close` value. + * With this flag the extra data will discarded without throwing an error. + * + * **Enabling this flag can pose a security issue since you will be exposed to + * poisoning attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_data_after_close(llhttp_t* parser, int enabled); + +/* Enables/disables lenient handling of incomplete CRLF sequences. + * + * Normally `llhttp` would error when a CR is not followed by LF when terminating the + * request line, the status line, the headers or a chunk header. + * With this flag only a CR is required to terminate such sections. + * + * **Enabling this flag can pose a security issue since you will be exposed to + * request smuggling attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_optional_lf_after_cr(llhttp_t* parser, int enabled); + +/* + * Enables/disables lenient handling of line separators. + * + * Normally `llhttp` would error when a LF is not preceded by CR when terminating the + * request line, the status line, the headers, a chunk header or a chunk data. + * With this flag only a LF is required to terminate such sections. + * + * **Enabling this flag can pose a security issue since you will be exposed to + * request smuggling attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled); + +/* Enables/disables lenient handling of chunks not separated via CRLF. + * + * Normally `llhttp` would error when after a chunk data a CRLF is missing before + * starting a new chunk. + * With this flag the new chunk can start immediately after the previous one. 
+ * + * **Enabling this flag can pose a security issue since you will be exposed to + * request smuggling attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_optional_crlf_after_chunk(llhttp_t* parser, int enabled); + +/* Enables/disables lenient handling of spaces after chunk size. + * + * Normally `llhttp` would error when after a chunk size is followed by one or more + * spaces are present instead of a CRLF or `;`. + * With this flag this check is disabled. + * + * **Enabling this flag can pose a security issue since you will be exposed to + * request smuggling attacks. USE WITH CAUTION!** + */ +LLHTTP_EXPORT +void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled); + +#ifdef __cplusplus +} /* extern "C" */ +#endif +#endif /* INCLUDE_LLHTTP_API_H_ */ diff --git a/llhttp/src/native/http.c b/llhttp/src/native/http.c new file mode 100644 index 0000000..1ab91a5 --- /dev/null +++ b/llhttp/src/native/http.c @@ -0,0 +1,170 @@ +#include +#ifndef LLHTTP__TEST +# include "llhttp.h" +#else +# define llhttp_t llparse_t +#endif /* */ + +int llhttp_message_needs_eof(const llhttp_t* parser); +int llhttp_should_keep_alive(const llhttp_t* parser); + +int llhttp__before_headers_complete(llhttp_t* parser, const char* p, + const char* endp) { + /* Set this here so that on_headers_complete() callbacks can see it */ + if ((parser->flags & F_UPGRADE) && + (parser->flags & F_CONNECTION_UPGRADE)) { + /* For responses, "Upgrade: foo" and "Connection: upgrade" are + * mandatory only when it is a 101 Switching Protocols response, + * otherwise it is purely informational, to announce support. + */ + parser->upgrade = + (parser->type == HTTP_REQUEST || parser->status_code == 101); + } else { + parser->upgrade = (parser->method == HTTP_CONNECT); + } + return 0; +} + + +/* Return values: + * 0 - No body, `restart`, message_complete + * 1 - CONNECT request, `restart`, message_complete, and pause + * 2 - chunk_size_start + * 3 - body_identity + * 4 - body_identity_eof + * 5 - invalid transfer-encoding for request + */ +int llhttp__after_headers_complete(llhttp_t* parser, const char* p, + const char* endp) { + int hasBody; + + hasBody = parser->flags & F_CHUNKED || parser->content_length > 0; + if ( + (parser->upgrade && (parser->method == HTTP_CONNECT || + (parser->flags & F_SKIPBODY) || !hasBody)) || + /* See RFC 2616 section 4.4 - 1xx e.g. Continue */ + (parser->type == HTTP_RESPONSE && parser->status_code == 101) + ) { + /* Exit, the rest of the message is in a different protocol. 
*/ + return 1; + } + + if (parser->type == HTTP_RESPONSE && parser->status_code == 100) { + /* No body, restart as the message is complete */ + return 0; + } + + /* See RFC 2616 section 4.4 */ + if ( + parser->flags & F_SKIPBODY || /* response to a HEAD request */ + ( + parser->type == HTTP_RESPONSE && ( + parser->status_code == 102 || /* Processing */ + parser->status_code == 103 || /* Early Hints */ + parser->status_code == 204 || /* No Content */ + parser->status_code == 304 /* Not Modified */ + ) + ) + ) { + return 0; + } else if (parser->flags & F_CHUNKED) { + /* chunked encoding - ignore Content-Length header, prepare for a chunk */ + return 2; + } else if (parser->flags & F_TRANSFER_ENCODING) { + if (parser->type == HTTP_REQUEST && + (parser->lenient_flags & LENIENT_CHUNKED_LENGTH) == 0 && + (parser->lenient_flags & LENIENT_TRANSFER_ENCODING) == 0) { + /* RFC 7230 3.3.3 */ + + /* If a Transfer-Encoding header field + * is present in a request and the chunked transfer coding is not + * the final encoding, the message body length cannot be determined + * reliably; the server MUST respond with the 400 (Bad Request) + * status code and then close the connection. + */ + return 5; + } else { + /* RFC 7230 3.3.3 */ + + /* If a Transfer-Encoding header field is present in a response and + * the chunked transfer coding is not the final encoding, the + * message body length is determined by reading the connection until + * it is closed by the server. + */ + return 4; + } + } else { + if (!(parser->flags & F_CONTENT_LENGTH)) { + if (!llhttp_message_needs_eof(parser)) { + /* Assume content-length 0 - read the next */ + return 0; + } else { + /* Read body until EOF */ + return 4; + } + } else if (parser->content_length == 0) { + /* Content-Length header given but zero: Content-Length: 0\r\n */ + return 0; + } else { + /* Content-Length header given and non-zero */ + return 3; + } + } +} + + +int llhttp__after_message_complete(llhttp_t* parser, const char* p, + const char* endp) { + int should_keep_alive; + + should_keep_alive = llhttp_should_keep_alive(parser); + parser->finish = HTTP_FINISH_SAFE; + parser->flags = 0; + + /* NOTE: this is ignored in loose parsing mode */ + return should_keep_alive; +} + + +int llhttp_message_needs_eof(const llhttp_t* parser) { + if (parser->type == HTTP_REQUEST) { + return 0; + } + + /* See RFC 2616 section 4.4 */ + if (parser->status_code / 100 == 1 || /* 1xx e.g. 
Continue */ + parser->status_code == 204 || /* No Content */ + parser->status_code == 304 || /* Not Modified */ + (parser->flags & F_SKIPBODY)) { /* response to a HEAD request */ + return 0; + } + + /* RFC 7230 3.3.3, see `llhttp__after_headers_complete` */ + if ((parser->flags & F_TRANSFER_ENCODING) && + (parser->flags & F_CHUNKED) == 0) { + return 1; + } + + if (parser->flags & (F_CHUNKED | F_CONTENT_LENGTH)) { + return 0; + } + + return 1; +} + + +int llhttp_should_keep_alive(const llhttp_t* parser) { + if (parser->http_major > 0 && parser->http_minor > 0) { + /* HTTP/1.1 */ + if (parser->flags & F_CONNECTION_CLOSE) { + return 0; + } + } else { + /* HTTP/1.0 or earlier */ + if (!(parser->flags & F_CONNECTION_KEEP_ALIVE)) { + return 0; + } + } + + return !llhttp_message_needs_eof(parser); +} diff --git a/llhttp/test/fixtures/extra.c b/llhttp/test/fixtures/extra.c new file mode 100644 index 0000000..dadf8dc --- /dev/null +++ b/llhttp/test/fixtures/extra.c @@ -0,0 +1,457 @@ +#include + +#include "fixture.h" + +int llhttp__on_url(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("url", p, endp); +} + + +int llhttp__on_url_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "url complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_URL_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_url_schema(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("url.schema", p, endp); +} + + +int llhttp__on_url_host(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("url.host", p, endp); +} + + +int llhttp__on_url_path(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("url.path", p, endp); +} + + +int llhttp__on_url_query(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("url.query", p, endp); +} + + +int llhttp__on_url_fragment(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("url.fragment", p, endp); +} + + +#ifdef LLHTTP__TEST_HTTP + +void llhttp__test_init_request(llparse_t* s) { + s->type = HTTP_REQUEST; +} + + +void llhttp__test_init_response(llparse_t* s) { + s->type = HTTP_RESPONSE; +} + + +void llhttp__test_init_request_lenient_all(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= + LENIENT_HEADERS | LENIENT_CHUNKED_LENGTH | LENIENT_KEEP_ALIVE | + LENIENT_TRANSFER_ENCODING | LENIENT_VERSION | LENIENT_DATA_AFTER_CLOSE | + LENIENT_OPTIONAL_LF_AFTER_CR | LENIENT_OPTIONAL_CR_BEFORE_LF | + LENIENT_OPTIONAL_CRLF_AFTER_CHUNK; +} + + +void llhttp__test_init_response_lenient_all(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= + LENIENT_HEADERS | LENIENT_CHUNKED_LENGTH | LENIENT_KEEP_ALIVE | + LENIENT_TRANSFER_ENCODING | LENIENT_VERSION | LENIENT_DATA_AFTER_CLOSE | + LENIENT_OPTIONAL_LF_AFTER_CR | LENIENT_OPTIONAL_CR_BEFORE_LF | + LENIENT_OPTIONAL_CRLF_AFTER_CHUNK; +} + + +void llhttp__test_init_request_lenient_headers(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_HEADERS; +} + + +void llhttp__test_init_request_lenient_chunked_length(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_CHUNKED_LENGTH; +} + + +void 
llhttp__test_init_request_lenient_keep_alive(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_KEEP_ALIVE; +} + +void llhttp__test_init_request_lenient_transfer_encoding(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_TRANSFER_ENCODING; +} + + +void llhttp__test_init_request_lenient_version(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_VERSION; +} + + +void llhttp__test_init_response_lenient_keep_alive(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_KEEP_ALIVE; +} + +void llhttp__test_init_response_lenient_version(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_VERSION; +} + + +void llhttp__test_init_response_lenient_headers(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_HEADERS; +} + +void llhttp__test_init_request_lenient_data_after_close(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_DATA_AFTER_CLOSE; +} + +void llhttp__test_init_response_lenient_data_after_close(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_DATA_AFTER_CLOSE; +} + +void llhttp__test_init_request_lenient_optional_lf_after_cr(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_OPTIONAL_LF_AFTER_CR; +} + +void llhttp__test_init_response_lenient_optional_lf_after_cr(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_OPTIONAL_LF_AFTER_CR; +} + +void llhttp__test_init_request_lenient_optional_cr_before_lf(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_OPTIONAL_CR_BEFORE_LF; +} + +void llhttp__test_init_response_lenient_optional_cr_before_lf(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_OPTIONAL_CR_BEFORE_LF; +} + +void llhttp__test_init_request_lenient_optional_crlf_after_chunk(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_OPTIONAL_CRLF_AFTER_CHUNK; +} + +void llhttp__test_init_response_lenient_optional_crlf_after_chunk(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_OPTIONAL_CRLF_AFTER_CHUNK; +} + +void llhttp__test_init_request_lenient_spaces_after_chunk_size(llparse_t* s) { + llhttp__test_init_request(s); + s->lenient_flags |= LENIENT_SPACES_AFTER_CHUNK_SIZE; +} + +void llhttp__test_init_response_lenient_spaces_after_chunk_size(llparse_t* s) { + llhttp__test_init_response(s); + s->lenient_flags |= LENIENT_SPACES_AFTER_CHUNK_SIZE; +} + + +void llhttp__test_finish(llparse_t* s) { + llparse__print(NULL, NULL, "finish=%d", s->finish); +} + + +int llhttp__on_message_begin(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "message begin"); + + #ifdef LLHTTP__TEST_PAUSE_ON_MESSAGE_BEGIN + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_message_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "message complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_MESSAGE_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_status(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("status", p, endp); +} + + +int llhttp__on_status_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "status 
complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_STATUS_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_method(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench || s->type != HTTP_REQUEST) + return 0; + + return llparse__print_span("method", p, endp); +} + + +int llhttp__on_method_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "method complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_METHOD_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_version(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("version", p, endp); +} + + +int llhttp__on_version_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "version complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_VERSION_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + +int llhttp__on_header_field(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("header_field", p, endp); +} + + +int llhttp__on_header_field_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "header_field complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_HEADER_FIELD_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_header_value(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("header_value", p, endp); +} + + +int llhttp__on_header_value_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "header_value complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_HEADER_VALUE_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_headers_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + if (s->type == HTTP_REQUEST) { + llparse__print(p, endp, + "headers complete method=%d v=%d/%d flags=%x content_length=%llu", + s->method, s->http_major, s->http_minor, s->flags, s->content_length); + } else if (s->type == HTTP_RESPONSE) { + llparse__print(p, endp, + "headers complete status=%d v=%d/%d flags=%x content_length=%llu", + s->status_code, s->http_major, s->http_minor, s->flags, + s->content_length); + } else { + llparse__print(p, endp, "invalid headers complete"); + } + + #ifdef LLHTTP__TEST_PAUSE_ON_HEADERS_COMPLETE + return LLPARSE__ERROR_PAUSE; + #elif defined(LLHTTP__TEST_SKIP_BODY) + llparse__print(p, endp, "skip body"); + return 1; + #else + return 0; + #endif +} + + +int llhttp__on_body(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("body", p, endp); +} + + +int llhttp__on_chunk_header(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "chunk header len=%d", (int) s->content_length); + + #ifdef LLHTTP__TEST_PAUSE_ON_CHUNK_HEADER + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_chunk_extension_name(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("chunk_extension_name", p, endp); +} + + +int llhttp__on_chunk_extension_name_complete(llparse_t* s, 
const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "chunk_extension_name complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_CHUNK_EXTENSION_NAME + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_chunk_extension_value(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + return llparse__print_span("chunk_extension_value", p, endp); +} + + +int llhttp__on_chunk_extension_value_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "chunk_extension_value complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_CHUNK_EXTENSION_VALUE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + + +int llhttp__on_chunk_complete(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "chunk complete"); + + #ifdef LLHTTP__TEST_PAUSE_ON_CHUNK_COMPLETE + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + +int llhttp__on_reset(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + + llparse__print(p, endp, "reset"); + + #ifdef LLHTTP__TEST_PAUSE_ON_RESET + return LLPARSE__ERROR_PAUSE; + #else + return 0; + #endif +} + +#endif /* LLHTTP__TEST_HTTP */ diff --git a/llhttp/test/fixtures/index.ts b/llhttp/test/fixtures/index.ts new file mode 100644 index 0000000..1571f9d --- /dev/null +++ b/llhttp/test/fixtures/index.ts @@ -0,0 +1,116 @@ +import * as fs from 'fs'; +import { ICompilerResult, LLParse } from 'llparse'; +import { Dot } from 'llparse-dot'; +import { + Fixture, FixtureResult, IFixtureBuildOptions, +} from 'llparse-test-fixture'; +import * as path from 'path'; + +import * as llhttp from '../../src/llhttp'; + +export { FixtureResult }; + +export type TestType = 'request' | 'response' | 'request-finish' | 'response-finish' | + 'request-lenient-all' | 'response-lenient-all' | + 'request-lenient-headers' | 'response-lenient-headers' | + 'request-lenient-chunked-length' | 'request-lenient-transfer-encoding' | + 'request-lenient-keep-alive' | 'response-lenient-keep-alive' | + 'request-lenient-version' | 'response-lenient-version' | + 'request-lenient-data-after-close' | 'response-lenient-data-after-close' | + 'request-lenient-optional-lf-after-cr' | 'response-lenient-optional-lf-after-cr' | + 'request-lenient-optional-cr-before-lf' | 'response-lenient-optional-cr-before-lf' | + 'request-lenient-optional-crlf-after-chunk' | 'response-lenient-optional-crlf-after-chunk' | + 'request-lenient-spaces-after-chunk-size' | 'response-lenient-spaces-after-chunk-size' | + 'none' | 'url'; + +export const allowedTypes: TestType[] = [ + 'request', + 'response', + 'request-finish', + 'response-finish', + 'request-lenient-all', + 'response-lenient-all', + 'request-lenient-headers', + 'response-lenient-headers', + 'request-lenient-keep-alive', + 'response-lenient-keep-alive', + 'request-lenient-chunked-length', + 'request-lenient-transfer-encoding', + 'request-lenient-version', + 'response-lenient-version', + 'request-lenient-data-after-close', + 'response-lenient-data-after-close', + 'request-lenient-optional-lf-after-cr', + 'response-lenient-optional-lf-after-cr', + 'request-lenient-optional-cr-before-lf', + 'response-lenient-optional-cr-before-lf', + 'request-lenient-optional-crlf-after-chunk', + 'response-lenient-optional-crlf-after-chunk', + 'request-lenient-spaces-after-chunk-size', + 'response-lenient-spaces-after-chunk-size', +]; + +const 
BUILD_DIR = path.join(__dirname, '..', 'tmp'); +const CHEADERS_FILE = path.join(BUILD_DIR, 'cheaders.h'); + +const cheaders = new llhttp.CHeaders().build(); +try { + fs.mkdirSync(BUILD_DIR); +} catch (e) { + // no-op +} +fs.writeFileSync(CHEADERS_FILE, cheaders); + +const fixtures = new Fixture({ + buildDir: path.join(__dirname, '..', 'tmp'), + extra: [ + '-msse4.2', + '-DLLHTTP__TEST', + '-DLLPARSE__ERROR_PAUSE=' + llhttp.constants.ERROR.PAUSED, + '-include', CHEADERS_FILE, + path.join(__dirname, 'extra.c'), + ], + maxParallel: process.env.LLPARSE_DEBUG ? 1 : undefined, +}); + +const cache: Map = new Map(); + +export async function build( + llparse: LLParse, node: any, outFile: string, + options: IFixtureBuildOptions = {}, + ty: TestType = 'none'): Promise { + const dot = new Dot(); + fs.writeFileSync(path.join(BUILD_DIR, outFile + '.dot'), + dot.build(node)); + + let artifacts: ICompilerResult; + if (cache.has(node)) { + artifacts = cache.get(node)!; + } else { + artifacts = llparse.build(node, { + c: { header: outFile }, + debug: process.env.LLPARSE_DEBUG ? 'llparse__debug' : undefined, + }); + cache.set(node, artifacts); + } + + const extra = options.extra === undefined ? [] : options.extra.slice(); + + if (allowedTypes.includes(ty)) { + extra.push( + `-DLLPARSE__TEST_INIT=llhttp__test_init_${ty.replace(/-/g, '_')}`); + } + + if (ty === 'request-finish' || ty === 'response-finish') { + if (ty === 'request-finish') { + extra.push('-DLLPARSE__TEST_INIT=llhttp__test_init_request'); + } else { + extra.push('-DLLPARSE__TEST_INIT=llhttp__test_init_response'); + } + extra.push('-DLLPARSE__TEST_FINISH=llhttp__test_finish'); + } + + return await fixtures.build(artifacts, outFile, Object.assign(options, { + extra, + })); +} diff --git a/llhttp/test/fuzzers/fuzz_parser.c b/llhttp/test/fuzzers/fuzz_parser.c new file mode 100644 index 0000000..60d00ae --- /dev/null +++ b/llhttp/test/fuzzers/fuzz_parser.c @@ -0,0 +1,45 @@ +#include "llhttp.h" +#include +#include +#include + +int handle_on_message_complete(llhttp_t *arg) { return 0; } + +int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) { + llhttp_t parser; + llhttp_settings_t settings; + llhttp_type_t http_type; + + /* We need four bytes to determine variable parameters */ + if (size < 4) { + return 0; + } + + int headers = (data[0] & 0x01) == 1; + int chunked_length = (data[1] & 0x01) == 1; + int keep_alive = (data[2] & 0x01) == 1; + if (data[0] % 3 == 0) { + http_type = HTTP_BOTH; + } else if (data[0] % 3 == 1) { + http_type = HTTP_REQUEST; + } else { + http_type = HTTP_RESPONSE; + } + data += 4; + size -= 4; + + /* Initialize user callbacks and settings */ + llhttp_settings_init(&settings); + + /* Set user callback */ + settings.on_message_complete = handle_on_message_complete; + + llhttp_init(&parser, http_type, &settings); + llhttp_set_lenient_headers(&parser, headers); + llhttp_set_lenient_chunked_length(&parser, chunked_length); + llhttp_set_lenient_keep_alive(&parser, keep_alive); + + llhttp_execute(&parser, data, size); + + return 0; +} diff --git a/llhttp/test/md-test.ts b/llhttp/test/md-test.ts new file mode 100644 index 0000000..0c24e18 --- /dev/null +++ b/llhttp/test/md-test.ts @@ -0,0 +1,269 @@ +import * as assert from 'assert'; +import * as fs from 'fs'; +import { LLParse } from 'llparse'; +import { Group, MDGator, Metadata, Test } from 'mdgator'; +import * as path from 'path'; +import * as vm from 'vm'; + +import * as llhttp from '../src/llhttp'; +import {IHTTPResult} from '../src/llhttp/http'; +import {IURLResult} from 
'../src/llhttp/url'; +import { allowedTypes, build, FixtureResult, TestType } from './fixtures'; + +// +// Cache nodes/llparse instances ahead of time +// (different types of tests will re-use them) +// + +interface INodeCacheEntry { + llparse: LLParse; + entry: IHTTPResult['entry']; +} + +interface IUrlCacheEntry { + llparse: LLParse; + entry: IURLResult['entry']['normal']; +} + +const modeCache = new Map(); + +function buildNode() { + const p = new LLParse(); + const instance = new llhttp.HTTP(p); + + return { llparse: p, entry: instance.build().entry }; +} + +function buildURL() { + const p = new LLParse(); + const instance = new llhttp.URL(p, true); + + const node = instance.build(); + + // Loop + node.exit.toHTTP.otherwise(node.entry.normal); + node.exit.toHTTP09.otherwise(node.entry.normal); + + return { llparse: p, entry: node.entry.normal }; +} + +// +// Build binaries using cached nodes/llparse +// + +async function buildMode(ty: TestType, meta: any) + : Promise { + + const cacheKey = `${ty}:${JSON.stringify(meta || {})}`; + let entry = modeCache.get(cacheKey); + + if (entry) { + return entry; + } + + let node; + let prefix: string; + let extra: string[]; + if (ty === 'url') { + node = buildURL(); + prefix = 'url'; + extra = []; + } else { + node = buildNode(); + prefix = 'http'; + extra = [ + '-DLLHTTP__TEST_HTTP', + path.join(__dirname, '..', 'src', 'native', 'http.c'), + ]; + } + + if (meta.pause) { + extra.push(`-DLLHTTP__TEST_PAUSE_${meta.pause.toUpperCase()}=1`); + } + + if (meta.skipBody) { + extra.push('-DLLHTTP__TEST_SKIP_BODY=1'); + } + + entry = await build(node.llparse, node.entry, `${prefix}-${ty}`, { + extra, + }, ty); + + modeCache.set(cacheKey, entry); + return entry; +} + +interface IFixtureMap { + [key: string]: { [key: string]: Promise }; +} + +// +// Run test suite +// + +function run(name: string): void { + const md = new MDGator(); + + const raw = fs.readFileSync(path.join(__dirname, name + '.md')).toString(); + const groups = md.parse(raw); + + function runSingleTest(ty: TestType, meta: any, + input: string, + expected: ReadonlyArray): void { + it(`should pass for type="${ty}"`, async () => { + const binary = await buildMode(ty, meta); + await binary.check(input, expected, { + noScan: meta.noScan === true, + }); + }); + } + + function runTest(test: Test) { + describe(test.name + ` at ${name}.md:${test.line + 1}`, () => { + let types: TestType[] = []; + + const isURL = test.values.has('url'); + const inputKey = isURL ? 
'url' : 'http'; + + assert(test.values.has(inputKey), + `Missing "${inputKey}" code in md file`); + assert.strictEqual(test.values.get(inputKey)!.length, 1, + `Expected just one "${inputKey}" input`); + + let meta: Metadata; + if (test.meta.has(inputKey)) { + meta = test.meta.get(inputKey)![0]!; + } else { + assert(isURL, 'Missing required http metadata'); + meta = {}; + } + + if (isURL) { + types = [ 'url' ]; + } else { + assert(meta.hasOwnProperty('type'), 'Missing required `type` metadata'); + + if (meta.type) { + if (!allowedTypes.includes(meta.type)) { + throw new Error(`Invalid value of \`type\` metadata: "${meta.type}"`); + } + + types.push(meta.type); + } + } + + assert(test.values.has('log'), 'Missing `log` code in md file'); + + assert.strictEqual(test.values.get('log')!.length, 1, + 'Expected just one output'); + + let input: string = test.values.get(inputKey)![0]; + let expected: string = test.values.get('log')![0]; + + // Remove trailing newline + input = input.replace(/\n$/, ''); + + // Remove escaped newlines + input = input.replace(/\\(\r\n|\r|\n)/g, ''); + + // Normalize all newlines + input = input.replace(/\r\n|\r|\n/g, '\r\n'); + + // Replace escaped CRLF, tabs, form-feed + input = input.replace(/\\r/g, '\r'); + input = input.replace(/\\n/g, '\n'); + input = input.replace(/\\t/g, '\t'); + input = input.replace(/\\f/g, '\f'); + input = input.replace(/\\x([0-9a-fA-F]+)/g, (all, hex) => { + return String.fromCharCode(parseInt(hex, 16)); + }); + + // Useful in token tests + input = input.replace(/\\([0-7]{1,3})/g, (_, digits) => { + return String.fromCharCode(parseInt(digits, 8)); + }); + + // Evaluate inline JavaScript + input = input.replace(/\$\{(.+?)\}/g, (_, code) => { + return vm.runInNewContext(code) + ''; + }); + + // Escape first symbol `\r` or `\n`, `|`, `&` for Windows + if (process.platform === 'win32') { + const firstByte = Buffer.from(input)[0]; + if (firstByte === 0x0a || firstByte === 0x0d) { + input = '\\' + input; + } + + input = input.replace(/\|/g, '^|'); + input = input.replace(/&/g, '^&'); + } + + // Replace escaped tabs/form-feed in expected too + expected = expected.replace(/\\t/g, '\t'); + expected = expected.replace(/\\f/g, '\f'); + + // Split + const expectedLines = expected.split(/\n/g).slice(0, -1); + + const fullExpected = expectedLines.map((line) => { + if (line.startsWith('/')) { + return new RegExp(line.trim().slice(1, -1)); + } else { + return line; + } + }); + + for (const ty of types) { + if (meta.skip === true || (process.env.ONLY === 'true' && !meta.only)) { + continue; + } + + runSingleTest(ty, meta, input, fullExpected); + } + }); + } + + function runGroup(group: Group) { + describe(group.name + ` at ${name}.md:${group.line + 1}`, function() { + this.timeout(60000); + + for (const child of group.children) { + runGroup(child); + } + + for (const test of group.tests) { + runTest(test); + } + }); + } + + for (const group of groups) { + runGroup(group); + } +} + +run('request/sample'); +run('request/lenient-headers'); +run('request/lenient-version'); +run('request/method'); +run('request/uri'); +run('request/connection'); +run('request/content-length'); +run('request/transfer-encoding'); +run('request/invalid'); +run('request/finish'); +run('request/pausing'); +run('request/pipelining'); + +run('response/sample'); +run('response/connection'); +run('response/content-length'); +run('response/transfer-encoding'); +run('response/invalid'); +run('response/finish'); +run('response/lenient-version'); +run('response/pausing'); 
+run('response/pipelining'); + +run('url'); diff --git a/llhttp/test/request/connection.md b/llhttp/test/request/connection.md new file mode 100644 index 0000000..a03242e --- /dev/null +++ b/llhttp/test/request/connection.md @@ -0,0 +1,732 @@ +Connection header +================= + +## `keep-alive` + +### Setting flag + + +```http +PUT /url HTTP/1.1 +Connection: keep-alive + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=10 span[header_value]="keep-alive" +off=43 header_value complete +off=45 headers complete method=4 v=1/1 flags=1 content_length=0 +off=45 message complete +``` + +### Restarting when keep-alive is explicitly + + +```http +PUT /url HTTP/1.1 +Connection: keep-alive + +PUT /url HTTP/1.1 +Connection: keep-alive + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=10 span[header_value]="keep-alive" +off=43 header_value complete +off=45 headers complete method=4 v=1/1 flags=1 content_length=0 +off=45 message complete +off=45 reset +off=45 message begin +off=45 len=3 span[method]="PUT" +off=48 method complete +off=49 len=4 span[url]="/url" +off=54 url complete +off=59 len=3 span[version]="1.1" +off=62 version complete +off=64 len=10 span[header_field]="Connection" +off=75 header_field complete +off=76 len=10 span[header_value]="keep-alive" +off=88 header_value complete +off=90 headers complete method=4 v=1/1 flags=1 content_length=0 +off=90 message complete +``` + +### No restart when keep-alive is off (1.0) + + +```http +PUT /url HTTP/1.0 + +PUT /url HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.0" +off=17 version complete +off=21 headers complete method=4 v=1/0 flags=0 content_length=0 +off=21 message complete +off=22 error code=5 reason="Data after `Connection: close`" +``` + +### Resetting flags when keep-alive is off (1.0, lenient) + +Even though we allow restarts in loose mode, the flags should be still set to +`0` upon restart. 
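+As a point of reference, here is a minimal C sketch of driving the lenient
+keep-alive switch directly. It is not part of the generated fixtures and
+assumes only the public API declared in `llhttp.h` above: with the flag
+enabled the second request should parse, while without it execution stops
+with `Data after Connection: close`, as in the previous test.
+
+```c
+#include <stdio.h>
+#include <string.h>
+#include "llhttp.h"
+
+int main(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  llhttp_init(&parser, HTTP_REQUEST, &settings);
+
+  /* Allow a follow-up request even though HTTP/1.0 implies `close`. */
+  llhttp_set_lenient_keep_alive(&parser, 1);
+
+  const char req[] =
+      "PUT /url HTTP/1.0\r\n\r\n"
+      "PUT /url HTTP/1.1\r\n\r\n";
+
+  llhttp_errno_t err = llhttp_execute(&parser, req, strlen(req));
+  printf("errno=%s\n", llhttp_errno_name(err));
+  return 0;
+}
+```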
+ + +```http +PUT /url HTTP/1.0 +Content-Length: 0 + +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.0" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=1 span[header_value]="0" +off=38 header_value complete +off=40 headers complete method=4 v=1/0 flags=20 content_length=0 +off=40 message complete +off=40 reset +off=40 message begin +off=40 len=3 span[method]="PUT" +off=43 method complete +off=44 len=4 span[url]="/url" +off=49 url complete +off=54 len=3 span[version]="1.1" +off=57 version complete +off=59 len=17 span[header_field]="Transfer-Encoding" +off=77 header_field complete +off=78 len=7 span[header_value]="chunked" +off=87 header_value complete +off=89 headers complete method=4 v=1/1 flags=208 content_length=0 +``` + +### CRLF between requests, implicit `keep-alive` + + +```http +POST / HTTP/1.1 +Host: www.example.com +Content-Type: application/x-www-form-urlencoded +Content-Length: 4 + +q=42 + +GET / HTTP/1.1 +``` +_Note the trailing CRLF above_ + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=15 span[header_value]="www.example.com" +off=40 header_value complete +off=40 len=12 span[header_field]="Content-Type" +off=53 header_field complete +off=54 len=33 span[header_value]="application/x-www-form-urlencoded" +off=89 header_value complete +off=89 len=14 span[header_field]="Content-Length" +off=104 header_field complete +off=105 len=1 span[header_value]="4" +off=108 header_value complete +off=110 headers complete method=3 v=1/1 flags=20 content_length=4 +off=110 len=4 span[body]="q=42" +off=114 message complete +off=118 reset +off=118 message begin +off=118 len=3 span[method]="GET" +off=121 method complete +off=122 len=1 span[url]="/" +off=124 url complete +off=129 len=3 span[version]="1.1" +off=132 version complete +``` + +### Not treating `\r` as `-` + + +```http +PUT /url HTTP/1.1 +Connection: keep\ralive + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=4 span[header_value]="keep" +off=36 error code=3 reason="Missing expected LF after header value" +``` + +## `close` + +### Setting flag on `close` + + +```http +PUT /url HTTP/1.1 +Connection: close + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=5 span[header_value]="close" +off=38 header_value complete +off=40 headers complete method=4 v=1/1 flags=2 content_length=0 +off=40 message complete +``` + +### CRLF between requests, explicit `close` + +`close` means closed connection + + +```http +POST / HTTP/1.1 +Host: www.example.com +Content-Type: application/x-www-form-urlencoded +Content-Length: 4 +Connection: close + +q=42 + +GET / HTTP/1.1 +``` +_Note the trailing CRLF above_ + +```log 
+off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=15 span[header_value]="www.example.com" +off=40 header_value complete +off=40 len=12 span[header_field]="Content-Type" +off=53 header_field complete +off=54 len=33 span[header_value]="application/x-www-form-urlencoded" +off=89 header_value complete +off=89 len=14 span[header_field]="Content-Length" +off=104 header_field complete +off=105 len=1 span[header_value]="4" +off=108 header_value complete +off=108 len=10 span[header_field]="Connection" +off=119 header_field complete +off=120 len=5 span[header_value]="close" +off=127 header_value complete +off=129 headers complete method=3 v=1/1 flags=22 content_length=4 +off=129 len=4 span[body]="q=42" +off=133 message complete +off=138 error code=5 reason="Data after `Connection: close`" +``` + +### CRLF between requests, explicit `close` (lenient) + +Loose mode is more lenient, and allows further requests. + + +```http +POST / HTTP/1.1 +Host: www.example.com +Content-Type: application/x-www-form-urlencoded +Content-Length: 4 +Connection: close + +q=42 + +GET / HTTP/1.1 +``` +_Note the trailing CRLF above_ + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=15 span[header_value]="www.example.com" +off=40 header_value complete +off=40 len=12 span[header_field]="Content-Type" +off=53 header_field complete +off=54 len=33 span[header_value]="application/x-www-form-urlencoded" +off=89 header_value complete +off=89 len=14 span[header_field]="Content-Length" +off=104 header_field complete +off=105 len=1 span[header_value]="4" +off=108 header_value complete +off=108 len=10 span[header_field]="Connection" +off=119 header_field complete +off=120 len=5 span[header_value]="close" +off=127 header_value complete +off=129 headers complete method=3 v=1/1 flags=22 content_length=4 +off=129 len=4 span[body]="q=42" +off=133 message complete +off=137 reset +off=137 message begin +off=137 len=3 span[method]="GET" +off=140 method complete +off=141 len=1 span[url]="/" +off=143 url complete +off=148 len=3 span[version]="1.1" +off=151 version complete +``` + +## Parsing multiple tokens + +### Sample + + +```http +PUT /url HTTP/1.1 +Connection: close, token, upgrade, token, keep-alive + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=40 span[header_value]="close, token, upgrade, token, keep-alive" +off=73 header_value complete +off=75 headers complete method=4 v=1/1 flags=7 content_length=0 +off=75 message complete +``` + +### Multiple tokens with folding + + +```http +GET /demo HTTP/1.1 +Host: example.com +Connection: Something, + Upgrade, ,Keep-Alive +Sec-WebSocket-Key2: 12998 5 Y3 1 .P00 +Sec-WebSocket-Protocol: sample +Upgrade: WebSocket +Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5 +Origin: http://example.com + +Hot diggity dogg +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=5 span[url]="/demo" +off=10 
url complete +off=15 len=3 span[version]="1.1" +off=18 version complete +off=20 len=4 span[header_field]="Host" +off=25 header_field complete +off=26 len=11 span[header_value]="example.com" +off=39 header_value complete +off=39 len=10 span[header_field]="Connection" +off=50 header_field complete +off=51 len=10 span[header_value]="Something," +off=63 len=21 span[header_value]=" Upgrade, ,Keep-Alive" +off=86 header_value complete +off=86 len=18 span[header_field]="Sec-WebSocket-Key2" +off=105 header_field complete +off=106 len=18 span[header_value]="12998 5 Y3 1 .P00" +off=126 header_value complete +off=126 len=22 span[header_field]="Sec-WebSocket-Protocol" +off=149 header_field complete +off=150 len=6 span[header_value]="sample" +off=158 header_value complete +off=158 len=7 span[header_field]="Upgrade" +off=166 header_field complete +off=167 len=9 span[header_value]="WebSocket" +off=178 header_value complete +off=178 len=18 span[header_field]="Sec-WebSocket-Key1" +off=197 header_field complete +off=198 len=20 span[header_value]="4 @1 46546xW%0l 1 5" +off=220 header_value complete +off=220 len=6 span[header_field]="Origin" +off=227 header_field complete +off=228 len=18 span[header_value]="http://example.com" +off=248 header_value complete +off=250 headers complete method=1 v=1/1 flags=15 content_length=0 +off=250 message complete +off=250 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### Multiple tokens with folding and LWS + + +```http +GET /demo HTTP/1.1 +Connection: keep-alive, upgrade +Upgrade: WebSocket + +Hot diggity dogg +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=5 span[url]="/demo" +off=10 url complete +off=15 len=3 span[version]="1.1" +off=18 version complete +off=20 len=10 span[header_field]="Connection" +off=31 header_field complete +off=32 len=19 span[header_value]="keep-alive, upgrade" +off=53 header_value complete +off=53 len=7 span[header_field]="Upgrade" +off=61 header_field complete +off=62 len=9 span[header_value]="WebSocket" +off=73 header_value complete +off=75 headers complete method=1 v=1/1 flags=15 content_length=0 +off=75 message complete +off=75 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### Multiple tokens with folding, LWS, and CRLF + + +```http +GET /demo HTTP/1.1 +Connection: keep-alive, \r\n upgrade +Upgrade: WebSocket + +Hot diggity dogg +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=5 span[url]="/demo" +off=10 url complete +off=15 len=3 span[version]="1.1" +off=18 version complete +off=20 len=10 span[header_field]="Connection" +off=31 header_field complete +off=32 len=12 span[header_value]="keep-alive, " +off=46 len=8 span[header_value]=" upgrade" +off=56 header_value complete +off=56 len=7 span[header_field]="Upgrade" +off=64 header_field complete +off=65 len=9 span[header_value]="WebSocket" +off=76 header_value complete +off=78 headers complete method=1 v=1/1 flags=15 content_length=0 +off=78 message complete +off=78 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### Invalid whitespace token with `Connection` header field + + +```http +PUT /url HTTP/1.1 +Connection : upgrade +Content-Length: 4 +Upgrade: ws + +abcdefgh +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 error code=10 reason="Invalid header field 
char" +``` + +### Invalid whitespace token with `Connection` header field (lenient) + + +```http +PUT /url HTTP/1.1 +Connection : upgrade +Content-Length: 4 +Upgrade: ws + +abcdefgh +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=11 span[header_field]="Connection " +off=31 header_field complete +off=32 len=7 span[header_value]="upgrade" +off=41 header_value complete +off=41 len=14 span[header_field]="Content-Length" +off=56 header_field complete +off=57 len=1 span[header_value]="4" +off=60 header_value complete +off=60 len=7 span[header_field]="Upgrade" +off=68 header_field complete +off=69 len=2 span[header_value]="ws" +off=73 header_value complete +off=75 headers complete method=4 v=1/1 flags=34 content_length=4 +off=75 len=4 span[body]="abcd" +off=79 message complete +off=79 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +## `upgrade` + +### Setting a flag and pausing + + +```http +PUT /url HTTP/1.1 +Connection: upgrade +Upgrade: ws + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=7 span[header_value]="upgrade" +off=40 header_value complete +off=40 len=7 span[header_field]="Upgrade" +off=48 header_field complete +off=49 len=2 span[header_value]="ws" +off=53 header_value complete +off=55 headers complete method=4 v=1/1 flags=14 content_length=0 +off=55 message complete +off=55 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### Emitting part of body and pausing + + +```http +PUT /url HTTP/1.1 +Connection: upgrade +Content-Length: 4 +Upgrade: ws + +abcdefgh +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=7 span[header_value]="upgrade" +off=40 header_value complete +off=40 len=14 span[header_field]="Content-Length" +off=55 header_field complete +off=56 len=1 span[header_value]="4" +off=59 header_value complete +off=59 len=7 span[header_field]="Upgrade" +off=67 header_field complete +off=68 len=2 span[header_value]="ws" +off=72 header_value complete +off=74 headers complete method=4 v=1/1 flags=34 content_length=4 +off=74 len=4 span[body]="abcd" +off=78 message complete +off=78 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### Upgrade GET request + + +```http +GET /demo HTTP/1.1 +Host: example.com +Connection: Upgrade +Sec-WebSocket-Key2: 12998 5 Y3 1 .P00 +Sec-WebSocket-Protocol: sample +Upgrade: WebSocket +Sec-WebSocket-Key1: 4 @1 46546xW%0l 1 5 +Origin: http://example.com + +Hot diggity dogg +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=5 span[url]="/demo" +off=10 url complete +off=15 len=3 span[version]="1.1" +off=18 version complete +off=20 len=4 span[header_field]="Host" +off=25 header_field complete +off=26 len=11 span[header_value]="example.com" +off=39 header_value complete +off=39 len=10 span[header_field]="Connection" +off=50 header_field complete +off=51 len=7 span[header_value]="Upgrade" +off=60 header_value complete +off=60 len=18 
span[header_field]="Sec-WebSocket-Key2" +off=79 header_field complete +off=80 len=18 span[header_value]="12998 5 Y3 1 .P00" +off=100 header_value complete +off=100 len=22 span[header_field]="Sec-WebSocket-Protocol" +off=123 header_field complete +off=124 len=6 span[header_value]="sample" +off=132 header_value complete +off=132 len=7 span[header_field]="Upgrade" +off=140 header_field complete +off=141 len=9 span[header_value]="WebSocket" +off=152 header_value complete +off=152 len=18 span[header_field]="Sec-WebSocket-Key1" +off=171 header_field complete +off=172 len=20 span[header_value]="4 @1 46546xW%0l 1 5" +off=194 header_value complete +off=194 len=6 span[header_field]="Origin" +off=201 header_field complete +off=202 len=18 span[header_value]="http://example.com" +off=222 header_value complete +off=224 headers complete method=1 v=1/1 flags=14 content_length=0 +off=224 message complete +off=224 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### Upgrade POST request + + +```http +POST /demo HTTP/1.1 +Host: example.com +Connection: Upgrade +Upgrade: HTTP/2.0 +Content-Length: 15 + +sweet post body\ +Hot diggity dogg +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=5 span[url]="/demo" +off=11 url complete +off=16 len=3 span[version]="1.1" +off=19 version complete +off=21 len=4 span[header_field]="Host" +off=26 header_field complete +off=27 len=11 span[header_value]="example.com" +off=40 header_value complete +off=40 len=10 span[header_field]="Connection" +off=51 header_field complete +off=52 len=7 span[header_value]="Upgrade" +off=61 header_value complete +off=61 len=7 span[header_field]="Upgrade" +off=69 header_field complete +off=70 len=8 span[header_value]="HTTP/2.0" +off=80 header_value complete +off=80 len=14 span[header_field]="Content-Length" +off=95 header_field complete +off=96 len=2 span[header_value]="15" +off=100 header_value complete +off=102 headers complete method=3 v=1/1 flags=34 content_length=15 +off=102 len=15 span[body]="sweet post body" +off=117 message complete +off=117 error code=22 reason="Pause on CONNECT/Upgrade" +``` diff --git a/llhttp/test/request/content-length.md b/llhttp/test/request/content-length.md new file mode 100644 index 0000000..524d183 --- /dev/null +++ b/llhttp/test/request/content-length.md @@ -0,0 +1,482 @@ +Content-Length header +===================== + +## `Content-Length` with zeroes + + +```http +PUT /url HTTP/1.1 +Content-Length: 003 + +abc +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=3 span[header_value]="003" +off=40 header_value complete +off=42 headers complete method=4 v=1/1 flags=20 content_length=3 +off=42 len=3 span[body]="abc" +off=45 message complete +``` + +## `Content-Length` with follow-up headers + +The way the parser works is that special headers (like `Content-Length`) first +set `header_state` to appropriate value, and then apply custom parsing using +that value. For `Content-Length`, in particular, the `header_state` is used for +setting the flag too. + +Make sure that `header_state` is reset to `0`, so that the flag won't be +attempted to set twice (and error). 
+ + +```http +PUT /url HTTP/1.1 +Content-Length: 003 +Ohai: world + +abc +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=3 span[header_value]="003" +off=40 header_value complete +off=40 len=4 span[header_field]="Ohai" +off=45 header_field complete +off=46 len=5 span[header_value]="world" +off=53 header_value complete +off=55 headers complete method=4 v=1/1 flags=20 content_length=3 +off=55 len=3 span[body]="abc" +off=58 message complete +``` + +## Error on `Content-Length` overflow + + +```http +PUT /url HTTP/1.1 +Content-Length: 1000000000000000000000 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=21 span[header_value]="100000000000000000000" +off=56 error code=11 reason="Content-Length overflow" +``` + +## Error on duplicate `Content-Length` + + +```http +PUT /url HTTP/1.1 +Content-Length: 1 +Content-Length: 2 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=1 span[header_value]="1" +off=38 header_value complete +off=38 len=14 span[header_field]="Content-Length" +off=53 header_field complete +off=54 error code=4 reason="Duplicate Content-Length" +``` + +## Error on simultaneous `Content-Length` and `Transfer-Encoding: identity` + + +```http +PUT /url HTTP/1.1 +Content-Length: 1 +Transfer-Encoding: identity + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=1 span[header_value]="1" +off=38 header_value complete +off=38 len=17 span[header_field]="Transfer-Encoding" +off=56 header_field complete +off=56 error code=15 reason="Transfer-Encoding can't be present with Content-Length" +``` + +## Invalid whitespace token with `Content-Length` header field + + +```http +PUT /url HTTP/1.1 +Connection: upgrade +Content-Length : 4 +Upgrade: ws + +abcdefgh +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 header_field complete +off=31 len=7 span[header_value]="upgrade" +off=40 header_value complete +off=40 len=14 span[header_field]="Content-Length" +off=55 error code=10 reason="Invalid header field char" +``` + +## Invalid whitespace token with `Content-Length` header field (lenient) + + +```http +PUT /url HTTP/1.1 +Connection: upgrade +Content-Length : 4 +Upgrade: ws + +abcdefgh +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=10 span[header_field]="Connection" +off=30 
header_field complete +off=31 len=7 span[header_value]="upgrade" +off=40 header_value complete +off=40 len=15 span[header_field]="Content-Length " +off=56 header_field complete +off=57 len=1 span[header_value]="4" +off=60 header_value complete +off=60 len=7 span[header_field]="Upgrade" +off=68 header_field complete +off=69 len=2 span[header_value]="ws" +off=73 header_value complete +off=75 headers complete method=4 v=1/1 flags=34 content_length=4 +off=75 len=4 span[body]="abcd" +off=79 message complete +off=79 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +## No error on simultaneous `Content-Length` and `Transfer-Encoding: identity` (lenient) + + +```http +PUT /url HTTP/1.1 +Content-Length: 1 +Transfer-Encoding: identity + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=1 span[header_value]="1" +off=38 header_value complete +off=38 len=17 span[header_field]="Transfer-Encoding" +off=56 header_field complete +off=57 len=8 span[header_value]="identity" +off=67 header_value complete +off=69 headers complete method=4 v=1/1 flags=220 content_length=1 +``` + +## Funky `Content-Length` with body + + +```http +GET /get_funky_content_length_body_hello HTTP/1.0 +conTENT-Length: 5 + +HELLO +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=36 span[url]="/get_funky_content_length_body_hello" +off=41 url complete +off=46 len=3 span[version]="1.0" +off=49 version complete +off=51 len=14 span[header_field]="conTENT-Length" +off=66 header_field complete +off=67 len=1 span[header_value]="5" +off=70 header_value complete +off=72 headers complete method=1 v=1/0 flags=20 content_length=5 +off=72 len=5 span[body]="HELLO" +off=77 message complete +``` + +## Spaces in `Content-Length` (surrounding) + + +```http +POST / HTTP/1.1 +Content-Length: 42 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=34 len=3 span[header_value]="42 " +off=39 header_value complete +off=41 headers complete method=3 v=1/1 flags=20 content_length=42 +``` + +### Spaces in `Content-Length` #2 + + +```http +POST / HTTP/1.1 +Content-Length: 4 2 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=2 span[header_value]="4 " +off=35 error code=11 reason="Invalid character in Content-Length" +``` + +### Spaces in `Content-Length` #3 + + +```http +POST / HTTP/1.1 +Content-Length: 13 37 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=3 span[header_value]="13 " +off=36 error code=11 reason="Invalid character in Content-Length" +``` + +### Empty `Content-Length` + + +```http +POST / HTTP/1.1 +Content-Length: + + +``` + +```log +off=0 
message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=34 error code=11 reason="Empty Content-Length" +``` + +## `Content-Length` with CR instead of dash + + +```http +PUT /url HTTP/1.1 +Content\rLength: 003 + +abc +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=26 error code=10 reason="Invalid header token" +``` + +## Content-Length reset when no body is received + + +```http +PUT /url HTTP/1.1 +Content-Length: 123 + +POST /url HTTP/1.1 +Content-Length: 456 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=3 span[header_value]="123" +off=40 header_value complete +off=42 headers complete method=4 v=1/1 flags=20 content_length=123 +off=42 skip body +off=42 message complete +off=42 reset +off=42 message begin +off=42 len=4 span[method]="POST" +off=46 method complete +off=47 len=4 span[url]="/url" +off=52 url complete +off=57 len=3 span[version]="1.1" +off=60 version complete +off=62 len=14 span[header_field]="Content-Length" +off=77 header_field complete +off=78 len=3 span[header_value]="456" +off=83 header_value complete +off=85 headers complete method=3 v=1/1 flags=20 content_length=456 +off=85 skip body +off=85 message complete +``` + +## Missing CRLF-CRLF before body + + +```http +PUT /url HTTP/1.1 +Content-Length: 3 +\rabc +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=1 span[header_value]="3" +off=38 header_value complete +off=39 error code=2 reason="Expected LF after headers" +``` + +## Missing CRLF-CRLF before body (lenient) + + +```http +PUT /url HTTP/1.1 +Content-Length: 3 +\rabc +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=14 span[header_field]="Content-Length" +off=34 header_field complete +off=35 len=1 span[header_value]="3" +off=38 header_value complete +off=39 headers complete method=4 v=1/1 flags=20 content_length=3 +off=39 len=3 span[body]="abc" +off=42 message complete +``` \ No newline at end of file diff --git a/llhttp/test/request/finish.md b/llhttp/test/request/finish.md new file mode 100644 index 0000000..710daa5 --- /dev/null +++ b/llhttp/test/request/finish.md @@ -0,0 +1,69 @@ +Finish +====== + +Those tests check the return codes and the behavior of `llhttp_finish()` C API. 
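+Outside of the fixture harness (which reports the parser's internal `finish`
+field rather than an errno), the same checks can be made through the public
+API. A hedged sketch, assuming only what `llhttp.h` above declares:
+`llhttp_finish()` should return `HPE_OK` after the complete GET request and a
+non-OK code after the truncated PUT.
+
+```c
+#include <stdio.h>
+#include <string.h>
+#include "llhttp.h"
+
+/* Parse a whole request string, then signal EOF and return the finish code. */
+static llhttp_errno_t parse_then_finish(const char* data) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  llhttp_init(&parser, HTTP_REQUEST, &settings);
+
+  llhttp_execute(&parser, data, strlen(data));
+  return llhttp_finish(&parser);
+}
+
+int main(void) {
+  printf("complete GET:  %s\n",
+         llhttp_errno_name(parse_then_finish("GET / HTTP/1.1\r\n\r\n")));
+  printf("truncated PUT: %s\n",
+         llhttp_errno_name(parse_then_finish(
+             "PUT / HTTP/1.1\r\nContent-Length: 100\r\n\r\n")));
+  return 0;
+}
+```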
+ +## It should be safe to finish after GET request + + +```http +GET / HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=18 headers complete method=1 v=1/1 flags=0 content_length=0 +off=18 message complete +off=NULL finish=0 +``` + +## It should be unsafe to finish after incomplete PUT request + + +```http +PUT / HTTP/1.1 +Content-Length: 100 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=14 span[header_field]="Content-Length" +off=31 header_field complete +off=32 len=3 span[header_value]="100" +off=NULL finish=2 +``` + +## It should be unsafe to finish inside of the header + + +```http +PUT / HTTP/1.1 +Content-Leng +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=12 span[header_field]="Content-Leng" +off=NULL finish=2 +``` diff --git a/llhttp/test/request/invalid.md b/llhttp/test/request/invalid.md new file mode 100644 index 0000000..9fb8383 --- /dev/null +++ b/llhttp/test/request/invalid.md @@ -0,0 +1,607 @@ +Invalid requests +================ + +### ICE protocol and GET method + + +```http +GET /music/sweet/music ICE/1.0 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=18 span[url]="/music/sweet/music" +off=23 url complete +off=27 error code=8 reason="Expected SOURCE method for ICE/x.x request" +``` + +### ICE protocol, but not really + + +```http +GET /music/sweet/music IHTTP/1.0 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=18 span[url]="/music/sweet/music" +off=23 url complete +off=24 error code=8 reason="Expected HTTP/" +``` + +### RTSP protocol and PUT method + + +```http +PUT /music/sweet/music RTSP/1.0 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=18 span[url]="/music/sweet/music" +off=23 url complete +off=28 error code=8 reason="Invalid method for RTSP/x.x request" +``` + +### HTTP protocol and ANNOUNCE method + + +```http +ANNOUNCE /music/sweet/music HTTP/1.0 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=8 span[method]="ANNOUNCE" +off=8 method complete +off=9 len=18 span[url]="/music/sweet/music" +off=28 url complete +off=33 error code=8 reason="Invalid method for HTTP/x.x request" +``` + +### Headers separated by CR + + +```http +GET / HTTP/1.1 +Foo: 1\rBar: 2 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=3 span[header_field]="Foo" +off=20 header_field complete +off=21 len=1 span[header_value]="1" +off=23 error code=3 reason="Missing expected LF after header value" +``` + +### Headers separated by LF + + +```http +POST / HTTP/1.1 +Host: localhost:5000 +x:x\nTransfer-Encoding: chunked + +1 +A +0 + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 
version complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=14 span[header_value]="localhost:5000" +off=39 header_value complete +off=39 len=1 span[header_field]="x" +off=41 header_field complete +off=41 len=1 span[header_value]="x" +off=42 error code=25 reason="Missing expected CR after header value" +``` + +### Headers separated by dummy characters + + +```http +GET / HTTP/1.1 +Connection: close +Host: a +\rZGET /evil: HTTP/1.1 +Host: a + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=10 span[header_field]="Connection" +off=27 header_field complete +off=28 len=5 span[header_value]="close" +off=35 header_value complete +off=35 len=4 span[header_field]="Host" +off=40 header_field complete +off=41 len=1 span[header_value]="a" +off=44 header_value complete +off=45 error code=2 reason="Expected LF after headers" +``` + + +### Headers separated by dummy characters (lenient) + + +```http +GET / HTTP/1.1 +Connection: close +Host: a +\rZGET /evil: HTTP/1.1 +Host: a + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=10 span[header_field]="Connection" +off=27 header_field complete +off=28 len=5 span[header_value]="close" +off=35 header_value complete +off=35 len=4 span[header_field]="Host" +off=40 header_field complete +off=41 len=1 span[header_value]="a" +off=44 header_value complete +off=45 headers complete method=1 v=1/1 flags=2 content_length=0 +off=45 message complete +off=46 error code=5 reason="Data after `Connection: close`" +``` + +### Empty headers separated by CR + + +```http +POST / HTTP/1.1 +Connection: Close +Host: localhost:5000 +x:\rTransfer-Encoding: chunked + +1 +A +0 + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=10 span[header_field]="Connection" +off=28 header_field complete +off=29 len=5 span[header_value]="Close" +off=36 header_value complete +off=36 len=4 span[header_field]="Host" +off=41 header_field complete +off=42 len=14 span[header_value]="localhost:5000" +off=58 header_value complete +off=58 len=1 span[header_field]="x" +off=60 header_field complete +off=61 error code=2 reason="Expected LF after CR" +``` + +### Empty headers separated by LF + + +```http +POST / HTTP/1.1 +Host: localhost:5000 +x:\nTransfer-Encoding: chunked + +1 +A +0 + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=14 span[header_value]="localhost:5000" +off=39 header_value complete +off=39 len=1 span[header_field]="x" +off=41 header_field complete +off=42 error code=10 reason="Invalid header value char" +``` + +### Invalid header token #1 + + +```http +GET / HTTP/1.1 +Fo@: Failure + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=18 error code=10 reason="Invalid header token" +``` + +### Invalid header 
token #2 + + +```http +GET / HTTP/1.1 +Foo\01\test: Bar + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=19 error code=10 reason="Invalid header token" +``` + +### Invalid header token #3 + + +```http +GET / HTTP/1.1 +: Bar + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 error code=10 reason="Invalid header token" +``` + +### Invalid method + + +```http +MKCOLA / HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=5 span[method]="MKCOL" +off=5 method complete +off=5 error code=6 reason="Expected space after method" +``` + +### Illegal header field name line folding + + +```http +GET / HTTP/1.1 +name + : value + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=20 error code=10 reason="Invalid header token" +``` + +### Corrupted Connection header + + +```http +GET / HTTP/1.1 +Host: www.example.com +Connection\r\033\065\325eep-Alive +Accept-Encoding: gzip + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Host" +off=21 header_field complete +off=22 len=15 span[header_value]="www.example.com" +off=39 header_value complete +off=49 error code=10 reason="Invalid header token" +``` + +### Corrupted header name + + +```http +GET / HTTP/1.1 +Host: www.example.com +X-Some-Header\r\033\065\325eep-Alive +Accept-Encoding: gzip + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Host" +off=21 header_field complete +off=22 len=15 span[header_value]="www.example.com" +off=39 header_value complete +off=52 error code=10 reason="Invalid header token" +``` + +### Missing CR between headers + + + +```http +GET / HTTP/1.1 +Host: localhost +Dummy: x\nContent-Length: 23 + +GET / HTTP/1.1 +Dummy: GET /admin HTTP/1.1 +Host: localhost + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Host" +off=21 header_field complete +off=22 len=9 span[header_value]="localhost" +off=33 header_value complete +off=33 len=5 span[header_field]="Dummy" +off=39 header_field complete +off=40 len=1 span[header_value]="x" +off=41 error code=25 reason="Missing expected CR after header value" +``` + +### Invalid HTTP version + + +```http +GET / HTTP/5.6 +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="5.6" +off=14 error code=9 reason="Invalid HTTP version" +``` + +## Invalid space after start line + + +```http +GET / HTTP/1.1 + Host: foo +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 
version complete +off=17 error code=30 reason="Unexpected space after start line" +``` + + +### Only LFs present + + +```http +POST / HTTP/1.1\n\ +Transfer-Encoding: chunked\n\ +Trailer: Baz +Foo: abc\n\ +Bar: def\n\ +\n\ +1\n\ +A\n\ +1;abc\n\ +B\n\ +1;def=ghi\n\ +C\n\ +1;jkl="mno"\n\ +D\n\ +0\n\ +\n\ +Baz: ghi\n\ +\n\ +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=16 error code=9 reason="Expected CRLF after version" +``` + +### Only LFs present (lenient) + + +```http +POST / HTTP/1.1\n\ +Transfer-Encoding: chunked\n\ +Trailer: Baz +Foo: abc\n\ +Bar: def\n\ +\n\ +1\n\ +A\n\ +1;abc\n\ +B\n\ +1;def=ghi\n\ +C\n\ +1;jkl="mno"\n\ +D\n\ +0\n\ +\n\ +Baz: ghi\n\ +\n +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=16 len=17 span[header_field]="Transfer-Encoding" +off=34 header_field complete +off=35 len=7 span[header_value]="chunked" +off=43 header_value complete +off=43 len=7 span[header_field]="Trailer" +off=51 header_field complete +off=52 len=3 span[header_value]="Baz" +off=57 header_value complete +off=57 len=3 span[header_field]="Foo" +off=61 header_field complete +off=62 len=3 span[header_value]="abc" +off=66 header_value complete +off=66 len=3 span[header_field]="Bar" +off=70 header_field complete +off=71 len=3 span[header_value]="def" +off=75 header_value complete +off=76 headers complete method=3 v=1/1 flags=208 content_length=0 +off=78 chunk header len=1 +off=78 len=1 span[body]="A" +off=80 chunk complete +off=82 len=3 span[chunk_extension_name]="abc" +off=85 chunk_extension_name complete +off=86 chunk header len=1 +off=86 len=1 span[body]="B" +off=88 chunk complete +off=90 len=3 span[chunk_extension_name]="def" +off=94 chunk_extension_name complete +off=94 len=3 span[chunk_extension_value]="ghi" +off=97 chunk_extension_value complete +off=98 chunk header len=1 +off=98 len=1 span[body]="C" +off=100 chunk complete +off=102 len=3 span[chunk_extension_name]="jkl" +off=106 chunk_extension_name complete +off=106 len=5 span[chunk_extension_value]=""mno"" +off=111 chunk_extension_value complete +off=112 chunk header len=1 +off=112 len=1 span[body]="D" +off=114 chunk complete +off=117 chunk header len=0 +off=117 len=3 span[header_field]="Baz" +off=121 header_field complete +off=122 len=3 span[header_value]="ghi" +off=126 header_value complete +off=127 chunk complete +off=127 message complete +``` \ No newline at end of file diff --git a/llhttp/test/request/lenient-headers.md b/llhttp/test/request/lenient-headers.md new file mode 100644 index 0000000..05e105f --- /dev/null +++ b/llhttp/test/request/lenient-headers.md @@ -0,0 +1,145 @@ +Lenient header value parsing +============================ + +Parsing with header value token checks off. 
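+
+For reference, the C library exposes a switch for this mode, `llhttp_set_lenient_headers()`. The sketch below is an assumption about typical embedder usage rather than the code that drives these fixtures; it shows the otherwise-rejected `\f` header value being accepted once leniency is turned on.
+
+```c
+#include <stdio.h>
+#include "llhttp.h"
+
+int main(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  llhttp_init(&parser, HTTP_REQUEST, &settings);
+
+  /* Switch off header-value token checks; without this call the \f below
+   * is rejected (the "error code=10" run further down). */
+  llhttp_set_lenient_headers(&parser, 1);
+
+  const char req[] = "GET /url HTTP/1.1\r\nHeader1: \f\r\n\r\n";
+  llhttp_errno_t err = llhttp_execute(&parser, req, sizeof(req) - 1);
+  printf("execute: %s\n", llhttp_errno_name(err));  /* expected: HPE_OK */
+  return err == HPE_OK ? 0 : 1;
+}
+```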
+ +## Header value (lenient) + + +```http +GET /url HTTP/1.1 +Header1: \f + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=7 span[header_field]="Header1" +off=27 header_field complete +off=28 len=1 span[header_value]="\f" +off=31 header_value complete +off=33 headers complete method=1 v=1/1 flags=0 content_length=0 +off=33 message complete +``` + +## Second request header value (lenient) + + +```http +GET /url HTTP/1.1 +Header1: Okay + + +GET /url HTTP/1.1 +Header1: \f + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=7 span[header_field]="Header1" +off=27 header_field complete +off=28 len=4 span[header_value]="Okay" +off=34 header_value complete +off=36 headers complete method=1 v=1/1 flags=0 content_length=0 +off=36 message complete +off=38 reset +off=38 message begin +off=38 len=3 span[method]="GET" +off=41 method complete +off=42 len=4 span[url]="/url" +off=47 url complete +off=52 len=3 span[version]="1.1" +off=55 version complete +off=57 len=7 span[header_field]="Header1" +off=65 header_field complete +off=66 len=1 span[header_value]="\f" +off=69 header_value complete +off=71 headers complete method=1 v=1/1 flags=0 content_length=0 +off=71 message complete +``` + +## Header value + + +```http +GET /url HTTP/1.1 +Header1: \f + + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=7 span[header_field]="Header1" +off=27 header_field complete +off=28 len=0 span[header_value]="" +off=28 error code=10 reason="Invalid header value char" +``` + +### Empty headers separated by CR (lenient) + + +```http +POST / HTTP/1.1 +Connection: Close +Host: localhost:5000 +x:\rTransfer-Encoding: chunked + +1 +A +0 + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=10 span[header_field]="Connection" +off=28 header_field complete +off=29 len=5 span[header_value]="Close" +off=36 header_value complete +off=36 len=4 span[header_field]="Host" +off=41 header_field complete +off=42 len=14 span[header_value]="localhost:5000" +off=58 header_value complete +off=58 len=1 span[header_field]="x" +off=60 header_field complete +off=61 len=0 span[header_value]="" +off=61 header_value complete +off=61 len=17 span[header_field]="Transfer-Encoding" +off=79 header_field complete +off=80 len=7 span[header_value]="chunked" +off=89 header_value complete +off=91 headers complete method=3 v=1/1 flags=20a content_length=0 +off=94 chunk header len=1 +off=94 len=1 span[body]="A" +off=97 chunk complete +off=100 chunk header len=0 +``` \ No newline at end of file diff --git a/llhttp/test/request/lenient-version.md b/llhttp/test/request/lenient-version.md new file mode 100644 index 0000000..4185556 --- /dev/null +++ b/llhttp/test/request/lenient-version.md @@ -0,0 +1,23 @@ +Lenient HTTP version parsing +============================ + +### Invalid HTTP version (lenient) + + +```http +GET / HTTP/5.6 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete 
+off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="5.6" +off=14 version complete +off=18 headers complete method=1 v=5/6 flags=0 content_length=0 +off=18 message complete +``` \ No newline at end of file diff --git a/llhttp/test/request/method.md b/llhttp/test/request/method.md new file mode 100644 index 0000000..dce262e --- /dev/null +++ b/llhttp/test/request/method.md @@ -0,0 +1,450 @@ +Methods +======= + +### REPORT request + + +```http +REPORT /test HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=6 span[method]="REPORT" +off=6 method complete +off=7 len=5 span[url]="/test" +off=13 url complete +off=18 len=3 span[version]="1.1" +off=21 version complete +off=25 headers complete method=20 v=1/1 flags=0 content_length=0 +off=25 message complete +``` + +### CONNECT request + + +```http +CONNECT 0-home0.netscape.com:443 HTTP/1.0 +User-agent: Mozilla/1.1N +Proxy-authorization: basic aGVsbG86d29ybGQ= + +some data +and yet even more data +``` + +```log +off=0 message begin +off=0 len=7 span[method]="CONNECT" +off=7 method complete +off=8 len=24 span[url]="0-home0.netscape.com:443" +off=33 url complete +off=38 len=3 span[version]="1.0" +off=41 version complete +off=43 len=10 span[header_field]="User-agent" +off=54 header_field complete +off=55 len=12 span[header_value]="Mozilla/1.1N" +off=69 header_value complete +off=69 len=19 span[header_field]="Proxy-authorization" +off=89 header_field complete +off=90 len=22 span[header_value]="basic aGVsbG86d29ybGQ=" +off=114 header_value complete +off=116 headers complete method=5 v=1/0 flags=0 content_length=0 +off=116 message complete +off=116 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### CONNECT request with CAPS + + +```http +CONNECT HOME0.NETSCAPE.COM:443 HTTP/1.0 +User-agent: Mozilla/1.1N +Proxy-authorization: basic aGVsbG86d29ybGQ= + + +``` + +```log +off=0 message begin +off=0 len=7 span[method]="CONNECT" +off=7 method complete +off=8 len=22 span[url]="HOME0.NETSCAPE.COM:443" +off=31 url complete +off=36 len=3 span[version]="1.0" +off=39 version complete +off=41 len=10 span[header_field]="User-agent" +off=52 header_field complete +off=53 len=12 span[header_value]="Mozilla/1.1N" +off=67 header_value complete +off=67 len=19 span[header_field]="Proxy-authorization" +off=87 header_field complete +off=88 len=22 span[header_value]="basic aGVsbG86d29ybGQ=" +off=112 header_value complete +off=114 headers complete method=5 v=1/0 flags=0 content_length=0 +off=114 message complete +off=114 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### CONNECT with body + + +```http +CONNECT foo.bar.com:443 HTTP/1.0 +User-agent: Mozilla/1.1N +Proxy-authorization: basic aGVsbG86d29ybGQ= +Content-Length: 10 + +blarfcicle" +``` + +```log +off=0 message begin +off=0 len=7 span[method]="CONNECT" +off=7 method complete +off=8 len=15 span[url]="foo.bar.com:443" +off=24 url complete +off=29 len=3 span[version]="1.0" +off=32 version complete +off=34 len=10 span[header_field]="User-agent" +off=45 header_field complete +off=46 len=12 span[header_value]="Mozilla/1.1N" +off=60 header_value complete +off=60 len=19 span[header_field]="Proxy-authorization" +off=80 header_field complete +off=81 len=22 span[header_value]="basic aGVsbG86d29ybGQ=" +off=105 header_value complete +off=105 len=14 span[header_field]="Content-Length" +off=120 header_field complete +off=121 len=2 span[header_value]="10" +off=125 header_value complete +off=127 headers complete method=5 v=1/0 flags=20 content_length=10 +off=127 message complete +off=127 
error code=22 reason="Pause on CONNECT/Upgrade" +``` + +### M-SEARCH request + + +```http +M-SEARCH * HTTP/1.1 +HOST: 239.255.255.250:1900 +MAN: "ssdp:discover" +ST: "ssdp:all" + + +``` + +```log +off=0 message begin +off=0 len=8 span[method]="M-SEARCH" +off=8 method complete +off=9 len=1 span[url]="*" +off=11 url complete +off=16 len=3 span[version]="1.1" +off=19 version complete +off=21 len=4 span[header_field]="HOST" +off=26 header_field complete +off=27 len=20 span[header_value]="239.255.255.250:1900" +off=49 header_value complete +off=49 len=3 span[header_field]="MAN" +off=53 header_field complete +off=54 len=15 span[header_value]=""ssdp:discover"" +off=71 header_value complete +off=71 len=2 span[header_field]="ST" +off=74 header_field complete +off=75 len=10 span[header_value]=""ssdp:all"" +off=87 header_value complete +off=89 headers complete method=24 v=1/1 flags=0 content_length=0 +off=89 message complete +``` + +### PATCH request + + +```http +PATCH /file.txt HTTP/1.1 +Host: www.example.com +Content-Type: application/example +If-Match: "e0023aa4e" +Content-Length: 10 + +cccccccccc +``` + +```log +off=0 message begin +off=0 len=5 span[method]="PATCH" +off=5 method complete +off=6 len=9 span[url]="/file.txt" +off=16 url complete +off=21 len=3 span[version]="1.1" +off=24 version complete +off=26 len=4 span[header_field]="Host" +off=31 header_field complete +off=32 len=15 span[header_value]="www.example.com" +off=49 header_value complete +off=49 len=12 span[header_field]="Content-Type" +off=62 header_field complete +off=63 len=19 span[header_value]="application/example" +off=84 header_value complete +off=84 len=8 span[header_field]="If-Match" +off=93 header_field complete +off=94 len=11 span[header_value]=""e0023aa4e"" +off=107 header_value complete +off=107 len=14 span[header_field]="Content-Length" +off=122 header_field complete +off=123 len=2 span[header_value]="10" +off=127 header_value complete +off=129 headers complete method=28 v=1/1 flags=20 content_length=10 +off=129 len=10 span[body]="cccccccccc" +off=139 message complete +``` + +### PURGE request + + +```http +PURGE /file.txt HTTP/1.1 +Host: www.example.com + + +``` + +```log +off=0 message begin +off=0 len=5 span[method]="PURGE" +off=5 method complete +off=6 len=9 span[url]="/file.txt" +off=16 url complete +off=21 len=3 span[version]="1.1" +off=24 version complete +off=26 len=4 span[header_field]="Host" +off=31 header_field complete +off=32 len=15 span[header_value]="www.example.com" +off=49 header_value complete +off=51 headers complete method=29 v=1/1 flags=0 content_length=0 +off=51 message complete +``` + +### SEARCH request + + +```http +SEARCH / HTTP/1.1 +Host: www.example.com + + +``` + +```log +off=0 message begin +off=0 len=6 span[method]="SEARCH" +off=6 method complete +off=7 len=1 span[url]="/" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=4 span[header_field]="Host" +off=24 header_field complete +off=25 len=15 span[header_value]="www.example.com" +off=42 header_value complete +off=44 headers complete method=14 v=1/1 flags=0 content_length=0 +off=44 message complete +``` + +### LINK request + + +```http +LINK /images/my_dog.jpg HTTP/1.1 +Host: example.com +Link: ; rel="tag" +Link: ; rel="tag" + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="LINK" +off=4 method complete +off=5 len=18 span[url]="/images/my_dog.jpg" +off=24 url complete +off=29 len=3 span[version]="1.1" +off=32 version complete +off=34 len=4 span[header_field]="Host" +off=39 
header_field complete +off=40 len=11 span[header_value]="example.com" +off=53 header_value complete +off=53 len=4 span[header_field]="Link" +off=58 header_field complete +off=59 len=44 span[header_value]="; rel="tag"" +off=105 header_value complete +off=105 len=4 span[header_field]="Link" +off=110 header_field complete +off=111 len=46 span[header_value]="; rel="tag"" +off=159 header_value complete +off=161 headers complete method=31 v=1/1 flags=0 content_length=0 +off=161 message complete +``` + +### UNLINK request + + +```http +UNLINK /images/my_dog.jpg HTTP/1.1 +Host: example.com +Link: ; rel="tag" + + +``` + +```log +off=0 message begin +off=0 len=6 span[method]="UNLINK" +off=6 method complete +off=7 len=18 span[url]="/images/my_dog.jpg" +off=26 url complete +off=31 len=3 span[version]="1.1" +off=34 version complete +off=36 len=4 span[header_field]="Host" +off=41 header_field complete +off=42 len=11 span[header_value]="example.com" +off=55 header_value complete +off=55 len=4 span[header_field]="Link" +off=60 header_field complete +off=61 len=46 span[header_value]="; rel="tag"" +off=109 header_value complete +off=111 headers complete method=32 v=1/1 flags=0 content_length=0 +off=111 message complete +``` + +### SOURCE request + + +```http +SOURCE /music/sweet/music HTTP/1.1 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=6 span[method]="SOURCE" +off=6 method complete +off=7 len=18 span[url]="/music/sweet/music" +off=26 url complete +off=31 len=3 span[version]="1.1" +off=34 version complete +off=36 len=4 span[header_field]="Host" +off=41 header_field complete +off=42 len=11 span[header_value]="example.com" +off=55 header_value complete +off=57 headers complete method=33 v=1/1 flags=0 content_length=0 +off=57 message complete +``` + +### SOURCE request with ICE + + +```http +SOURCE /music/sweet/music ICE/1.0 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=6 span[method]="SOURCE" +off=6 method complete +off=7 len=18 span[url]="/music/sweet/music" +off=26 url complete +off=30 len=3 span[version]="1.0" +off=33 version complete +off=35 len=4 span[header_field]="Host" +off=40 header_field complete +off=41 len=11 span[header_value]="example.com" +off=54 header_value complete +off=56 headers complete method=33 v=1/0 flags=0 content_length=0 +off=56 message complete +``` + +### OPTIONS request with RTSP + +NOTE: `OPTIONS` is a valid HTTP method too.
+ + +```http +OPTIONS /music/sweet/music RTSP/1.0 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=7 span[method]="OPTIONS" +off=7 method complete +off=8 len=18 span[url]="/music/sweet/music" +off=27 url complete +off=32 len=3 span[version]="1.0" +off=35 version complete +off=37 len=4 span[header_field]="Host" +off=42 header_field complete +off=43 len=11 span[header_value]="example.com" +off=56 header_value complete +off=58 headers complete method=6 v=1/0 flags=0 content_length=0 +off=58 message complete +``` + +### ANNOUNCE request with RTSP + + +```http +ANNOUNCE /music/sweet/music RTSP/1.0 +Host: example.com + + +``` + +```log +off=0 message begin +off=0 len=8 span[method]="ANNOUNCE" +off=8 method complete +off=9 len=18 span[url]="/music/sweet/music" +off=28 url complete +off=33 len=3 span[version]="1.0" +off=36 version complete +off=38 len=4 span[header_field]="Host" +off=43 header_field complete +off=44 len=11 span[header_value]="example.com" +off=57 header_value complete +off=59 headers complete method=36 v=1/0 flags=0 content_length=0 +off=59 message complete +``` + +### PRI request HTTP2 + + +```http +PRI * HTTP/1.1 + +SM + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PRI" +off=3 method complete +off=4 len=1 span[url]="*" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=24 error code=23 reason="Pause on PRI/Upgrade" +``` diff --git a/llhttp/test/request/pausing.md b/llhttp/test/request/pausing.md new file mode 100644 index 0000000..8e501e3 --- /dev/null +++ b/llhttp/test/request/pausing.md @@ -0,0 +1,381 @@ +Pausing +======= + +### on_message_begin + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 pause +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_message_complete + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +off=41 pause +``` + +### on_method_complete + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=4 pause +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_url_complete + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url 
complete +off=7 pause +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_version_complete + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=15 pause +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_header_field_complete + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=32 pause +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_header_value_complete + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=36 pause +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_headers_complete + + +```http +POST / HTTP/1.1 +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete method=3 v=1/1 flags=20 content_length=3 +off=38 pause +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_chunk_header + + +```http +PUT / HTTP/1.1 +Transfer-Encoding: chunked + +a +0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=17 span[header_field]="Transfer-Encoding" +off=34 header_field complete +off=35 len=7 span[header_value]="chunked" +off=44 header_value complete +off=46 headers complete method=4 v=1/1 flags=208 content_length=0 +off=49 chunk header len=10 +off=49 pause +off=49 len=10 span[body]="0123456789" +off=61 chunk complete +off=64 chunk header len=0 +off=64 pause +off=66 chunk complete +off=66 message complete +``` + +### on_chunk_extension_name + + +```http +PUT / HTTP/1.1 +Transfer-Encoding: chunked + +a;foo=bar +0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 
span[method]="PUT" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=17 span[header_field]="Transfer-Encoding" +off=34 header_field complete +off=35 len=7 span[header_value]="chunked" +off=44 header_value complete +off=46 headers complete method=4 v=1/1 flags=208 content_length=0 +off=48 len=3 span[chunk_extension_name]="foo" +off=52 chunk_extension_name complete +off=52 pause +off=52 len=3 span[chunk_extension_value]="bar" +off=56 chunk_extension_value complete +off=57 chunk header len=10 +off=57 len=10 span[body]="0123456789" +off=69 chunk complete +off=72 chunk header len=0 +off=74 chunk complete +off=74 message complete +``` + +### on_chunk_extension_value + + +```http +PUT / HTTP/1.1 +Transfer-Encoding: chunked + +a;foo=bar +0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=17 span[header_field]="Transfer-Encoding" +off=34 header_field complete +off=35 len=7 span[header_value]="chunked" +off=44 header_value complete +off=46 headers complete method=4 v=1/1 flags=208 content_length=0 +off=48 len=3 span[chunk_extension_name]="foo" +off=52 chunk_extension_name complete +off=52 len=3 span[chunk_extension_value]="bar" +off=56 chunk_extension_value complete +off=56 pause +off=57 chunk header len=10 +off=57 len=10 span[body]="0123456789" +off=69 chunk complete +off=72 chunk header len=0 +off=74 chunk complete +off=74 message complete +``` + + +### on_chunk_complete + + +```http +PUT / HTTP/1.1 +Transfer-Encoding: chunked + +a +0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=17 span[header_field]="Transfer-Encoding" +off=34 header_field complete +off=35 len=7 span[header_value]="chunked" +off=44 header_value complete +off=46 headers complete method=4 v=1/1 flags=208 content_length=0 +off=49 chunk header len=10 +off=49 len=10 span[body]="0123456789" +off=61 chunk complete +off=61 pause +off=64 chunk header len=0 +off=66 chunk complete +off=66 pause +off=66 message complete +``` diff --git a/llhttp/test/request/pipelining.md b/llhttp/test/request/pipelining.md new file mode 100644 index 0000000..bdfe6ab --- /dev/null +++ b/llhttp/test/request/pipelining.md @@ -0,0 +1,66 @@ +Pipelining +========== + +## Should parse multiple events + + +```http +POST /aaa HTTP/1.1 +Content-Length: 3 + +AAA +PUT /bbb HTTP/1.1 +Content-Length: 4 + +BBBB +PATCH /ccc HTTP/1.1 +Content-Length: 5 + +CCCC +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=4 span[url]="/aaa" +off=10 url complete +off=15 len=3 span[version]="1.1" +off=18 version complete +off=20 len=14 span[header_field]="Content-Length" +off=35 header_field complete +off=36 len=1 span[header_value]="3" +off=39 header_value complete +off=41 headers complete method=3 v=1/1 flags=20 content_length=3 +off=41 len=3 span[body]="AAA" +off=44 message complete +off=46 reset +off=46 message begin +off=46 len=3 span[method]="PUT" +off=49 method complete +off=50 len=4 span[url]="/bbb" +off=55 url complete +off=60 len=3 span[version]="1.1" +off=63 version complete +off=65 len=14 span[header_field]="Content-Length" +off=80 header_field complete +off=81 len=1 
span[header_value]="4" +off=84 header_value complete +off=86 headers complete method=4 v=1/1 flags=20 content_length=4 +off=86 len=4 span[body]="BBBB" +off=90 message complete +off=92 reset +off=92 message begin +off=92 len=5 span[method]="PATCH" +off=97 method complete +off=98 len=4 span[url]="/ccc" +off=103 url complete +off=108 len=3 span[version]="1.1" +off=111 version complete +off=113 len=14 span[header_field]="Content-Length" +off=128 header_field complete +off=129 len=1 span[header_value]="5" +off=132 header_value complete +off=134 headers complete method=28 v=1/1 flags=20 content_length=5 +off=134 len=4 span[body]="CCCC" +``` \ No newline at end of file diff --git a/llhttp/test/request/sample.md b/llhttp/test/request/sample.md new file mode 100644 index 0000000..f0a5d44 --- /dev/null +++ b/llhttp/test/request/sample.md @@ -0,0 +1,629 @@ +Sample requests +=============== + +Lots of sample requests, most ported from the [http_parser][0] test suite. + +## Simple request + + +```http +OPTIONS /url HTTP/1.1 +Header1: Value1 +Header2:\t Value2 + + +``` + +```log +off=0 message begin +off=0 len=7 span[method]="OPTIONS" +off=7 method complete +off=8 len=4 span[url]="/url" +off=13 url complete +off=18 len=3 span[version]="1.1" +off=21 version complete +off=23 len=7 span[header_field]="Header1" +off=31 header_field complete +off=32 len=6 span[header_value]="Value1" +off=40 header_value complete +off=40 len=7 span[header_field]="Header2" +off=48 header_field complete +off=50 len=6 span[header_value]="Value2" +off=58 header_value complete +off=60 headers complete method=6 v=1/1 flags=0 content_length=0 +off=60 message complete +``` + +## Request with method starting with `H` + +There's an optimization in `start_req_or_res` that passes execution to +`start_req` when the first character is not `H` (because a response must start +with `HTTP/`). However, there are still methods like `HEAD` that should get +to `start_req`. Verify that it still works after the optimization.
+ + +```http +HEAD /url HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="HEAD" +off=4 method complete +off=5 len=4 span[url]="/url" +off=10 url complete +off=15 len=3 span[version]="1.1" +off=18 version complete +off=22 headers complete method=2 v=1/1 flags=0 content_length=0 +off=22 message complete +``` + +## curl GET + + +```http +GET /test HTTP/1.1 +User-Agent: curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1 +Host: 0.0.0.0=5000 +Accept: */* + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=5 span[url]="/test" +off=10 url complete +off=15 len=3 span[version]="1.1" +off=18 version complete +off=20 len=10 span[header_field]="User-Agent" +off=31 header_field complete +off=32 len=85 span[header_value]="curl/7.18.0 (i486-pc-linux-gnu) libcurl/7.18.0 OpenSSL/0.9.8g zlib/1.2.3.3 libidn/1.1" +off=119 header_value complete +off=119 len=4 span[header_field]="Host" +off=124 header_field complete +off=125 len=12 span[header_value]="0.0.0.0=5000" +off=139 header_value complete +off=139 len=6 span[header_field]="Accept" +off=146 header_field complete +off=147 len=3 span[header_value]="*/*" +off=152 header_value complete +off=154 headers complete method=1 v=1/1 flags=0 content_length=0 +off=154 message complete +``` + +## Firefox GET + + +```http +GET /favicon.ico HTTP/1.1 +Host: 0.0.0.0=5000 +User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0 +Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 +Accept-Language: en-us,en;q=0.5 +Accept-Encoding: gzip,deflate +Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7 +Keep-Alive: 300 +Connection: keep-alive + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=12 span[url]="/favicon.ico" +off=17 url complete +off=22 len=3 span[version]="1.1" +off=25 version complete +off=27 len=4 span[header_field]="Host" +off=32 header_field complete +off=33 len=12 span[header_value]="0.0.0.0=5000" +off=47 header_value complete +off=47 len=10 span[header_field]="User-Agent" +off=58 header_field complete +off=59 len=76 span[header_value]="Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9) Gecko/2008061015 Firefox/3.0" +off=137 header_value complete +off=137 len=6 span[header_field]="Accept" +off=144 header_field complete +off=145 len=63 span[header_value]="text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8" +off=210 header_value complete +off=210 len=15 span[header_field]="Accept-Language" +off=226 header_field complete +off=227 len=14 span[header_value]="en-us,en;q=0.5" +off=243 header_value complete +off=243 len=15 span[header_field]="Accept-Encoding" +off=259 header_field complete +off=260 len=12 span[header_value]="gzip,deflate" +off=274 header_value complete +off=274 len=14 span[header_field]="Accept-Charset" +off=289 header_field complete +off=290 len=30 span[header_value]="ISO-8859-1,utf-8;q=0.7,*;q=0.7" +off=322 header_value complete +off=322 len=10 span[header_field]="Keep-Alive" +off=333 header_field complete +off=334 len=3 span[header_value]="300" +off=339 header_value complete +off=339 len=10 span[header_field]="Connection" +off=350 header_field complete +off=351 len=10 span[header_value]="keep-alive" +off=363 header_value complete +off=365 headers complete method=1 v=1/1 flags=1 content_length=0 +off=365 message complete +``` + +## DUMBPACK + + +```http +GET /dumbpack HTTP/1.1 +aaaaaaaaaaaaa:++++++++++ + + +``` + +```log +off=0 
message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=9 span[url]="/dumbpack" +off=14 url complete +off=19 len=3 span[version]="1.1" +off=22 version complete +off=24 len=13 span[header_field]="aaaaaaaaaaaaa" +off=38 header_field complete +off=38 len=10 span[header_value]="++++++++++" +off=50 header_value complete +off=52 headers complete method=1 v=1/1 flags=0 content_length=0 +off=52 message complete +``` + +## No headers and no body + + +```http +GET /get_no_headers_no_body/world HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=29 span[url]="/get_no_headers_no_body/world" +off=34 url complete +off=39 len=3 span[version]="1.1" +off=42 version complete +off=46 headers complete method=1 v=1/1 flags=0 content_length=0 +off=46 message complete +``` + +## One header and no body + + +```http +GET /get_one_header_no_body HTTP/1.1 +Accept: */* + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=23 span[url]="/get_one_header_no_body" +off=28 url complete +off=33 len=3 span[version]="1.1" +off=36 version complete +off=38 len=6 span[header_field]="Accept" +off=45 header_field complete +off=46 len=3 span[header_value]="*/*" +off=51 header_value complete +off=53 headers complete method=1 v=1/1 flags=0 content_length=0 +off=53 message complete +``` + +## Apache bench GET + +The server receiving this request SHOULD NOT wait for EOF to know that +`Content-Length == 0`. + + +```http +GET /test HTTP/1.0 +Host: 0.0.0.0:5000 +User-Agent: ApacheBench/2.3 +Accept: */* + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=5 span[url]="/test" +off=10 url complete +off=15 len=3 span[version]="1.0" +off=18 version complete +off=20 len=4 span[header_field]="Host" +off=25 header_field complete +off=26 len=12 span[header_value]="0.0.0.0:5000" +off=40 header_value complete +off=40 len=10 span[header_field]="User-Agent" +off=51 header_field complete +off=52 len=15 span[header_value]="ApacheBench/2.3" +off=69 header_value complete +off=69 len=6 span[header_field]="Accept" +off=76 header_field complete +off=77 len=3 span[header_value]="*/*" +off=82 header_value complete +off=84 headers complete method=1 v=1/0 flags=0 content_length=0 +off=84 message complete +``` + +## Prefix newline + +Some clients, especially after a POST in a keep-alive connection, +will send an extra CRLF before the next request. 
+ + +```http +\r\nGET /test HTTP/1.1 + + +``` + +```log +off=2 message begin +off=2 len=3 span[method]="GET" +off=5 method complete +off=6 len=5 span[url]="/test" +off=12 url complete +off=17 len=3 span[version]="1.1" +off=20 version complete +off=24 headers complete method=1 v=1/1 flags=0 content_length=0 +off=24 message complete +``` + +## No HTTP version + + +```http +GET / + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=7 url complete +off=9 headers complete method=1 v=0/9 flags=0 content_length=0 +off=9 message complete +``` + +## Line folding in header value with CRLF + + +```http +GET / HTTP/1.1 +Line1: abc +\tdef + ghi +\t\tjkl + mno +\t \tqrs +Line2: \t line2\t +Line3: + line3 +Line4: + +Connection: + close + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=5 span[header_field]="Line1" +off=22 header_field complete +off=25 len=3 span[header_value]="abc" +off=30 len=4 span[header_value]="\tdef" +off=36 len=4 span[header_value]=" ghi" +off=42 len=5 span[header_value]="\t\tjkl" +off=49 len=6 span[header_value]=" mno " +off=57 len=6 span[header_value]="\t \tqrs" +off=65 header_value complete +off=65 len=5 span[header_field]="Line2" +off=71 header_field complete +off=74 len=6 span[header_value]="line2\t" +off=82 header_value complete +off=82 len=5 span[header_field]="Line3" +off=88 header_field complete +off=91 len=5 span[header_value]="line3" +off=98 header_value complete +off=98 len=5 span[header_field]="Line4" +off=104 header_field complete +off=110 len=0 span[header_value]="" +off=110 header_value complete +off=110 len=10 span[header_field]="Connection" +off=121 header_field complete +off=124 len=5 span[header_value]="close" +off=131 header_value complete +off=133 headers complete method=1 v=1/1 flags=2 content_length=0 +off=133 message complete +``` + +## Line folding in header value with LF + + + +```http +GET / HTTP/1.1 +Line1: abc\n\ +\tdef\n\ + ghi\n\ +\t\tjkl\n\ + mno \n\ +\t \tqrs\n\ +Line2: \t line2\t\n\ +Line3:\n\ + line3\n\ +Line4: \n\ + \n\ +Connection:\n\ + close\n\ +\n +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=5 span[header_field]="Line1" +off=22 header_field complete +off=25 len=3 span[header_value]="abc" +off=28 error code=25 reason="Missing expected CR after header value" +``` + +## No LF after CR + + + +```http +GET / HTTP/1.1\rLine: 1 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=15 error code=2 reason="Expected CRLF after version" +``` + +## No LF after CR (lenient) + + + +```http +GET / HTTP/1.1\rLine: 1 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=15 len=4 span[header_field]="Line" +off=20 header_field complete +off=21 len=1 span[header_value]="1" +``` + +## Request starting with CRLF + + +```http +\r\nGET /url HTTP/1.1 +Header1: Value1 + + +``` + +```log +off=2 message begin +off=2 len=3 span[method]="GET" +off=5 method complete +off=6 len=4 
span[url]="/url" +off=11 url complete +off=16 len=3 span[version]="1.1" +off=19 version complete +off=21 len=7 span[header_field]="Header1" +off=29 header_field complete +off=30 len=6 span[header_value]="Value1" +off=38 header_value complete +off=40 headers complete method=1 v=1/1 flags=0 content_length=0 +off=40 message complete +``` + +## Extended Characters + +See nodejs/test/parallel/test-http-headers-obstext.js + + +```http +GET / HTTP/1.1 +Test: Düsseldorf + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Test" +off=21 header_field complete +off=22 len=11 span[header_value]="Düsseldorf" +off=35 header_value complete +off=37 headers complete method=1 v=1/1 flags=0 content_length=0 +off=37 message complete +``` + +## 255 ASCII in header value + +Note: `Buffer.from([ 0xff ]).toString('latin1') === 'ÿ'`. + + +```http +OPTIONS /url HTTP/1.1 +Header1: Value1 +Header2: \xffValue2 + + +``` + +```log +off=0 message begin +off=0 len=7 span[method]="OPTIONS" +off=7 method complete +off=8 len=4 span[url]="/url" +off=13 url complete +off=18 len=3 span[version]="1.1" +off=21 version complete +off=23 len=7 span[header_field]="Header1" +off=31 header_field complete +off=32 len=6 span[header_value]="Value1" +off=40 header_value complete +off=40 len=7 span[header_field]="Header2" +off=48 header_field complete +off=49 len=8 span[header_value]="ÿValue2" +off=59 header_value complete +off=61 headers complete method=6 v=1/1 flags=0 content_length=0 +off=61 message complete +``` + +## X-SSL-Nonsense + +See nodejs/test/parallel/test-http-headers-obstext.js + + +```http +GET / HTTP/1.1 +X-SSL-Nonsense: -----BEGIN CERTIFICATE----- +\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx +\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT +\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu +\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV +\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV +\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB +\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF +\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR +\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL +\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP +\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR +\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG +\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgHTTPAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs +\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD +\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj +\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj +\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG +\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE +\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO +\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1 +\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0 +\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD +\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv +\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3 +\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8 
+\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk +\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK +\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu +\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3 +\tRA== +\t-----END CERTIFICATE----- + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=14 span[header_field]="X-SSL-Nonsense" +off=31 header_field complete +off=34 len=27 span[header_value]="-----BEGIN CERTIFICATE-----" +off=63 len=65 span[header_value]="\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx" +off=130 len=65 span[header_value]="\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT" +off=197 len=65 span[header_value]="\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu" +off=264 len=65 span[header_value]="\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV" +off=331 len=65 span[header_value]="\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV" +off=398 len=65 span[header_value]="\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB" +off=465 len=65 span[header_value]="\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF" +off=532 len=65 span[header_value]="\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR" +off=599 len=65 span[header_value]="\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL" +off=666 len=65 span[header_value]="\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP" +off=733 len=65 span[header_value]="\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR" +off=800 len=65 span[header_value]="\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG" +off=867 len=66 span[header_value]="\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgHTTPAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs" +off=935 len=65 span[header_value]="\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD" +off=1002 len=65 span[header_value]="\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj" +off=1069 len=65 span[header_value]="\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj" +off=1136 len=65 span[header_value]="\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG" +off=1203 len=65 span[header_value]="\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE" +off=1270 len=65 span[header_value]="\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO" +off=1337 len=65 span[header_value]="\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1" +off=1404 len=75 span[header_value]="\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0" +off=1481 len=65 span[header_value]="\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD" +off=1548 len=55 span[header_value]="\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv" +off=1605 len=65 span[header_value]="\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3" +off=1672 len=65 span[header_value]="\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8" +off=1739 len=65 span[header_value]="\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk" +off=1806 len=65 span[header_value]="\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK" +off=1873 len=65 
span[header_value]="\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu" +off=1940 len=65 span[header_value]="\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3" +off=2007 len=5 span[header_value]="\tRA==" +off=2014 len=26 span[header_value]="\t-----END CERTIFICATE-----" +off=2042 header_value complete +off=2044 headers complete method=1 v=1/1 flags=0 content_length=0 +off=2044 message complete +``` + +[0]: https://github.com/nodejs/http-parser diff --git a/llhttp/test/request/transfer-encoding.md b/llhttp/test/request/transfer-encoding.md new file mode 100644 index 0000000..0f839bc --- /dev/null +++ b/llhttp/test/request/transfer-encoding.md @@ -0,0 +1,1187 @@ +Transfer-Encoding header +======================== + +## `chunked` + +### Parsing and setting flag + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=208 content_length=0 +``` + +### Parse chunks with lowercase size + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + +a +0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=208 content_length=0 +off=52 chunk header len=10 +off=52 len=10 span[body]="0123456789" +off=64 chunk complete +off=67 chunk header len=0 +off=69 chunk complete +off=69 message complete +``` + +### Parse chunks with uppercase size + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + +A +0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=208 content_length=0 +off=52 chunk header len=10 +off=52 len=10 span[body]="0123456789" +off=64 chunk complete +off=67 chunk header len=0 +off=69 chunk complete +off=69 message complete +``` + +### POST with `Transfer-Encoding: chunked` + + +```http +POST /post_chunked_all_your_base HTTP/1.1 +Transfer-Encoding: chunked + +1e +all your base are belong to us +0 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=27 span[url]="/post_chunked_all_your_base" +off=33 url complete +off=38 len=3 span[version]="1.1" +off=41 version complete +off=43 len=17 span[header_field]="Transfer-Encoding" +off=61 header_field complete +off=62 len=7 span[header_value]="chunked" +off=71 header_value complete +off=73 headers complete method=3 v=1/1 flags=208 content_length=0 +off=77 chunk header len=30 +off=77 len=30 span[body]="all your base are belong to us" +off=109 chunk complete +off=112 chunk header len=0 +off=114 chunk complete +off=114 message complete +``` + 
+### Two chunks and triple zero prefixed end chunk + + +```http +POST /two_chunks_mult_zero_end HTTP/1.1 +Transfer-Encoding: chunked + +5 +hello +6 + world +000 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=25 span[url]="/two_chunks_mult_zero_end" +off=31 url complete +off=36 len=3 span[version]="1.1" +off=39 version complete +off=41 len=17 span[header_field]="Transfer-Encoding" +off=59 header_field complete +off=60 len=7 span[header_value]="chunked" +off=69 header_value complete +off=71 headers complete method=3 v=1/1 flags=208 content_length=0 +off=74 chunk header len=5 +off=74 len=5 span[body]="hello" +off=81 chunk complete +off=84 chunk header len=6 +off=84 len=6 span[body]=" world" +off=92 chunk complete +off=97 chunk header len=0 +off=99 chunk complete +off=99 message complete +``` + +### Trailing headers + + +```http +POST /chunked_w_trailing_headers HTTP/1.1 +Transfer-Encoding: chunked + +5 +hello +6 + world +0 +Vary: * +Content-Type: text/plain + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=27 span[url]="/chunked_w_trailing_headers" +off=33 url complete +off=38 len=3 span[version]="1.1" +off=41 version complete +off=43 len=17 span[header_field]="Transfer-Encoding" +off=61 header_field complete +off=62 len=7 span[header_value]="chunked" +off=71 header_value complete +off=73 headers complete method=3 v=1/1 flags=208 content_length=0 +off=76 chunk header len=5 +off=76 len=5 span[body]="hello" +off=83 chunk complete +off=86 chunk header len=6 +off=86 len=6 span[body]=" world" +off=94 chunk complete +off=97 chunk header len=0 +off=97 len=4 span[header_field]="Vary" +off=102 header_field complete +off=103 len=1 span[header_value]="*" +off=106 header_value complete +off=106 len=12 span[header_field]="Content-Type" +off=119 header_field complete +off=120 len=10 span[header_value]="text/plain" +off=132 header_value complete +off=134 chunk complete +off=134 message complete +``` + +### Chunk extensions + + +```http +POST /chunked_w_unicorns_after_length HTTP/1.1 +Transfer-Encoding: chunked + +5;ilovew3;somuchlove=aretheseparametersfor;another=withvalue +hello +6;blahblah;blah + world +0 + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=32 span[url]="/chunked_w_unicorns_after_length" +off=38 url complete +off=43 len=3 span[version]="1.1" +off=46 version complete +off=48 len=17 span[header_field]="Transfer-Encoding" +off=66 header_field complete +off=67 len=7 span[header_value]="chunked" +off=76 header_value complete +off=78 headers complete method=3 v=1/1 flags=208 content_length=0 +off=80 len=7 span[chunk_extension_name]="ilovew3" +off=88 chunk_extension_name complete +off=88 len=10 span[chunk_extension_name]="somuchlove" +off=99 chunk_extension_name complete +off=99 len=21 span[chunk_extension_value]="aretheseparametersfor" +off=121 chunk_extension_value complete +off=121 len=7 span[chunk_extension_name]="another" +off=129 chunk_extension_name complete +off=129 len=9 span[chunk_extension_value]="withvalue" +off=139 chunk_extension_value complete +off=140 chunk header len=5 +off=140 len=5 span[body]="hello" +off=147 chunk complete +off=149 len=8 span[chunk_extension_name]="blahblah" +off=158 chunk_extension_name complete +off=158 len=4 span[chunk_extension_name]="blah" +off=163 chunk_extension_name complete +off=164 chunk header len=6 +off=164 len=6 span[body]=" world" +off=172 chunk complete +off=175 chunk header 
len=0 +``` + +### No semicolon before chunk extensions + + +```http +POST /chunked_w_unicorns_after_length HTTP/1.1 +Host: localhost +Transfer-encoding: chunked + +2 erfrferferf +aa +0 rrrr + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=32 span[url]="/chunked_w_unicorns_after_length" +off=38 url complete +off=43 len=3 span[version]="1.1" +off=46 version complete +off=48 len=4 span[header_field]="Host" +off=53 header_field complete +off=54 len=9 span[header_value]="localhost" +off=65 header_value complete +off=65 len=17 span[header_field]="Transfer-encoding" +off=83 header_field complete +off=84 len=7 span[header_value]="chunked" +off=93 header_value complete +off=95 headers complete method=3 v=1/1 flags=208 content_length=0 +off=97 error code=12 reason="Invalid character in chunk size" +``` + +### No extension after semicolon + + +```http +POST /chunked_w_unicorns_after_length HTTP/1.1 +Host: localhost +Transfer-encoding: chunked + +2; +aa +0 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=32 span[url]="/chunked_w_unicorns_after_length" +off=38 url complete +off=43 len=3 span[version]="1.1" +off=46 version complete +off=48 len=4 span[header_field]="Host" +off=53 header_field complete +off=54 len=9 span[header_value]="localhost" +off=65 header_value complete +off=65 len=17 span[header_field]="Transfer-encoding" +off=83 header_field complete +off=84 len=7 span[header_value]="chunked" +off=93 header_value complete +off=95 headers complete method=3 v=1/1 flags=208 content_length=0 +off=98 error code=2 reason="Invalid character in chunk extensions" +``` + + +### Chunk extensions quoting + + +```http +POST /chunked_w_unicorns_after_length HTTP/1.1 +Transfer-Encoding: chunked + +5;ilovew3="I \"love\"; \\extensions\\";somuchlove="aretheseparametersfor";blah;foo=bar +hello +6;blahblah;blah + world +0 + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=32 span[url]="/chunked_w_unicorns_after_length" +off=38 url complete +off=43 len=3 span[version]="1.1" +off=46 version complete +off=48 len=17 span[header_field]="Transfer-Encoding" +off=66 header_field complete +off=67 len=7 span[header_value]="chunked" +off=76 header_value complete +off=78 headers complete method=3 v=1/1 flags=208 content_length=0 +off=80 len=7 span[chunk_extension_name]="ilovew3" +off=88 chunk_extension_name complete +off=88 len=28 span[chunk_extension_value]=""I \"love\"; \\extensions\\"" +off=116 chunk_extension_value complete +off=117 len=10 span[chunk_extension_name]="somuchlove" +off=128 chunk_extension_name complete +off=128 len=23 span[chunk_extension_value]=""aretheseparametersfor"" +off=151 chunk_extension_value complete +off=152 len=4 span[chunk_extension_name]="blah" +off=157 chunk_extension_name complete +off=157 len=3 span[chunk_extension_name]="foo" +off=161 chunk_extension_name complete +off=161 len=3 span[chunk_extension_value]="bar" +off=165 chunk_extension_value complete +off=166 chunk header len=5 +off=166 len=5 span[body]="hello" +off=173 chunk complete +off=175 len=8 span[chunk_extension_name]="blahblah" +off=184 chunk_extension_name complete +off=184 len=4 span[chunk_extension_name]="blah" +off=189 chunk_extension_name complete +off=190 chunk header len=6 +off=190 len=6 span[body]=" world" +off=198 chunk complete +off=201 chunk header len=0 +``` + + +### Unbalanced chunk extensions quoting + + +```http +POST /chunked_w_unicorns_after_length 
HTTP/1.1 +Transfer-Encoding: chunked + +5;ilovew3="abc";somuchlove="def; ghi +hello +6;blahblah;blah + world +0 + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=32 span[url]="/chunked_w_unicorns_after_length" +off=38 url complete +off=43 len=3 span[version]="1.1" +off=46 version complete +off=48 len=17 span[header_field]="Transfer-Encoding" +off=66 header_field complete +off=67 len=7 span[header_value]="chunked" +off=76 header_value complete +off=78 headers complete method=3 v=1/1 flags=208 content_length=0 +off=80 len=7 span[chunk_extension_name]="ilovew3" +off=88 chunk_extension_name complete +off=88 len=5 span[chunk_extension_value]=""abc"" +off=93 chunk_extension_value complete +off=94 len=10 span[chunk_extension_name]="somuchlove" +off=105 chunk_extension_name complete +off=105 len=9 span[chunk_extension_value]=""def; ghi" +off=115 error code=2 reason="Invalid character in chunk extensions quoted value" +``` + +## Ignoring `pigeons` + +Requests cannot have invalid `Transfer-Encoding`. It is impossible to determine +their body size. Not erroring would make HTTP smuggling attacks possible. + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: pigeons + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="pigeons" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=200 content_length=0 +off=49 error code=15 reason="Request has invalid `Transfer-Encoding`" +``` + +## POST with `Transfer-Encoding` and `Content-Length` + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: identity +Content-Length: 5 + +World +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=8 span[header_value]="identity" +off=96 header_value complete +off=96 len=14 span[header_field]="Content-Length" +off=111 header_field complete +off=111 error code=11 reason="Content-Length can't be present with Transfer-Encoding" +``` + +## POST with `Transfer-Encoding` and `Content-Length` (lenient) + +TODO(indutny): should we allow it even in lenient mode? (Consider disabling +this). + +NOTE: `Content-Length` is ignored when `Transfer-Encoding` is present. Messages +(in lenient mode) are read until EOF. 
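+
+A minimal sketch of driving this case from the C API follows (the fixture
+itself is below). Which lenient switch the test harness flips for the
+"(lenient)" fixtures is not recorded here; `llhttp_set_lenient_chunked_length()`
+is the one documented for conflicting `Transfer-Encoding`/`Content-Length`
+headers, so the sketch assumes that one:
+
+```c
+#include <stdio.h>
+#include <string.h>
+
+#include "llhttp.h"
+
+int main(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  llhttp_init(&parser, HTTP_REQUEST, &settings);
+
+  /* Assumed lenient switch for the Transfer-Encoding/Content-Length
+   * conflict; the fixture does not name the flag it relies on. */
+  llhttp_set_lenient_chunked_length(&parser, 1);
+
+  /* Same bytes as the fixture below. */
+  const char* req =
+      "POST /post_identity_body_world?q=search#hey HTTP/1.1\r\n"
+      "Accept: */*\r\n"
+      "Transfer-Encoding: identity\r\n"
+      "Content-Length: 1\r\n"
+      "\r\n"
+      "World";
+
+  llhttp_errno_t err = llhttp_execute(&parser, req, strlen(req));
+  printf("result: %s\n", llhttp_errno_name(err));
+  return err == HPE_OK ? 0 : 1;
+}
+```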
+ + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: identity +Content-Length: 1 + +World +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=8 span[header_value]="identity" +off=96 header_value complete +off=96 len=14 span[header_field]="Content-Length" +off=111 header_field complete +off=112 len=1 span[header_value]="1" +off=115 header_value complete +off=117 headers complete method=3 v=1/1 flags=220 content_length=1 +off=117 len=5 span[body]="World" +``` + +## POST with empty `Transfer-Encoding` and `Content-Length` (lenient) + + +```http +POST / HTTP/1.1 +Host: foo +Content-Length: 10 +Transfer-Encoding: +Transfer-Encoding: +Transfer-Encoding: + +2 +AA +0 +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=1 span[url]="/" +off=7 url complete +off=12 len=3 span[version]="1.1" +off=15 version complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=3 span[header_value]="foo" +off=28 header_value complete +off=28 len=14 span[header_field]="Content-Length" +off=43 header_field complete +off=44 len=2 span[header_value]="10" +off=48 header_value complete +off=48 len=17 span[header_field]="Transfer-Encoding" +off=66 header_field complete +off=66 error code=15 reason="Transfer-Encoding can't be present with Content-Length" +``` + +## POST with `chunked` before other transfer coding names + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: chunked, deflate + +World +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=7 span[header_value]="chunked" +off=94 error code=15 reason="Invalid `Transfer-Encoding` header value" +``` + +## POST with `chunked` and duplicate transfer-encoding + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: chunked +Transfer-Encoding: deflate + +World +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=7 span[header_value]="chunked" +off=95 header_value complete +off=95 len=17 span[header_field]="Transfer-Encoding" +off=113 header_field complete +off=114 len=0 span[header_value]="" +off=115 error code=15 reason="Invalid `Transfer-Encoding` header value" +``` + +## POST with `chunked` before other 
transfer-coding (lenient) + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: chunked, deflate + +World +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=16 span[header_value]="chunked, deflate" +off=104 header_value complete +off=106 headers complete method=3 v=1/1 flags=200 content_length=0 +off=106 len=5 span[body]="World" +``` + +## POST with `chunked` and duplicate transfer-encoding (lenient) + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: chunked +Transfer-Encoding: deflate + +World +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=7 span[header_value]="chunked" +off=95 header_value complete +off=95 len=17 span[header_field]="Transfer-Encoding" +off=113 header_field complete +off=114 len=7 span[header_value]="deflate" +off=123 header_value complete +off=125 headers complete method=3 v=1/1 flags=200 content_length=0 +off=125 len=5 span[body]="World" +``` + +## POST with `chunked` as last transfer-encoding + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: deflate, chunked + +5 +World +0 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=16 span[header_value]="deflate, chunked" +off=104 header_value complete +off=106 headers complete method=3 v=1/1 flags=208 content_length=0 +off=109 chunk header len=5 +off=109 len=5 span[body]="World" +off=116 chunk complete +off=119 chunk header len=0 +off=121 chunk complete +off=121 message complete +``` + +## POST with `chunked` as last transfer-encoding (multiple headers) + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: deflate +Transfer-Encoding: chunked + +5 +World +0 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=7 span[header_value]="deflate" +off=95 header_value complete 
+off=95 len=17 span[header_field]="Transfer-Encoding" +off=113 header_field complete +off=114 len=7 span[header_value]="chunked" +off=123 header_value complete +off=125 headers complete method=3 v=1/1 flags=208 content_length=0 +off=128 chunk header len=5 +off=128 len=5 span[body]="World" +off=135 chunk complete +off=138 chunk header len=0 +off=140 chunk complete +off=140 message complete +``` + +## POST with `chunkedchunked` as transfer-encoding + + +```http +POST /post_identity_body_world?q=search#hey HTTP/1.1 +Accept: */* +Transfer-Encoding: chunkedchunked + +5 +World +0 + + +``` + +```log +off=0 message begin +off=0 len=4 span[method]="POST" +off=4 method complete +off=5 len=38 span[url]="/post_identity_body_world?q=search#hey" +off=44 url complete +off=49 len=3 span[version]="1.1" +off=52 version complete +off=54 len=6 span[header_field]="Accept" +off=61 header_field complete +off=62 len=3 span[header_value]="*/*" +off=67 header_value complete +off=67 len=17 span[header_field]="Transfer-Encoding" +off=85 header_field complete +off=86 len=14 span[header_value]="chunkedchunked" +off=102 header_value complete +off=104 headers complete method=3 v=1/1 flags=200 content_length=0 +off=104 error code=15 reason="Request has invalid `Transfer-Encoding`" +``` + +## Missing last-chunk + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + +3 +foo + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=208 content_length=0 +off=52 chunk header len=3 +off=52 len=3 span[body]="foo" +off=57 chunk complete +off=57 error code=12 reason="Invalid character in chunk size" +``` + +## Validate chunk parameters + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + +3 \n \r\n\ +foo + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=208 content_length=0 +off=51 error code=12 reason="Invalid character in chunk size" +``` + +## Invalid OBS fold after chunked value + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + abc + +5 +World +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 len=5 span[header_value]=" abc" +off=54 header_value complete +off=56 headers complete method=4 v=1/1 flags=200 content_length=0 +off=56 error code=15 reason="Request has invalid `Transfer-Encoding`" +``` + +### Chunk header not terminated by CRLF + + + +```http +GET / HTTP/1.1 +Host: a +Connection: close +Transfer-Encoding: chunked + +5\r\r;ABCD +34 +E +0 + +GET / HTTP/1.1 +Host: a +Content-Length: 5 + +0 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 
url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Host" +off=21 header_field complete +off=22 len=1 span[header_value]="a" +off=25 header_value complete +off=25 len=10 span[header_field]="Connection" +off=36 header_field complete +off=37 len=6 span[header_value]="close " +off=45 header_value complete +off=45 len=17 span[header_field]="Transfer-Encoding" +off=63 header_field complete +off=64 len=8 span[header_value]="chunked " +off=74 header_value complete +off=76 headers complete method=1 v=1/1 flags=20a content_length=0 +off=78 error code=2 reason="Expected LF after chunk size" +``` + +### Chunk header not terminated by CRLF (lenient) + + + +```http +GET / HTTP/1.1 +Host: a +Connection: close +Transfer-Encoding: chunked + +6\r\r;ABCD +33 +E +0 + +GET / HTTP/1.1 +Host: a +Content-Length: 5 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Host" +off=21 header_field complete +off=22 len=1 span[header_value]="a" +off=25 header_value complete +off=25 len=10 span[header_field]="Connection" +off=36 header_field complete +off=37 len=6 span[header_value]="close " +off=45 header_value complete +off=45 len=17 span[header_field]="Transfer-Encoding" +off=63 header_field complete +off=64 len=8 span[header_value]="chunked " +off=74 header_value complete +off=76 headers complete method=1 v=1/1 flags=20a content_length=0 +off=78 chunk header len=6 +off=78 len=1 span[body]=cr +off=79 len=5 span[body]=";ABCD" +off=86 chunk complete +off=90 chunk header len=51 +off=90 len=1 span[body]="E" +off=91 len=1 span[body]=cr +off=92 len=1 span[body]=lf +off=93 len=1 span[body]="0" +off=94 len=1 span[body]=cr +off=95 len=1 span[body]=lf +off=96 len=1 span[body]=cr +off=97 len=1 span[body]=lf +off=98 len=15 span[body]="GET / HTTP/1.1 " +off=113 len=1 span[body]=cr +off=114 len=1 span[body]=lf +off=115 len=7 span[body]="Host: a" +off=122 len=1 span[body]=cr +off=123 len=1 span[body]=lf +off=124 len=17 span[body]="Content-Length: 5" +off=143 chunk complete +off=146 chunk header len=0 +off=148 chunk complete +off=148 message complete +``` + +### Chunk data not terminated by CRLF + + + +```http +GET / HTTP/1.1 +Host: a +Connection: close +Transfer-Encoding: chunked + +5 +ABCDE0 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete +off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Host" +off=21 header_field complete +off=22 len=1 span[header_value]="a" +off=25 header_value complete +off=25 len=10 span[header_field]="Connection" +off=36 header_field complete +off=37 len=6 span[header_value]="close " +off=45 header_value complete +off=45 len=17 span[header_field]="Transfer-Encoding" +off=63 header_field complete +off=64 len=8 span[header_value]="chunked " +off=74 header_value complete +off=76 headers complete method=1 v=1/1 flags=20a content_length=0 +off=79 chunk header len=5 +off=79 len=5 span[body]="ABCDE" +off=84 error code=2 reason="Expected LF after chunk data" +``` + +### Chunk data not terminated by CRLF (lenient) + + + +```http +GET / HTTP/1.1 +Host: a +Connection: close +Transfer-Encoding: chunked + +5 +ABCDE0 + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=1 span[url]="/" +off=6 url complete 
+off=11 len=3 span[version]="1.1" +off=14 version complete +off=16 len=4 span[header_field]="Host" +off=21 header_field complete +off=22 len=1 span[header_value]="a" +off=25 header_value complete +off=25 len=10 span[header_field]="Connection" +off=36 header_field complete +off=37 len=6 span[header_value]="close " +off=45 header_value complete +off=45 len=17 span[header_field]="Transfer-Encoding" +off=63 header_field complete +off=64 len=8 span[header_value]="chunked " +off=74 header_value complete +off=76 headers complete method=1 v=1/1 flags=20a content_length=0 +off=79 chunk header len=5 +off=79 len=5 span[body]="ABCDE" +off=84 chunk complete +off=87 chunk header len=0 +``` + +## Space after chunk header + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + +a \r\n0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=208 content_length=0 +off=51 error code=12 reason="Invalid character in chunk size" +``` + +## Space after chunk header (lenient) + + +```http +PUT /url HTTP/1.1 +Transfer-Encoding: chunked + +a \r\n0123456789 +0 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="PUT" +off=3 method complete +off=4 len=4 span[url]="/url" +off=9 url complete +off=14 len=3 span[version]="1.1" +off=17 version complete +off=19 len=17 span[header_field]="Transfer-Encoding" +off=37 header_field complete +off=38 len=7 span[header_value]="chunked" +off=47 header_value complete +off=49 headers complete method=4 v=1/1 flags=208 content_length=0 +off=53 chunk header len=10 +off=53 len=10 span[body]="0123456789" +off=65 chunk complete +off=68 chunk header len=0 +off=70 chunk complete +off=70 message complete +``` diff --git a/llhttp/test/request/uri.md b/llhttp/test/request/uri.md new file mode 100644 index 0000000..f7f12b0 --- /dev/null +++ b/llhttp/test/request/uri.md @@ -0,0 +1,243 @@ +URI +=== + +## Quotes in URI + + +```http +GET /with_"lovely"_quotes?foo=\"bar\" HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=33 span[url]="/with_"lovely"_quotes?foo=\"bar\"" +off=38 url complete +off=43 len=3 span[version]="1.1" +off=46 version complete +off=50 headers complete method=1 v=1/1 flags=0 content_length=0 +off=50 message complete +``` + +## Query URL with question mark + +Some clients include `?` characters in query strings. 
+ + +```http +GET /test.cgi?foo=bar?baz HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=21 span[url]="/test.cgi?foo=bar?baz" +off=26 url complete +off=31 len=3 span[version]="1.1" +off=34 version complete +off=38 headers complete method=1 v=1/1 flags=0 content_length=0 +off=38 message complete +``` + +## Host terminated by a query string + + +```http +GET http://hypnotoad.org?hail=all HTTP/1.1\r\n + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=29 span[url]="http://hypnotoad.org?hail=all" +off=34 url complete +off=39 len=3 span[version]="1.1" +off=42 version complete +off=46 headers complete method=1 v=1/1 flags=0 content_length=0 +off=46 message complete +``` + +## `host:port` terminated by a query string + + +```http +GET http://hypnotoad.org:1234?hail=all HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=34 span[url]="http://hypnotoad.org:1234?hail=all" +off=39 url complete +off=44 len=3 span[version]="1.1" +off=47 version complete +off=51 headers complete method=1 v=1/1 flags=0 content_length=0 +off=51 message complete +``` + +## Query URL with vertical bar character + +It should be allowed to have vertical bar symbol in URI: `|`. + +See: https://github.com/nodejs/node/issues/27584 + + +```http +GET /test.cgi?query=| HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=17 span[url]="/test.cgi?query=|" +off=22 url complete +off=27 len=3 span[version]="1.1" +off=30 version complete +off=34 headers complete method=1 v=1/1 flags=0 content_length=0 +off=34 message complete +``` + +## `host:port` terminated by a space + + +```http +GET http://hypnotoad.org:1234 HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=25 span[url]="http://hypnotoad.org:1234" +off=30 url complete +off=35 len=3 span[version]="1.1" +off=38 version complete +off=42 headers complete method=1 v=1/1 flags=0 content_length=0 +off=42 message complete +``` + +## Disallow UTF-8 in URI path in strict mode + + +```http +GET /δ¶/δt/pope?q=1#narf HTTP/1.1 +Host: github.com + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=5 error code=7 reason="Invalid char in url path" +``` + +## Fragment in URI + + +```http +GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=40 span[url]="/forums/1/topics/2375?page=1#posts-17408" +off=45 url complete +off=50 len=3 span[version]="1.1" +off=53 version complete +off=57 headers complete method=1 v=1/1 flags=0 content_length=0 +off=57 message complete +``` + +## Underscore in hostname + + +```http +CONNECT home_0.netscape.com:443 HTTP/1.0 +User-agent: Mozilla/1.1N +Proxy-authorization: basic aGVsbG86d29ybGQ= + + +``` + +```log +off=0 message begin +off=0 len=7 span[method]="CONNECT" +off=7 method complete +off=8 len=23 span[url]="home_0.netscape.com:443" +off=32 url complete +off=37 len=3 span[version]="1.0" +off=40 version complete +off=42 len=10 span[header_field]="User-agent" +off=53 header_field complete +off=54 len=12 span[header_value]="Mozilla/1.1N" +off=68 header_value complete +off=68 len=19 span[header_field]="Proxy-authorization" +off=88 header_field complete +off=89 len=22 span[header_value]="basic aGVsbG86d29ybGQ=" 
+off=113 header_value complete +off=115 headers complete method=5 v=1/0 flags=0 content_length=0 +off=115 message complete +off=115 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +## `host:port` and basic auth + + +```http +GET http://a%12:b!&*$@hypnotoad.org:1234/toto HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=41 span[url]="http://a%12:b!&*$@hypnotoad.org:1234/toto" +off=46 url complete +off=51 len=3 span[version]="1.1" +off=54 version complete +off=58 headers complete method=1 v=1/1 flags=0 content_length=0 +off=58 message complete +``` + +## Space in URI + + +```http +GET /foo bar/ HTTP/1.1 + + +``` + +```log +off=0 message begin +off=0 len=3 span[method]="GET" +off=3 method complete +off=4 len=4 span[url]="/foo" +off=9 url complete +off=9 error code=8 reason="Expected HTTP/" +``` diff --git a/llhttp/test/response/connection.md b/llhttp/test/response/connection.md new file mode 100644 index 0000000..11f9eb6 --- /dev/null +++ b/llhttp/test/response/connection.md @@ -0,0 +1,647 @@ +Connection header +================= + +## Proxy-Connection + + +```http +HTTP/1.1 200 OK +Content-Type: text/html; charset=UTF-8 +Content-Length: 11 +Proxy-Connection: close +Date: Thu, 31 Dec 2009 20:55:48 +0000 + +hello world +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=12 span[header_field]="Content-Type" +off=30 header_field complete +off=31 len=24 span[header_value]="text/html; charset=UTF-8" +off=57 header_value complete +off=57 len=14 span[header_field]="Content-Length" +off=72 header_field complete +off=73 len=2 span[header_value]="11" +off=77 header_value complete +off=77 len=16 span[header_field]="Proxy-Connection" +off=94 header_field complete +off=95 len=5 span[header_value]="close" +off=102 header_value complete +off=102 len=4 span[header_field]="Date" +off=107 header_field complete +off=108 len=31 span[header_value]="Thu, 31 Dec 2009 20:55:48 +0000" +off=141 header_value complete +off=143 headers complete status=200 v=1/1 flags=22 content_length=11 +off=143 len=11 span[body]="hello world" +off=154 message complete +``` + +## HTTP/1.0 with keep-alive and EOF-terminated 200 status + +There is no `Content-Length` in this response, so even though the +`keep-alive` is on - it should read until EOF. + + +```http +HTTP/1.0 200 OK +Connection: keep-alive + +HTTP/1.0 200 OK +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.0" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=10 span[header_field]="Connection" +off=28 header_field complete +off=29 len=10 span[header_value]="keep-alive" +off=41 header_value complete +off=43 headers complete status=200 v=1/0 flags=1 content_length=0 +off=43 len=15 span[body]="HTTP/1.0 200 OK" +``` + +## HTTP/1.0 with keep-alive and 204 status + +Responses with `204` status cannot have a body. 
+ + +```http +HTTP/1.0 204 No content +Connection: keep-alive + +HTTP/1.0 200 OK +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.0" +off=8 version complete +off=13 len=10 span[status]="No content" +off=25 status complete +off=25 len=10 span[header_field]="Connection" +off=36 header_field complete +off=37 len=10 span[header_value]="keep-alive" +off=49 header_value complete +off=51 headers complete status=204 v=1/0 flags=1 content_length=0 +off=51 message complete +off=51 reset +off=51 message begin +off=56 len=3 span[version]="1.0" +off=59 version complete +off=64 len=2 span[status]="OK" +``` + +## HTTP/1.1 with EOF-terminated 200 status + +There is no `Content-Length` in this response, so even though the +`keep-alive` is on (implicitly in HTTP 1.1) - it should read until EOF. + + +```http +HTTP/1.1 200 OK + +HTTP/1.1 200 OK +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=19 headers complete status=200 v=1/1 flags=0 content_length=0 +off=19 len=15 span[body]="HTTP/1.1 200 OK" +``` + +## HTTP/1.1 with 204 status + +Responses with `204` status cannot have a body. + + +```http +HTTP/1.1 204 No content + +HTTP/1.1 200 OK +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=10 span[status]="No content" +off=25 status complete +off=27 headers complete status=204 v=1/1 flags=0 content_length=0 +off=27 message complete +off=27 reset +off=27 message begin +off=32 len=3 span[version]="1.1" +off=35 version complete +off=40 len=2 span[status]="OK" +``` + +## HTTP/1.1 with keep-alive disabled and 204 status + + +```http +HTTP/1.1 204 No content +Connection: close + +HTTP/1.1 200 OK +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=10 span[status]="No content" +off=25 status complete +off=25 len=10 span[header_field]="Connection" +off=36 header_field complete +off=37 len=5 span[header_value]="close" +off=44 header_value complete +off=46 headers complete status=204 v=1/1 flags=2 content_length=0 +off=46 message complete +off=47 error code=5 reason="Data after `Connection: close`" +``` + +## HTTP/1.1 with keep-alive disabled, content-length (lenient) + +Parser should discard extra request in lenient mode. + + +```http +HTTP/1.1 200 No content +Content-Length: 5 +Connection: close + +2ad731e3-4dcd-4f70-b871-0ad284b29ffc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=10 span[status]="No content" +off=25 status complete +off=25 len=14 span[header_field]="Content-Length" +off=40 header_field complete +off=41 len=1 span[header_value]="5" +off=44 header_value complete +off=44 len=10 span[header_field]="Connection" +off=55 header_field complete +off=56 len=5 span[header_value]="close" +off=63 header_value complete +off=65 headers complete status=200 v=1/1 flags=22 content_length=5 +off=65 len=5 span[body]="2ad73" +off=70 message complete +``` + +## HTTP/1.1 with keep-alive disabled, content-length + +Parser should discard extra request in strict mode. 
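+
+The strict behaviour can be observed directly through `llhttp_execute()`'s
+return code. A minimal sketch (illustrative only; the error offset is derived
+from `llhttp_get_error_pos()`, which points into the input buffer):
+
+```c
+#include <stdio.h>
+#include <string.h>
+
+#include "llhttp.h"
+
+int main(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  llhttp_init(&parser, HTTP_RESPONSE, &settings);
+
+  /* Same bytes as the strict fixture below: a `Connection: close`
+   * response followed by extra data on the wire. */
+  const char* res =
+      "HTTP/1.1 200 No content\r\n"
+      "Content-Length: 5\r\n"
+      "Connection: close\r\n"
+      "\r\n"
+      "2ad731e3-4dcd-4f70-b871-0ad284b29ffc";
+
+  llhttp_errno_t err = llhttp_execute(&parser, res, strlen(res));
+  if (err != HPE_OK) {
+    /* Comparable to the trailing error line in the expected trace. */
+    fprintf(stderr, "off=%ld error %s: %s\n",
+            (long) (llhttp_get_error_pos(&parser) - res),
+            llhttp_errno_name(err), llhttp_get_error_reason(&parser));
+    return 1;
+  }
+  return 0;
+}
+```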
+ + +```http +HTTP/1.1 200 No content +Content-Length: 5 +Connection: close + +2ad731e3-4dcd-4f70-b871-0ad284b29ffc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=10 span[status]="No content" +off=25 status complete +off=25 len=14 span[header_field]="Content-Length" +off=40 header_field complete +off=41 len=1 span[header_value]="5" +off=44 header_value complete +off=44 len=10 span[header_field]="Connection" +off=55 header_field complete +off=56 len=5 span[header_value]="close" +off=63 header_value complete +off=65 headers complete status=200 v=1/1 flags=22 content_length=5 +off=65 len=5 span[body]="2ad73" +off=70 message complete +off=71 error code=5 reason="Data after `Connection: close`" +``` + +## HTTP/1.1 with keep-alive disabled and 204 status (lenient) + + +```http +HTTP/1.1 204 No content +Connection: close + +HTTP/1.1 200 OK +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=10 span[status]="No content" +off=25 status complete +off=25 len=10 span[header_field]="Connection" +off=36 header_field complete +off=37 len=5 span[header_value]="close" +off=44 header_value complete +off=46 headers complete status=204 v=1/1 flags=2 content_length=0 +off=46 message complete +off=46 reset +off=46 message begin +off=51 len=3 span[version]="1.1" +off=54 version complete +off=59 len=2 span[status]="OK" +``` + +## HTTP 101 response with Upgrade and Content-Length header + + +```http +HTTP/1.1 101 Switching Protocols +Connection: upgrade +Upgrade: h2c +Content-Length: 4 + +body\ +proto +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=19 span[status]="Switching Protocols" +off=34 status complete +off=34 len=10 span[header_field]="Connection" +off=45 header_field complete +off=46 len=7 span[header_value]="upgrade" +off=55 header_value complete +off=55 len=7 span[header_field]="Upgrade" +off=63 header_field complete +off=64 len=3 span[header_value]="h2c" +off=69 header_value complete +off=69 len=14 span[header_field]="Content-Length" +off=84 header_field complete +off=85 len=1 span[header_value]="4" +off=88 header_value complete +off=90 headers complete status=101 v=1/1 flags=34 content_length=4 +off=90 message complete +off=90 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +## HTTP 101 response with Upgrade and Transfer-Encoding header + + +```http +HTTP/1.1 101 Switching Protocols +Connection: upgrade +Upgrade: h2c +Transfer-Encoding: chunked + +2 +bo +2 +dy +0 + +proto +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=19 span[status]="Switching Protocols" +off=34 status complete +off=34 len=10 span[header_field]="Connection" +off=45 header_field complete +off=46 len=7 span[header_value]="upgrade" +off=55 header_value complete +off=55 len=7 span[header_field]="Upgrade" +off=63 header_field complete +off=64 len=3 span[header_value]="h2c" +off=69 header_value complete +off=69 len=17 span[header_field]="Transfer-Encoding" +off=87 header_field complete +off=88 len=7 span[header_value]="chunked" +off=97 header_value complete +off=99 headers complete status=101 v=1/1 flags=21c content_length=0 +off=99 message complete +off=99 error code=22 reason="Pause on CONNECT/Upgrade" +``` + +## HTTP 200 response with Upgrade header + + +```http +HTTP/1.1 200 OK +Connection: upgrade +Upgrade: h2c + +body +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version 
complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=10 span[header_field]="Connection" +off=28 header_field complete +off=29 len=7 span[header_value]="upgrade" +off=38 header_value complete +off=38 len=7 span[header_field]="Upgrade" +off=46 header_field complete +off=47 len=3 span[header_value]="h2c" +off=52 header_value complete +off=54 headers complete status=200 v=1/1 flags=14 content_length=0 +off=54 len=4 span[body]="body" +``` + +## HTTP 200 response with Upgrade header and Content-Length + + +```http +HTTP/1.1 200 OK +Connection: upgrade +Upgrade: h2c +Content-Length: 4 + +body +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=10 span[header_field]="Connection" +off=28 header_field complete +off=29 len=7 span[header_value]="upgrade" +off=38 header_value complete +off=38 len=7 span[header_field]="Upgrade" +off=46 header_field complete +off=47 len=3 span[header_value]="h2c" +off=52 header_value complete +off=52 len=14 span[header_field]="Content-Length" +off=67 header_field complete +off=68 len=1 span[header_value]="4" +off=71 header_value complete +off=73 headers complete status=200 v=1/1 flags=34 content_length=4 +off=73 len=4 span[body]="body" +off=77 message complete +``` + +## HTTP 200 response with Upgrade header and Transfer-Encoding + + +```http +HTTP/1.1 200 OK +Connection: upgrade +Upgrade: h2c +Transfer-Encoding: chunked + +2 +bo +2 +dy +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=10 span[header_field]="Connection" +off=28 header_field complete +off=29 len=7 span[header_value]="upgrade" +off=38 header_value complete +off=38 len=7 span[header_field]="Upgrade" +off=46 header_field complete +off=47 len=3 span[header_value]="h2c" +off=52 header_value complete +off=52 len=17 span[header_field]="Transfer-Encoding" +off=70 header_field complete +off=71 len=7 span[header_value]="chunked" +off=80 header_value complete +off=82 headers complete status=200 v=1/1 flags=21c content_length=0 +off=85 chunk header len=2 +off=85 len=2 span[body]="bo" +off=89 chunk complete +off=92 chunk header len=2 +off=92 len=2 span[body]="dy" +off=96 chunk complete +off=99 chunk header len=0 +off=101 chunk complete +off=101 message complete +``` + +## HTTP 304 with Content-Length + + +```http +HTTP/1.1 304 Not Modified +Content-Length: 10 + + +HTTP/1.1 200 OK +Content-Length: 5 + +hello +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=12 span[status]="Not Modified" +off=27 status complete +off=27 len=14 span[header_field]="Content-Length" +off=42 header_field complete +off=43 len=2 span[header_value]="10" +off=47 header_value complete +off=49 headers complete status=304 v=1/1 flags=20 content_length=10 +off=49 message complete +off=51 reset +off=51 message begin +off=56 len=3 span[version]="1.1" +off=59 version complete +off=64 len=2 span[status]="OK" +off=68 status complete +off=68 len=14 span[header_field]="Content-Length" +off=83 header_field complete +off=84 len=1 span[header_value]="5" +off=87 header_value complete +off=89 headers complete status=200 v=1/1 flags=20 content_length=5 +off=89 len=5 span[body]="hello" +off=94 message complete +``` + +## HTTP 304 with Transfer-Encoding + + +```http +HTTP/1.1 304 Not Modified +Transfer-Encoding: chunked + +HTTP/1.1 200 OK +Transfer-Encoding: 
chunked + +5 +hello +0 + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=12 span[status]="Not Modified" +off=27 status complete +off=27 len=17 span[header_field]="Transfer-Encoding" +off=45 header_field complete +off=46 len=7 span[header_value]="chunked" +off=55 header_value complete +off=57 headers complete status=304 v=1/1 flags=208 content_length=0 +off=57 message complete +off=57 reset +off=57 message begin +off=62 len=3 span[version]="1.1" +off=65 version complete +off=70 len=2 span[status]="OK" +off=74 status complete +off=74 len=17 span[header_field]="Transfer-Encoding" +off=92 header_field complete +off=93 len=7 span[header_value]="chunked" +off=102 header_value complete +off=104 headers complete status=200 v=1/1 flags=208 content_length=0 +off=107 chunk header len=5 +off=107 len=5 span[body]="hello" +off=114 chunk complete +off=117 chunk header len=0 +``` + +## HTTP 100 first, then 400 + + +```http +HTTP/1.1 100 Continue + + +HTTP/1.1 404 Not Found +Content-Type: text/plain; charset=utf-8 +Content-Length: 14 +Date: Fri, 15 Sep 2023 19:47:23 GMT +Server: Python/3.10 aiohttp/4.0.0a2.dev0 + +404: Not Found +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=8 span[status]="Continue" +off=23 status complete +off=25 headers complete status=100 v=1/1 flags=0 content_length=0 +off=25 message complete +off=27 reset +off=27 message begin +off=32 len=3 span[version]="1.1" +off=35 version complete +off=40 len=9 span[status]="Not Found" +off=51 status complete +off=51 len=12 span[header_field]="Content-Type" +off=64 header_field complete +off=65 len=25 span[header_value]="text/plain; charset=utf-8" +off=92 header_value complete +off=92 len=14 span[header_field]="Content-Length" +off=107 header_field complete +off=108 len=2 span[header_value]="14" +off=112 header_value complete +off=112 len=4 span[header_field]="Date" +off=117 header_field complete +off=118 len=29 span[header_value]="Fri, 15 Sep 2023 19:47:23 GMT" +off=149 header_value complete +off=149 len=6 span[header_field]="Server" +off=156 header_field complete +off=157 len=32 span[header_value]="Python/3.10 aiohttp/4.0.0a2.dev0" +off=191 header_value complete +off=193 headers complete status=404 v=1/1 flags=20 content_length=14 +off=193 len=14 span[body]="404: Not Found" +off=207 message complete +``` + +## HTTP 103 first, then 200 + + +```http +HTTP/1.1 103 Early Hints +Link: ; rel=preload; as=style + +HTTP/1.1 200 OK +Date: Wed, 13 Sep 2023 11:09:41 GMT +Connection: keep-alive +Keep-Alive: timeout=5 +Content-Length: 17 + +response content +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=11 span[status]="Early Hints" +off=26 status complete +off=26 len=4 span[header_field]="Link" +off=31 header_field complete +off=32 len=36 span[header_value]="; rel=preload; as=style" +off=70 header_value complete +off=72 headers complete status=103 v=1/1 flags=0 content_length=0 +off=72 message complete +off=72 reset +off=72 message begin +off=77 len=3 span[version]="1.1" +off=80 version complete +off=85 len=2 span[status]="OK" +off=89 status complete +off=89 len=4 span[header_field]="Date" +off=94 header_field complete +off=95 len=29 span[header_value]="Wed, 13 Sep 2023 11:09:41 GMT" +off=126 header_value complete +off=126 len=10 span[header_field]="Connection" +off=137 header_field complete +off=138 len=10 span[header_value]="keep-alive" +off=150 header_value complete +off=150 len=10 
span[header_field]="Keep-Alive" +off=161 header_field complete +off=162 len=9 span[header_value]="timeout=5" +off=173 header_value complete +off=173 len=14 span[header_field]="Content-Length" +off=188 header_field complete +off=189 len=2 span[header_value]="17" +off=193 header_value complete +off=195 headers complete status=200 v=1/1 flags=21 content_length=17 +off=195 len=16 span[body]="response content" +``` \ No newline at end of file diff --git a/llhttp/test/response/content-length.md b/llhttp/test/response/content-length.md new file mode 100644 index 0000000..6c33924 --- /dev/null +++ b/llhttp/test/response/content-length.md @@ -0,0 +1,158 @@ +Content-Length header +===================== + +## Response without `Content-Length`, but with body + +The client should wait for the server's EOF. That is, when +`Content-Length` is not specified, and `Connection: close`, the end of body is +specified by the EOF. + +_(Compare with APACHEBENCH_GET)_ + + +```http +HTTP/1.1 200 OK +Date: Tue, 04 Aug 2009 07:59:32 GMT +Server: Apache +X-Powered-By: Servlet/2.5 JSP/2.1 +Content-Type: text/xml; charset=utf-8 +Connection: close + +\n\ +\n\ + \n\ + \n\ + SOAP-ENV:Client\n\ + Client Error\n\ + \n\ + \n\ + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=4 span[header_field]="Date" +off=22 header_field complete +off=23 len=29 span[header_value]="Tue, 04 Aug 2009 07:59:32 GMT" +off=54 header_value complete +off=54 len=6 span[header_field]="Server" +off=61 header_field complete +off=62 len=6 span[header_value]="Apache" +off=70 header_value complete +off=70 len=12 span[header_field]="X-Powered-By" +off=83 header_field complete +off=84 len=19 span[header_value]="Servlet/2.5 JSP/2.1" +off=105 header_value complete +off=105 len=12 span[header_field]="Content-Type" +off=118 header_field complete +off=119 len=23 span[header_value]="text/xml; charset=utf-8" +off=144 header_value complete +off=144 len=10 span[header_field]="Connection" +off=155 header_field complete +off=156 len=5 span[header_value]="close" +off=163 header_value complete +off=165 headers complete status=200 v=1/1 flags=2 content_length=0 +off=165 len=42 span[body]="" +off=207 len=1 span[body]=lf +off=208 len=80 span[body]="" +off=288 len=1 span[body]=lf +off=289 len=17 span[body]=" " +off=306 len=1 span[body]=lf +off=307 len=20 span[body]=" " +off=327 len=1 span[body]=lf +off=328 len=45 span[body]=" SOAP-ENV:Client" +off=373 len=1 span[body]=lf +off=374 len=46 span[body]=" Client Error" +off=420 len=1 span[body]=lf +off=421 len=21 span[body]=" " +off=442 len=1 span[body]=lf +off=443 len=18 span[body]=" " +off=461 len=1 span[body]=lf +off=462 len=20 span[body]="" +``` + +## Content-Length-X + +The header that starts with `Content-Length*` should not be treated as +`Content-Length`. 
+ + +```http +HTTP/1.1 200 OK +Content-Length-X: 0 +Transfer-Encoding: chunked + +2 +OK +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=16 span[header_field]="Content-Length-X" +off=34 header_field complete +off=35 len=1 span[header_value]="0" +off=38 header_value complete +off=38 len=17 span[header_field]="Transfer-Encoding" +off=56 header_field complete +off=57 len=7 span[header_value]="chunked" +off=66 header_value complete +off=68 headers complete status=200 v=1/1 flags=208 content_length=0 +off=71 chunk header len=2 +off=71 len=2 span[body]="OK" +off=75 chunk complete +off=78 chunk header len=0 +off=80 chunk complete +off=80 message complete +``` + +## Content-Length reset when no body is received + + +```http +HTTP/1.1 200 OK +Content-Length: 123 + +HTTP/1.1 200 OK +Content-Length: 456 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=3 span[header_value]="123" +off=38 header_value complete +off=40 headers complete status=200 v=1/1 flags=20 content_length=123 +off=40 skip body +off=40 message complete +off=40 reset +off=40 message begin +off=45 len=3 span[version]="1.1" +off=48 version complete +off=53 len=2 span[status]="OK" +off=57 status complete +off=57 len=14 span[header_field]="Content-Length" +off=72 header_field complete +off=73 len=3 span[header_value]="456" +off=78 header_value complete +off=80 headers complete status=200 v=1/1 flags=20 content_length=456 +off=80 skip body +off=80 message complete +``` diff --git a/llhttp/test/response/finish.md b/llhttp/test/response/finish.md new file mode 100644 index 0000000..2938b83 --- /dev/null +++ b/llhttp/test/response/finish.md @@ -0,0 +1,23 @@ +Finish +====== + +Those tests check the return codes and the behavior of `llhttp_finish()` C API. 
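+
+As a rough sketch of what that call looks like from C (illustrative only; the
+response bytes used here are not part of the fixtures):
+
+```c
+#include <stdio.h>
+#include <string.h>
+
+#include "llhttp.h"
+
+int main(void) {
+  llhttp_t parser;
+  llhttp_settings_t settings;
+
+  llhttp_settings_init(&settings);
+  llhttp_init(&parser, HTTP_RESPONSE, &settings);
+
+  /* An EOF-terminated response: no Content-Length, no chunked encoding,
+   * so the body only ends when the connection does. */
+  const char* res = "HTTP/1.1 200 OK\r\n\r\nhello";
+  llhttp_errno_t err = llhttp_execute(&parser, res, strlen(res));
+  if (err != HPE_OK) {
+    fprintf(stderr, "%s: %s\n", llhttp_errno_name(err),
+            llhttp_get_error_reason(&parser));
+    return 1;
+  }
+
+  /* Signal EOF. The return code says whether ending the message here is
+   * legal; HPE_OK means the message could be finished safely. */
+  err = llhttp_finish(&parser);
+  printf("finish: %s\n", llhttp_errno_name(err));
+  return 0;
+}
+```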
+ +## It should be safe to finish with cb after empty response + + +```http +HTTP/1.1 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=19 headers complete status=200 v=1/1 flags=0 content_length=0 +off=NULL finish=1 +``` diff --git a/llhttp/test/response/invalid.md b/llhttp/test/response/invalid.md new file mode 100644 index 0000000..034fc4d --- /dev/null +++ b/llhttp/test/response/invalid.md @@ -0,0 +1,285 @@ +Invalid responses +================= + +### Incomplete HTTP protocol + + +```http +HTP/1.1 200 OK + + +``` + +```log +off=0 message begin +off=2 error code=8 reason="Expected HTTP/" +``` + +### Extra digit in HTTP major version + + +```http +HTTP/01.1 200 OK + + +``` + +```log +off=0 message begin +off=5 len=1 span[version]="0" +off=6 error code=9 reason="Expected dot" +``` + +### Extra digit in HTTP major version #2 + + +```http +HTTP/11.1 200 OK + + +``` + +```log +off=0 message begin +off=5 len=1 span[version]="1" +off=6 error code=9 reason="Expected dot" +``` + +### Extra digit in HTTP minor version + + +```http +HTTP/1.01 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.0" +off=8 version complete +off=8 error code=9 reason="Expected space after version" +``` +--> + +### Tab after HTTP version + + +```http +HTTP/1.1\t200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=8 error code=9 reason="Expected space after version" +``` + +### CR before response and tab after HTTP version + + +```http +\rHTTP/1.1\t200 OK + + +``` + +```log +off=1 message begin +off=6 len=3 span[version]="1.1" +off=9 version complete +off=9 error code=9 reason="Expected space after version" +``` + +### Headers separated by CR + + +```http +HTTP/1.1 200 OK +Foo: 1\rBar: 2 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=3 span[header_field]="Foo" +off=21 header_field complete +off=22 len=1 span[header_value]="1" +off=24 error code=3 reason="Missing expected LF after header value" +``` + +### Invalid HTTP version + + +```http +HTTP/5.6 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="5.6" +off=8 error code=9 reason="Invalid HTTP version" +``` + +## Invalid space after start line + + +```http +HTTP/1.1 200 OK + Host: foo +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=18 error code=30 reason="Unexpected space after start line" +``` + +### Extra space between HTTP version and status code + + +```http +HTTP/1.1 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=9 error code=13 reason="Invalid status code" +``` + +### Extra space between status code and reason + + +```http +HTTP/1.1 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=3 span[status]=" OK" +off=18 status complete +off=20 headers complete status=200 v=1/1 flags=0 content_length=0 +``` + +### One-digit status code + + +```http +HTTP/1.1 2 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=10 error code=13 reason="Invalid status code" +``` + +### Only LFs present and no body + + +```http +HTTP/1.1 200 OK\nContent-Length: 0\n\n +``` + 
+```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=16 error code=25 reason="Missing expected CR after response line" +``` + +### Only LFs present and no body (lenient) + + +```http +HTTP/1.1 200 OK\nContent-Length: 0\n\n +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=16 status complete +off=16 len=14 span[header_field]="Content-Length" +off=31 header_field complete +off=32 len=1 span[header_value]="0" +off=34 header_value complete +off=35 headers complete status=200 v=1/1 flags=20 content_length=0 +off=35 message complete +``` + +### Only LFs present + + +```http +HTTP/1.1 200 OK\n\ +Foo: abc\n\ +Bar: def\n\ +\n\ +BODY\n\ +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=16 error code=25 reason="Missing expected CR after response line" +``` + +### Only LFs present (lenient) + + +```http +HTTP/1.1 200 OK\n\ +Foo: abc\n\ +Bar: def\n\ +\n\ +BODY\n\ +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=16 status complete +off=16 len=3 span[header_field]="Foo" +off=20 header_field complete +off=21 len=3 span[header_value]="abc" +off=25 header_value complete +off=25 len=3 span[header_field]="Bar" +off=29 header_field complete +off=30 len=3 span[header_value]="def" +off=34 header_value complete +off=35 headers complete status=200 v=1/1 flags=0 content_length=0 +off=35 len=4 span[body]="BODY" +off=39 len=1 span[body]=lf +off=40 len=1 span[body]="\" +``` \ No newline at end of file diff --git a/llhttp/test/response/lenient-version.md b/llhttp/test/response/lenient-version.md new file mode 100644 index 0000000..86c6ede --- /dev/null +++ b/llhttp/test/response/lenient-version.md @@ -0,0 +1,20 @@ +Lenient HTTP version parsing +============================ + +### Invalid HTTP version (lenient) + + +```http +HTTP/5.6 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="5.6" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=19 headers complete status=200 v=5/6 flags=0 content_length=0 +``` diff --git a/llhttp/test/response/pausing.md b/llhttp/test/response/pausing.md new file mode 100644 index 0000000..d2e870b --- /dev/null +++ b/llhttp/test/response/pausing.md @@ -0,0 +1,330 @@ +Pausing +======= + +### on_message_begin + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=0 pause +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_message_complete + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +off=41 pause +``` + +### 
on_version_complete + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=8 pause +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_status_complete + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 pause +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_header_field_complete + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=32 pause +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_header_value_complete + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=36 pause +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_headers_complete + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +abc +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 pause +off=38 len=3 span[body]="abc" +off=41 message complete +``` + +### on_chunk_header + + +```http +HTTP/1.1 200 OK +Transfer-Encoding: chunked + +a +0123456789 +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=17 span[header_field]="Transfer-Encoding" +off=35 header_field complete +off=36 len=7 span[header_value]="chunked" +off=45 header_value complete +off=47 headers complete status=200 v=1/1 flags=208 content_length=0 +off=50 chunk header len=10 +off=50 pause +off=50 len=10 span[body]="0123456789" +off=62 chunk complete +off=65 chunk header len=0 +off=65 pause +off=67 chunk complete +off=67 message complete +``` + +### on_chunk_extension_name + + +```http +HTTP/1.1 200 OK +Transfer-Encoding: chunked + +a;foo=bar +0123456789 +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status 
complete +off=17 len=17 span[header_field]="Transfer-Encoding" +off=35 header_field complete +off=36 len=7 span[header_value]="chunked" +off=45 header_value complete +off=47 headers complete status=200 v=1/1 flags=208 content_length=0 +off=49 len=3 span[chunk_extension_name]="foo" +off=53 chunk_extension_name complete +off=53 pause +off=53 len=3 span[chunk_extension_value]="bar" +off=57 chunk_extension_value complete +off=58 chunk header len=10 +off=58 len=10 span[body]="0123456789" +off=70 chunk complete +off=73 chunk header len=0 +off=75 chunk complete +off=75 message complete +``` + +### on_chunk_extension_value + + +```http +HTTP/1.1 200 OK +Transfer-Encoding: chunked + +a;foo=bar +0123456789 +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=17 span[header_field]="Transfer-Encoding" +off=35 header_field complete +off=36 len=7 span[header_value]="chunked" +off=45 header_value complete +off=47 headers complete status=200 v=1/1 flags=208 content_length=0 +off=49 len=3 span[chunk_extension_name]="foo" +off=53 chunk_extension_name complete +off=53 len=3 span[chunk_extension_value]="bar" +off=57 chunk_extension_value complete +off=57 pause +off=58 chunk header len=10 +off=58 len=10 span[body]="0123456789" +off=70 chunk complete +off=73 chunk header len=0 +off=75 chunk complete +off=75 message complete +``` + +### on_chunk_complete + + +```http +HTTP/1.1 200 OK +Transfer-Encoding: chunked + +a +0123456789 +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=17 span[header_field]="Transfer-Encoding" +off=35 header_field complete +off=36 len=7 span[header_value]="chunked" +off=45 header_value complete +off=47 headers complete status=200 v=1/1 flags=208 content_length=0 +off=50 chunk header len=10 +off=50 len=10 span[body]="0123456789" +off=62 chunk complete +off=62 pause +off=65 chunk header len=0 +off=67 chunk complete +off=67 pause +off=67 message complete +``` diff --git a/llhttp/test/response/pipelining.md b/llhttp/test/response/pipelining.md new file mode 100644 index 0000000..01e007a --- /dev/null +++ b/llhttp/test/response/pipelining.md @@ -0,0 +1,60 @@ +Pipelining +========== + +## Should parse multiple events + + +```http +HTTP/1.1 200 OK +Content-Length: 3 + +AAA +HTTP/1.1 201 Created +Content-Length: 4 + +BBBB +HTTP/1.1 202 Accepted +Content-Length: 5 + +CCCC +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=14 span[header_field]="Content-Length" +off=32 header_field complete +off=33 len=1 span[header_value]="3" +off=36 header_value complete +off=38 headers complete status=200 v=1/1 flags=20 content_length=3 +off=38 len=3 span[body]="AAA" +off=41 message complete +off=43 reset +off=43 message begin +off=48 len=3 span[version]="1.1" +off=51 version complete +off=56 len=7 span[status]="Created" +off=65 status complete +off=65 len=14 span[header_field]="Content-Length" +off=80 header_field complete +off=81 len=1 span[header_value]="4" +off=84 header_value complete +off=86 headers complete status=201 v=1/1 flags=20 content_length=4 +off=86 len=4 span[body]="BBBB" +off=90 message complete +off=92 reset +off=92 message begin +off=97 len=3 span[version]="1.1" +off=100 version complete +off=105 len=8 span[status]="Accepted" +off=115 status complete 
+off=115 len=14 span[header_field]="Content-Length" +off=130 header_field complete +off=131 len=1 span[header_value]="5" +off=134 header_value complete +off=136 headers complete status=202 v=1/1 flags=20 content_length=5 +off=136 len=4 span[body]="CCCC" +``` \ No newline at end of file diff --git a/llhttp/test/response/sample.md b/llhttp/test/response/sample.md new file mode 100644 index 0000000..be2e82d --- /dev/null +++ b/llhttp/test/response/sample.md @@ -0,0 +1,653 @@ +Sample responses +================ + +## Simple response + + +```http +HTTP/1.1 200 OK +Header1: Value1 +Header2:\t Value2 +Content-Length: 0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=7 span[header_field]="Header1" +off=25 header_field complete +off=26 len=6 span[header_value]="Value1" +off=34 header_value complete +off=34 len=7 span[header_field]="Header2" +off=42 header_field complete +off=44 len=6 span[header_value]="Value2" +off=52 header_value complete +off=52 len=14 span[header_field]="Content-Length" +off=67 header_field complete +off=68 len=1 span[header_value]="0" +off=71 header_value complete +off=73 headers complete status=200 v=1/1 flags=20 content_length=0 +off=73 message complete +``` + +## Error on invalid response start + +Every response must start with `HTTP/`. + + +```http +HTTPER/1.1 200 OK + + +``` + +```log +off=0 message begin +off=4 error code=8 reason="Expected HTTP/" +``` + +## Empty body should not trigger spurious span callbacks + + +```http +HTTP/1.1 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=19 headers complete status=200 v=1/1 flags=0 content_length=0 +``` + +## Google 301 + + +```http +HTTP/1.1 301 Moved Permanently +Location: http://www.google.com/ +Content-Type: text/html; charset=UTF-8 +Date: Sun, 26 Apr 2009 11:11:49 GMT +Expires: Tue, 26 May 2009 11:11:49 GMT +X-$PrototypeBI-Version: 1.6.0.3 +Cache-Control: public, max-age=2592000 +Server: gws +Content-Length: 219 + +\n\ +301 Moved\n\ +
<H1>301 Moved</H1>\n\ +The document has moved\n\ +<A HREF="http://www.google.com/">
here. + +``` +_(Note the `$` char in header field)_ + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=17 span[status]="Moved Permanently" +off=32 status complete +off=32 len=8 span[header_field]="Location" +off=41 header_field complete +off=42 len=22 span[header_value]="http://www.google.com/" +off=66 header_value complete +off=66 len=12 span[header_field]="Content-Type" +off=79 header_field complete +off=80 len=24 span[header_value]="text/html; charset=UTF-8" +off=106 header_value complete +off=106 len=4 span[header_field]="Date" +off=111 header_field complete +off=112 len=29 span[header_value]="Sun, 26 Apr 2009 11:11:49 GMT" +off=143 header_value complete +off=143 len=7 span[header_field]="Expires" +off=151 header_field complete +off=152 len=29 span[header_value]="Tue, 26 May 2009 11:11:49 GMT" +off=183 header_value complete +off=183 len=22 span[header_field]="X-$PrototypeBI-Version" +off=206 header_field complete +off=207 len=7 span[header_value]="1.6.0.3" +off=216 header_value complete +off=216 len=13 span[header_field]="Cache-Control" +off=230 header_field complete +off=231 len=23 span[header_value]="public, max-age=2592000" +off=256 header_value complete +off=256 len=6 span[header_field]="Server" +off=263 header_field complete +off=264 len=3 span[header_value]="gws" +off=269 header_value complete +off=269 len=14 span[header_field]="Content-Length" +off=284 header_field complete +off=286 len=5 span[header_value]="219 " +off=293 header_value complete +off=295 headers complete status=301 v=1/1 flags=20 content_length=219 +off=295 len=74 span[body]="" +off=369 len=1 span[body]=lf +off=370 len=37 span[body]="301 Moved" +off=407 len=1 span[body]=lf +off=408 len=18 span[body]="
<H1>301 Moved</H1>
" +off=426 len=1 span[body]=lf +off=427 len=22 span[body]="The document has moved" +off=449 len=1 span[body]=lf +off=450 len=42 span[body]="here." +off=492 len=1 span[body]=cr +off=493 len=1 span[body]=lf +off=494 len=14 span[body]="" +``` + +## amazon.com + + +```http +HTTP/1.1 301 MovedPermanently +Date: Wed, 15 May 2013 17:06:33 GMT +Server: Server +x-amz-id-1: 0GPHKXSJQ826RK7GZEB2 +p3p: policyref="http://www.amazon.com/w3c/p3p.xml",CP="CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC " +x-amz-id-2: STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD +Location: http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846 +Vary: Accept-Encoding,User-Agent +Content-Type: text/html; charset=ISO-8859-1 +Transfer-Encoding: chunked + +1 +\n +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=16 span[status]="MovedPermanently" +off=31 status complete +off=31 len=4 span[header_field]="Date" +off=36 header_field complete +off=37 len=29 span[header_value]="Wed, 15 May 2013 17:06:33 GMT" +off=68 header_value complete +off=68 len=6 span[header_field]="Server" +off=75 header_field complete +off=76 len=6 span[header_value]="Server" +off=84 header_value complete +off=84 len=10 span[header_field]="x-amz-id-1" +off=95 header_field complete +off=96 len=20 span[header_value]="0GPHKXSJQ826RK7GZEB2" +off=118 header_value complete +off=118 len=3 span[header_field]="p3p" +off=122 header_field complete +off=123 len=178 span[header_value]="policyref="http://www.amazon.com/w3c/p3p.xml",CP="CAO DSP LAW CUR ADM IVAo IVDo CONo OTPo OUR DELi PUBi OTRi BUS PHY ONL UNI PUR FIN COM NAV INT DEM CNT STA HEA PRE LOC GOV OTC "" +off=303 header_value complete +off=303 len=10 span[header_field]="x-amz-id-2" +off=314 header_field complete +off=315 len=64 span[header_value]="STN69VZxIFSz9YJLbz1GDbxpbjG6Qjmmq5E3DxRhOUw+Et0p4hr7c/Q8qNcx4oAD" +off=381 header_value complete +off=381 len=8 span[header_field]="Location" +off=390 header_field complete +off=391 len=214 span[header_value]="http://www.amazon.com/Dan-Brown/e/B000AP9DSU/ref=s9_pop_gw_al1?_encoding=UTF8&refinementId=618073011&pf_rd_m=ATVPDKIKX0DER&pf_rd_s=center-2&pf_rd_r=0SHYY5BZXN3KR20BNFAY&pf_rd_t=101&pf_rd_p=1263340922&pf_rd_i=507846" +off=607 header_value complete +off=607 len=4 span[header_field]="Vary" +off=612 header_field complete +off=613 len=26 span[header_value]="Accept-Encoding,User-Agent" +off=641 header_value complete +off=641 len=12 span[header_field]="Content-Type" +off=654 header_field complete +off=655 len=29 span[header_value]="text/html; charset=ISO-8859-1" +off=686 header_value complete +off=686 len=17 span[header_field]="Transfer-Encoding" +off=704 header_field complete +off=705 len=7 span[header_value]="chunked" +off=714 header_value complete +off=716 headers complete status=301 v=1/1 flags=208 content_length=0 +off=719 chunk header len=1 +off=719 len=1 span[body]=lf +off=722 chunk complete +off=725 chunk header len=0 +off=727 chunk complete +off=727 message complete +``` + +## No headers and no body + + +```http +HTTP/1.1 404 Not Found + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=9 span[status]="Not Found" +off=24 status complete +off=26 headers complete status=404 v=1/1 flags=0 
content_length=0 +``` + +## No reason phrase + + +```http +HTTP/1.1 301 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=14 status complete +off=16 headers complete status=301 v=1/1 flags=0 content_length=0 +``` + +## Empty reason phrase after space + + +```http +HTTP/1.1 200 \r\n\ + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=0 span[status]="" +off=15 status complete +off=17 headers complete status=200 v=1/1 flags=0 content_length=0 +``` + +## No carriage ret + + +```http +HTTP/1.1 200 OK\n\ +Content-Type: text/html; charset=utf-8\n\ +Connection: close\n\ +\n\ +these headers are from http://news.ycombinator.com/ +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=16 error code=25 reason="Missing expected CR after response line" +``` + +## No carriage ret (lenient) + + +```http +HTTP/1.1 200 OK\n\ +Content-Type: text/html; charset=utf-8\n\ +Connection: close\n\ +\n\ +these headers are from http://news.ycombinator.com/ +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=16 status complete +off=16 len=12 span[header_field]="Content-Type" +off=29 header_field complete +off=30 len=24 span[header_value]="text/html; charset=utf-8" +off=55 header_value complete +off=55 len=10 span[header_field]="Connection" +off=66 header_field complete +off=67 len=5 span[header_value]="close" +off=73 header_value complete +off=74 headers complete status=200 v=1/1 flags=2 content_length=0 +off=74 len=51 span[body]="these headers are from http://news.ycombinator.com/" +``` + +## Underscore in header key + +Shown by: `curl -o /dev/null -v "http://ad.doubleclick.net/pfadx/DARTSHELLCONFIGXML;dcmt=text/xml;"` + + +```http +HTTP/1.1 200 OK +Server: DCLK-AdSvr +Content-Type: text/xml +Content-Length: 0 +DCLK_imp: v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=6 span[header_field]="Server" +off=24 header_field complete +off=25 len=10 span[header_value]="DCLK-AdSvr" +off=37 header_value complete +off=37 len=12 span[header_field]="Content-Type" +off=50 header_field complete +off=51 len=8 span[header_value]="text/xml" +off=61 header_value complete +off=61 len=14 span[header_field]="Content-Length" +off=76 header_field complete +off=77 len=1 span[header_value]="0" +off=80 header_value complete +off=80 len=8 span[header_field]="DCLK_imp" +off=89 header_field complete +off=90 len=81 span[header_value]="v7;x;114750856;0-0;0;17820020;0/0;21603567/21621457/1;;~okv=;dcmt=text/xml;;~cs=o" +off=173 header_value complete +off=175 headers complete status=200 v=1/1 flags=20 content_length=0 +off=175 message complete +``` + +## bonjourmadame.fr + +The client should not merge two headers fields when the first one doesn't +have a value. 
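As a rough illustration of what that means for a caller, the sketch below wires up llhttp's header callbacks and feeds a shortened version of the response that follows: `Pragma` is reported as a header field whose value span is empty, and `Location` then arrives as a separate field instead of being merged into it. This is only a sketch assuming the public llhttp C API (`llhttp_settings_init`, `llhttp_init`, `llhttp_execute`); the abbreviated response buffer is purely illustrative and not part of the fixture.

```c
#include <stdio.h>
#include <string.h>
#include "llhttp.h"

/* Print every header field/value span the parser reports. */
static int on_header_field(llhttp_t* p, const char* at, size_t len) {
  printf("field: %.*s\n", (int)len, at);
  return 0;
}

static int on_header_value(llhttp_t* p, const char* at, size_t len) {
  printf("value: \"%.*s\"\n", (int)len, at);
  return 0;
}

int main(void) {
  /* Shortened, illustrative form of the response below: Pragma has an empty
   * value, and Location must still be reported as its own header field. */
  const char* resp =
      "HTTP/1.0 301 Moved Permanently\r\n"
      "Pragma: \r\n"
      "Location: http://www.bonjourmadame.fr/\r\n"
      "Content-Length: 0\r\n"
      "\r\n";

  llhttp_t parser;
  llhttp_settings_t settings;
  llhttp_settings_init(&settings);
  settings.on_header_field = on_header_field;
  settings.on_header_value = on_header_value;
  llhttp_init(&parser, HTTP_RESPONSE, &settings);

  llhttp_errno_t err = llhttp_execute(&parser, resp, strlen(resp));
  if (err != HPE_OK)
    fprintf(stderr, "parse error: %s\n", llhttp_errno_name(err));
  return 0;
}
```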
+ + +```http +HTTP/1.0 301 Moved Permanently +Date: Thu, 03 Jun 2010 09:56:32 GMT +Server: Apache/2.2.3 (Red Hat) +Cache-Control: public +Pragma: \r\n\ +Location: http://www.bonjourmadame.fr/ +Vary: Accept-Encoding +Content-Length: 0 +Content-Type: text/html; charset=UTF-8 +Connection: keep-alive + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.0" +off=8 version complete +off=13 len=17 span[status]="Moved Permanently" +off=32 status complete +off=32 len=4 span[header_field]="Date" +off=37 header_field complete +off=38 len=29 span[header_value]="Thu, 03 Jun 2010 09:56:32 GMT" +off=69 header_value complete +off=69 len=6 span[header_field]="Server" +off=76 header_field complete +off=77 len=22 span[header_value]="Apache/2.2.3 (Red Hat)" +off=101 header_value complete +off=101 len=13 span[header_field]="Cache-Control" +off=115 header_field complete +off=116 len=6 span[header_value]="public" +off=124 header_value complete +off=124 len=6 span[header_field]="Pragma" +off=131 header_field complete +off=134 len=0 span[header_value]="" +off=134 header_value complete +off=134 len=8 span[header_field]="Location" +off=143 header_field complete +off=144 len=28 span[header_value]="http://www.bonjourmadame.fr/" +off=174 header_value complete +off=174 len=4 span[header_field]="Vary" +off=179 header_field complete +off=180 len=15 span[header_value]="Accept-Encoding" +off=197 header_value complete +off=197 len=14 span[header_field]="Content-Length" +off=212 header_field complete +off=213 len=1 span[header_value]="0" +off=216 header_value complete +off=216 len=12 span[header_field]="Content-Type" +off=229 header_field complete +off=230 len=24 span[header_value]="text/html; charset=UTF-8" +off=256 header_value complete +off=256 len=10 span[header_field]="Connection" +off=267 header_field complete +off=268 len=10 span[header_value]="keep-alive" +off=280 header_value complete +off=282 headers complete status=301 v=1/0 flags=21 content_length=0 +off=282 message complete +``` + +## Spaces in header value + + +```http +HTTP/1.1 200 OK +Date: Tue, 28 Sep 2010 01:14:13 GMT +Server: Apache +Cache-Control: no-cache, must-revalidate +Expires: Mon, 26 Jul 1997 05:00:00 GMT +.et-Cookie: PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com +Vary: Accept-Encoding +_eep-Alive: timeout=45 +_onnection: Keep-Alive +Transfer-Encoding: chunked +Content-Type: text/html +Connection: close + +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=4 span[header_field]="Date" +off=22 header_field complete +off=23 len=29 span[header_value]="Tue, 28 Sep 2010 01:14:13 GMT" +off=54 header_value complete +off=54 len=6 span[header_field]="Server" +off=61 header_field complete +off=62 len=6 span[header_value]="Apache" +off=70 header_value complete +off=70 len=13 span[header_field]="Cache-Control" +off=84 header_field complete +off=85 len=25 span[header_value]="no-cache, must-revalidate" +off=112 header_value complete +off=112 len=7 span[header_field]="Expires" +off=120 header_field complete +off=121 len=29 span[header_value]="Mon, 26 Jul 1997 05:00:00 GMT" +off=152 header_value complete +off=152 len=10 span[header_field]=".et-Cookie" +off=163 header_field complete +off=164 len=54 span[header_value]="PlaxoCS=1274804622353690521; path=/; domain=.plaxo.com" +off=220 header_value complete +off=220 len=4 span[header_field]="Vary" +off=225 header_field complete +off=226 len=15 span[header_value]="Accept-Encoding" 
+off=243 header_value complete +off=243 len=10 span[header_field]="_eep-Alive" +off=254 header_field complete +off=255 len=10 span[header_value]="timeout=45" +off=267 header_value complete +off=267 len=10 span[header_field]="_onnection" +off=278 header_field complete +off=279 len=10 span[header_value]="Keep-Alive" +off=291 header_value complete +off=291 len=17 span[header_field]="Transfer-Encoding" +off=309 header_field complete +off=310 len=7 span[header_value]="chunked" +off=319 header_value complete +off=319 len=12 span[header_field]="Content-Type" +off=332 header_field complete +off=333 len=9 span[header_value]="text/html" +off=344 header_value complete +off=344 len=10 span[header_field]="Connection" +off=355 header_field complete +off=356 len=5 span[header_value]="close" +off=363 header_value complete +off=365 headers complete status=200 v=1/1 flags=20a content_length=0 +off=368 chunk header len=0 +off=370 chunk complete +off=370 message complete +``` + +## Spaces in header name + + +```http +HTTP/1.1 200 OK +Server: Microsoft-IIS/6.0 +X-Powered-By: ASP.NET +en-US Content-Type: text/xml +Content-Type: text/xml +Content-Length: 16 +Date: Fri, 23 Jul 2010 18:45:38 GMT +Connection: keep-alive + +hello +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=6 span[header_field]="Server" +off=24 header_field complete +off=25 len=17 span[header_value]="Microsoft-IIS/6.0" +off=44 header_value complete +off=44 len=12 span[header_field]="X-Powered-By" +off=57 header_field complete +off=58 len=7 span[header_value]="ASP.NET" +off=67 header_value complete +off=72 error code=10 reason="Invalid header token" +``` + +## Non ASCII in status line + + +```http +HTTP/1.1 500 Oriëntatieprobleem +Date: Fri, 5 Nov 2010 23:07:12 GMT+2 +Content-Length: 0 +Connection: close + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=19 span[status]="Oriëntatieprobleem" +off=34 status complete +off=34 len=4 span[header_field]="Date" +off=39 header_field complete +off=40 len=30 span[header_value]="Fri, 5 Nov 2010 23:07:12 GMT+2" +off=72 header_value complete +off=72 len=14 span[header_field]="Content-Length" +off=87 header_field complete +off=88 len=1 span[header_value]="0" +off=91 header_value complete +off=91 len=10 span[header_field]="Connection" +off=102 header_field complete +off=103 len=5 span[header_value]="close" +off=110 header_value complete +off=112 headers complete status=500 v=1/1 flags=22 content_length=0 +off=112 message complete +``` + +## HTTP version 0.9 + + +```http +HTTP/0.9 200 OK + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="0.9" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=19 headers complete status=200 v=0/9 flags=0 content_length=0 +``` + +## No Content-Length, no Transfer-Encoding + +The client should wait for the server's EOF. That is, when neither +content-length nor transfer-encoding is specified, the end of body +is specified by the EOF. 
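Since nothing in the message itself marks the end of the body in this case, the consumer has to tell the parser when the connection was closed. A minimal sketch of that flow, assuming the public llhttp C API (`llhttp_execute` for the bytes read, then `llhttp_finish` once the peer closes), is shown below; the inline buffer stands in for data read from a socket and is not part of the fixture.

```c
#include <stdio.h>
#include <string.h>
#include "llhttp.h"

static int on_body(llhttp_t* p, const char* at, size_t len) {
  printf("body: %.*s\n", (int)len, at);
  return 0;
}

static int on_message_complete(llhttp_t* p) {
  printf("message complete\n");
  return 0;
}

int main(void) {
  /* No Content-Length and no Transfer-Encoding: the body runs until EOF. */
  const char* resp =
      "HTTP/1.1 200 OK\r\n"
      "Content-Type: text/plain\r\n"
      "\r\n"
      "hello world";

  llhttp_t parser;
  llhttp_settings_t settings;
  llhttp_settings_init(&settings);
  settings.on_body = on_body;
  settings.on_message_complete = on_message_complete;
  llhttp_init(&parser, HTTP_RESPONSE, &settings);

  /* Feed whatever was read; the message is still open at this point. */
  llhttp_execute(&parser, resp, strlen(resp));

  /* The peer closing the connection is what ends the body, so report EOF. */
  llhttp_errno_t err = llhttp_finish(&parser);
  if (err != HPE_OK)
    fprintf(stderr, "finish error: %s\n", llhttp_errno_name(err));
  return 0;
}
```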
+ + +```http +HTTP/1.1 200 OK +Content-Type: text/plain + +hello world +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=12 span[header_field]="Content-Type" +off=30 header_field complete +off=31 len=10 span[header_value]="text/plain" +off=43 header_value complete +off=45 headers complete status=200 v=1/1 flags=0 content_length=0 +off=45 len=11 span[body]="hello world" +``` + +## Response starting with CRLF + + +```http +\r\nHTTP/1.1 200 OK +Header1: Value1 +Header2:\t Value2 +Content-Length: 0 + + +``` + +```log +off=2 message begin +off=7 len=3 span[version]="1.1" +off=10 version complete +off=15 len=2 span[status]="OK" +off=19 status complete +off=19 len=7 span[header_field]="Header1" +off=27 header_field complete +off=28 len=6 span[header_value]="Value1" +off=36 header_value complete +off=36 len=7 span[header_field]="Header2" +off=44 header_field complete +off=46 len=6 span[header_value]="Value2" +off=54 header_value complete +off=54 len=14 span[header_field]="Content-Length" +off=69 header_field complete +off=70 len=1 span[header_value]="0" +off=73 header_value complete +off=75 headers complete status=200 v=1/1 flags=20 content_length=0 +off=75 message complete +``` diff --git a/llhttp/test/response/transfer-encoding.md b/llhttp/test/response/transfer-encoding.md new file mode 100644 index 0000000..e1fd10a --- /dev/null +++ b/llhttp/test/response/transfer-encoding.md @@ -0,0 +1,410 @@ +Transfer-Encoding header +======================== + +## Trailing space on chunked body + + +```http +HTTP/1.1 200 OK +Content-Type: text/plain +Transfer-Encoding: chunked + +25 \r\n\ +This is the data in the first chunk + +1C +and this is the second one + +0 \r\n\ + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=12 span[header_field]="Content-Type" +off=30 header_field complete +off=31 len=10 span[header_value]="text/plain" +off=43 header_value complete +off=43 len=17 span[header_field]="Transfer-Encoding" +off=61 header_field complete +off=62 len=7 span[header_value]="chunked" +off=71 header_value complete +off=73 headers complete status=200 v=1/1 flags=208 content_length=0 +off=76 error code=12 reason="Invalid character in chunk size" +``` + +## `chunked` before other transfer-encoding + + +```http +HTTP/1.1 200 OK +Accept: */* +Transfer-Encoding: chunked, deflate + +World +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=6 span[header_field]="Accept" +off=24 header_field complete +off=25 len=3 span[header_value]="*/*" +off=30 header_value complete +off=30 len=17 span[header_field]="Transfer-Encoding" +off=48 header_field complete +off=49 len=16 span[header_value]="chunked, deflate" +off=67 header_value complete +off=69 headers complete status=200 v=1/1 flags=200 content_length=0 +off=69 len=5 span[body]="World" +``` + +## multiple transfer-encoding where chunked is not the last one + + +```http +HTTP/1.1 200 OK +Accept: */* +Transfer-Encoding: chunked +Transfer-Encoding: identity + +World +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=6 span[header_field]="Accept" +off=24 header_field complete +off=25 len=3 span[header_value]="*/*" +off=30 header_value 
complete +off=30 len=17 span[header_field]="Transfer-Encoding" +off=48 header_field complete +off=49 len=7 span[header_value]="chunked" +off=58 header_value complete +off=58 len=17 span[header_field]="Transfer-Encoding" +off=76 header_field complete +off=77 len=8 span[header_value]="identity" +off=87 header_value complete +off=89 headers complete status=200 v=1/1 flags=200 content_length=0 +off=89 len=5 span[body]="World" +``` + +## `chunkedchunked` transfer-encoding does not enable chunked enconding + +This check that the word `chunked` repeat more than once (with or without spaces) does not mistakenly enables chunked encoding. + + +```http +HTTP/1.1 200 OK +Accept: */* +Transfer-Encoding: chunkedchunked + +2 +OK +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=6 span[header_field]="Accept" +off=24 header_field complete +off=25 len=3 span[header_value]="*/*" +off=30 header_value complete +off=30 len=17 span[header_field]="Transfer-Encoding" +off=48 header_field complete +off=49 len=14 span[header_value]="chunkedchunked" +off=65 header_value complete +off=67 headers complete status=200 v=1/1 flags=200 content_length=0 +off=67 len=1 span[body]="2" +off=68 len=1 span[body]=cr +off=69 len=1 span[body]=lf +off=70 len=2 span[body]="OK" +off=72 len=1 span[body]=cr +off=73 len=1 span[body]=lf +off=74 len=1 span[body]="0" +off=75 len=1 span[body]=cr +off=76 len=1 span[body]=lf +off=77 len=1 span[body]=cr +off=78 len=1 span[body]=lf +``` + +## Chunk extensions + + +```http +HTTP/1.1 200 OK +Host: localhost +Transfer-encoding: chunked + +5;ilovew3;somuchlove=aretheseparametersfor +hello +6;blahblah;blah + world +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=9 span[header_value]="localhost" +off=34 header_value complete +off=34 len=17 span[header_field]="Transfer-encoding" +off=52 header_field complete +off=53 len=7 span[header_value]="chunked" +off=62 header_value complete +off=64 headers complete status=200 v=1/1 flags=208 content_length=0 +off=66 len=7 span[chunk_extension_name]="ilovew3" +off=74 chunk_extension_name complete +off=74 len=10 span[chunk_extension_name]="somuchlove" +off=85 chunk_extension_name complete +off=85 len=21 span[chunk_extension_value]="aretheseparametersfor" +off=107 chunk_extension_value complete +off=108 chunk header len=5 +off=108 len=5 span[body]="hello" +off=115 chunk complete +off=117 len=8 span[chunk_extension_name]="blahblah" +off=126 chunk_extension_name complete +off=126 len=4 span[chunk_extension_name]="blah" +off=131 chunk_extension_name complete +off=132 chunk header len=6 +off=132 len=6 span[body]=" world" +off=140 chunk complete +off=143 chunk header len=0 +off=145 chunk complete +off=145 message complete +``` + +## No semicolon before chunk extensions + + +```http +HTTP/1.1 200 OK +Host: localhost +Transfer-encoding: chunked + +2 erfrferferf +aa +0 rrrr + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=9 span[header_value]="localhost" +off=34 header_value complete +off=34 len=17 span[header_field]="Transfer-encoding" +off=52 header_field complete +off=53 len=7 
span[header_value]="chunked" +off=62 header_value complete +off=64 headers complete status=200 v=1/1 flags=208 content_length=0 +off=66 error code=12 reason="Invalid character in chunk size" +``` + + +## No extension after semicolon + + +```http +HTTP/1.1 200 OK +Host: localhost +Transfer-encoding: chunked + +2; +aa +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=9 span[header_value]="localhost" +off=34 header_value complete +off=34 len=17 span[header_field]="Transfer-encoding" +off=52 header_field complete +off=53 len=7 span[header_value]="chunked" +off=62 header_value complete +off=64 headers complete status=200 v=1/1 flags=208 content_length=0 +off=67 error code=2 reason="Invalid character in chunk extensions" +``` + + +## Chunk extensions quoting + + +```http +HTTP/1.1 200 OK +Host: localhost +Transfer-Encoding: chunked + +5;ilovew3="I love; extensions";somuchlove="aretheseparametersfor";blah;foo=bar +hello +6;blahblah;blah + world +0 + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=9 span[header_value]="localhost" +off=34 header_value complete +off=34 len=17 span[header_field]="Transfer-Encoding" +off=52 header_field complete +off=53 len=7 span[header_value]="chunked" +off=62 header_value complete +off=64 headers complete status=200 v=1/1 flags=208 content_length=0 +off=66 len=7 span[chunk_extension_name]="ilovew3" +off=74 chunk_extension_name complete +off=74 len=20 span[chunk_extension_value]=""I love; extensions"" +off=94 chunk_extension_value complete +off=95 len=10 span[chunk_extension_name]="somuchlove" +off=106 chunk_extension_name complete +off=106 len=23 span[chunk_extension_value]=""aretheseparametersfor"" +off=129 chunk_extension_value complete +off=130 len=4 span[chunk_extension_name]="blah" +off=135 chunk_extension_name complete +off=135 len=3 span[chunk_extension_name]="foo" +off=139 chunk_extension_name complete +off=139 len=3 span[chunk_extension_value]="bar" +off=143 chunk_extension_value complete +off=144 chunk header len=5 +off=144 len=5 span[body]="hello" +off=151 chunk complete +off=153 len=8 span[chunk_extension_name]="blahblah" +off=162 chunk_extension_name complete +off=162 len=4 span[chunk_extension_name]="blah" +off=167 chunk_extension_name complete +off=168 chunk header len=6 +off=168 len=6 span[body]=" world" +off=176 chunk complete +off=179 chunk header len=0 +``` + + +## Unbalanced chunk extensions quoting + + +```http +HTTP/1.1 200 OK +Host: localhost +Transfer-Encoding: chunked + +5;ilovew3="abc";somuchlove="def; ghi +hello +6;blahblah;blah + world +0 + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=4 span[header_field]="Host" +off=22 header_field complete +off=23 len=9 span[header_value]="localhost" +off=34 header_value complete +off=34 len=17 span[header_field]="Transfer-Encoding" +off=52 header_field complete +off=53 len=7 span[header_value]="chunked" +off=62 header_value complete +off=64 headers complete status=200 v=1/1 flags=208 content_length=0 +off=66 len=7 span[chunk_extension_name]="ilovew3" +off=74 chunk_extension_name complete +off=74 len=5 
span[chunk_extension_value]=""abc"" +off=79 chunk_extension_value complete +off=80 len=10 span[chunk_extension_name]="somuchlove" +off=91 chunk_extension_name complete +off=91 len=9 span[chunk_extension_value]=""def; ghi" +off=101 error code=2 reason="Invalid character in chunk extensions quoted value" +``` + + +## Invalid OBS fold after chunked value + + +```http +HTTP/1.1 200 OK +Transfer-Encoding: chunked + abc + +5 +World +0 + + +``` + +```log +off=0 message begin +off=5 len=3 span[version]="1.1" +off=8 version complete +off=13 len=2 span[status]="OK" +off=17 status complete +off=17 len=17 span[header_field]="Transfer-Encoding" +off=35 header_field complete +off=36 len=7 span[header_value]="chunked" +off=45 len=5 span[header_value]=" abc" +off=52 header_value complete +off=54 headers complete status=200 v=1/1 flags=200 content_length=0 +off=54 len=1 span[body]="5" +off=55 len=1 span[body]=cr +off=56 len=1 span[body]=lf +off=57 len=5 span[body]="World" +off=62 len=1 span[body]=cr +off=63 len=1 span[body]=lf +off=64 len=1 span[body]="0" +off=65 len=1 span[body]=cr +off=66 len=1 span[body]=lf +off=67 len=1 span[body]=cr +off=68 len=1 span[body]=lf +``` + diff --git a/llhttp/test/url.md b/llhttp/test/url.md new file mode 100644 index 0000000..13a1b01 --- /dev/null +++ b/llhttp/test/url.md @@ -0,0 +1,261 @@ +# URL tests + +## Absolute URL + +```url +http://example.com/path?query=value#schema +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=11 span[url.host]="example.com" +off=18 len=5 span[url.path]="/path" +off=24 len=11 span[url.query]="query=value" +off=36 len=6 span[url.fragment]="schema" +``` + +## Relative URL + +```url +/path?query=value#schema +``` + +```log +off=0 len=5 span[url.path]="/path" +off=6 len=11 span[url.query]="query=value" +off=18 len=6 span[url.fragment]="schema" +``` + +## Failing on broken schema + + +```url +schema:/path?query=value#schema +``` + +```log +off=0 len=6 span[url.schema]="schema" +off=8 error code=7 reason="Unexpected char in url schema" +``` + +## Proxy request + +```url +http://hostname/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=8 span[url.host]="hostname" +off=15 len=1 span[url.path]="/" +``` + +## Proxy request with port + +```url +http://hostname:444/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=12 span[url.host]="hostname:444" +off=19 len=1 span[url.path]="/" +``` + +## Proxy IPv6 request + +```url +http://[1:2::3:4]/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=10 span[url.host]="[1:2::3:4]" +off=17 len=1 span[url.path]="/" +``` + +## Proxy IPv6 request with port + +```url +http://[1:2::3:4]:67/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=13 span[url.host]="[1:2::3:4]:67" +off=20 len=1 span[url.path]="/" +``` + +## IPv4 in IPv6 address + +```url +http://[2001:0000:0000:0000:0000:0000:1.9.1.1]/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=39 span[url.host]="[2001:0000:0000:0000:0000:0000:1.9.1.1]" +off=46 len=1 span[url.path]="/" +``` + +## Extra `?` in query string + +```url +http://a.tbcdn.cn/p/fp/2010c/??fp-header-min.css,fp-base-min.css,\ +fp-channel-min.css,fp-product-min.css,fp-mall-min.css,fp-category-min.css,\ +fp-sub-min.css,fp-gdp4p-min.css,fp-css3-min.css,fp-misc-min.css?t=20101022.css +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=10 span[url.host]="a.tbcdn.cn" +off=17 len=12 span[url.path]="/p/fp/2010c/" +off=30 len=187 
span[url.query]="?fp-header-min.css,fp-base-min.css,fp-channel-min.css,fp-product-min.css,fp-mall-min.css,fp-category-min.css,fp-sub-min.css,fp-gdp4p-min.css,fp-css3-min.css,fp-misc-min.css?t=20101022.css" +``` + +## URL encoded space + +```url +/toto.html?toto=a%20b +``` + +```log +off=0 len=10 span[url.path]="/toto.html" +off=11 len=10 span[url.query]="toto=a%20b" +``` + +## URL fragment + +```url +/toto.html#titi +``` + +```log +off=0 len=10 span[url.path]="/toto.html" +off=11 len=4 span[url.fragment]="titi" +``` + +## Complex URL fragment + +```url +http://www.webmasterworld.com/r.cgi?f=21&d=8405&url=\ +http://www.example.com/index.html?foo=bar&hello=world#midpage +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=22 span[url.host]="www.webmasterworld.com" +off=29 len=6 span[url.path]="/r.cgi" +off=36 len=69 span[url.query]="f=21&d=8405&url=http://www.example.com/index.html?foo=bar&hello=world" +off=106 len=7 span[url.fragment]="midpage" +``` + +## Complex URL from node.js url parser doc + +```url +http://host.com:8080/p/a/t/h?query=string#hash +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=13 span[url.host]="host.com:8080" +off=20 len=8 span[url.path]="/p/a/t/h" +off=29 len=12 span[url.query]="query=string" +off=42 len=4 span[url.fragment]="hash" +``` + +## Complex URL with basic auth from node.js url parser doc + +```url +http://a:b@host.com:8080/p/a/t/h?query=string#hash +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=17 span[url.host]="a:b@host.com:8080" +off=24 len=8 span[url.path]="/p/a/t/h" +off=33 len=12 span[url.query]="query=string" +off=46 len=4 span[url.fragment]="hash" +``` + +## Double `@` + + +```url +http://a:b@@hostname:443/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=12 error code=7 reason="Double @ in url" +``` + +## Proxy basic auth with url encoded space + +```url +http://a%20:b@host.com/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=15 span[url.host]="a%20:b@host.com" +off=22 len=1 span[url.path]="/" +``` + +## Proxy basic auth with unreserved chars + +```url +http://a!;-_!=+$@host.com/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=18 span[url.host]="a!;-_!=+$@host.com" +off=25 len=1 span[url.path]="/" +``` + +## IPv6 address with Zone ID + +```url +http://[fe80::a%25eth0]/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=16 span[url.host]="[fe80::a%25eth0]" +off=23 len=1 span[url.path]="/" +``` + +## IPv6 address with Zone ID, but `%` is not percent-encoded + +```url +http://[fe80::a%eth0]/ +``` + +```log +off=0 len=4 span[url.schema]="http" +off=7 len=14 span[url.host]="[fe80::a%eth0]" +off=21 len=1 span[url.path]="/" +``` + +## Disallow tab in URL + + +```url +/foo\tbar/ +``` + +```log +off=5 error code=7 reason="Invalid characters in url" +``` + +## Disallow form-feed in URL + + +```url +/foo\fbar/ +``` + +```log +off=5 error code=7 reason="Invalid characters in url" +``` diff --git a/llhttp/tsconfig.json b/llhttp/tsconfig.json new file mode 100644 index 0000000..01ec7c2 --- /dev/null +++ b/llhttp/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "strict": true, + "target": "es2017", + "module": "commonjs", + "moduleResolution": "node", + "outDir": "./lib", + "declaration": true, + "pretty": true, + "sourceMap": true + }, + "include": [ + "src/**/*.ts" + ] +} diff --git a/llhttp/tslint.json b/llhttp/tslint.json new file mode 100644 index 0000000..b0aaf97 --- /dev/null +++ b/llhttp/tslint.json @@ -0,0 +1,14 @@ +{ + "defaultSeverity": "error", + 
"extends": [ + "tslint:recommended" + ], + "jsRules": {}, + "rules": { + "no-bitwise": null, + "quotemark": [ + true, "single", "avoid-escape", "avoid-template" + ] + }, + "rulesDirectory": [] +} diff --git a/llparse-builder/.gitignore b/llparse-builder/.gitignore new file mode 100644 index 0000000..5e67ab3 --- /dev/null +++ b/llparse-builder/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +npm-debug.log +lib/ diff --git a/llparse-builder/.travis.yml b/llparse-builder/.travis.yml new file mode 100644 index 0000000..b5efd79 --- /dev/null +++ b/llparse-builder/.travis.yml @@ -0,0 +1,4 @@ +sudo: false +language: node_js +node_js: + - "stable" diff --git a/llparse-builder/README.md b/llparse-builder/README.md new file mode 100644 index 0000000..522fba2 --- /dev/null +++ b/llparse-builder/README.md @@ -0,0 +1,32 @@ +# llparse-builder +[![Build Status](https://secure.travis-ci.org/indutny/llparse-builder.svg)](http://travis-ci.org/indutny/llparse-builder) +[![NPM version](https://badge.fury.io/js/llparse-builder.svg)](https://badge.fury.io/js/llparse-builder) + +See [llparse][0]. + +#### LICENSE + +This software is licensed under the MIT License. + +Copyright Fedor Indutny, 2018. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +[0]: https://github.com/indutny/llparse diff --git a/llparse-builder/package-lock.json b/llparse-builder/package-lock.json new file mode 100644 index 0000000..5e76f34 --- /dev/null +++ b/llparse-builder/package-lock.json @@ -0,0 +1,1466 @@ +{ + "name": "llparse-builder", + "version": "1.5.2", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@babel/code-frame": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz", + "integrity": "sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==", + "dev": true, + "requires": { + "@babel/highlight": "^7.10.4" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz", + "integrity": "sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==", + "dev": true + }, + "@babel/highlight": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.4.tgz", + "integrity": "sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.10.4", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@types/debug": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.5.tgz", + "integrity": "sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==" + }, + "@types/mocha": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-8.0.3.tgz", + "integrity": "sha512-vyxR57nv8NfcU0GZu8EUXZLTbCMupIUwy95LJ6lllN+JRPG25CwMHoB1q5xKh8YKhQnHYRAn4yW2yuHbf/5xgg==", + "dev": true + }, + "@types/node": { + "version": "14.11.8", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.11.8.tgz", + "integrity": "sha512-KPcKqKm5UKDkaYPTuXSx8wEP7vE9GnuaXIZKijwRYcePpZFDVuy2a57LarFKiORbHOuTOOwYzxVxcUzsh2P2Pw==", + "dev": true + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": 
"sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "array.prototype.map": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.2.tgz", + "integrity": "sha512-Az3OYxgsa1g7xDYp86l0nnN4bcmuEITGe1rbdEBVkrqkzMgDcbdQ2R7r41pNzti+4NMces3H8gMmuioZUilLgw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "es-array-method-boxes-properly": "^1.0.0", + "is-string": "^1.0.4" + } + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "binary-extensions": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.1.0.tgz", + "integrity": "sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==", + "dev": true + }, + "binary-search": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/binary-search/-/binary-search-1.3.6.tgz", + "integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "chalk": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.2.tgz", + "integrity": "sha512-ZM4j2/ld/YZDc3Ma8PgN7gyAk+kHMMMyzLNryCPGhWrsfAuDVeuid5bpRFTDgMH9JBK2lA4dyyAkkZYF/WcqDQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "dependencies": { + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "supports-color": { + "version": "5.3.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-5.3.0.tgz", + "integrity": "sha512-0aP01LLIskjKs3lq52EC0aGBAJhLq7B2Rd8HC/DR/PtNNpcLilNmHC12O+hu0usQpo7wtHNRqtrhBwtDb0+dNg==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "chokidar": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.4.2.tgz", + "integrity": "sha512-IZHaDeBeI+sZJRX7lGcXsdzgvZqKv6sECqsbErJA4mHWfpRrD8B97kSFN4cQz6nGBGiuFia1MKR4d6c1o8Cv7A==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.1.2", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.4.0" + } + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "color-convert": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz", + "integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==", + "dev": true, + "requires": { + "color-name": "^1.1.1" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "commander": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "debug": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "requires": { + "ms": "2.1.2" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": 
"sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "requires": { + "object-keys": "^1.0.12" + } + }, + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + }, + "object.assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.1.tgz", + "integrity": "sha512-VT/cxmx5yaoHSOTSyrCygIDFco+RsibY2NM0a4RdEeY/4KgqezwFtK1yr3U67xYhqJSlASm2pKhLVzPj2lr4bA==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.0", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } + } + } + } + }, + "es-array-method-boxes-properly": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", + "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==", + "dev": true + }, + "es-get-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", + "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", + "dev": true, + "requires": { + "es-abstract": "^1.17.4", + 
"has-symbols": "^1.0.1", + "is-arguments": "^1.0.4", + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-string": "^1.0.5", + "isarray": "^2.0.5" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", + "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", + "dev": true, + "requires": { + "is-buffer": "~2.0.3" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", + "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "glob": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", + "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "growl": { + "version": 
"1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==" + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "is-arguments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", + "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-buffer": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==", + "dev": true + }, + "is-callable": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.2.tgz", + "integrity": "sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA==" + }, + "is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": 
"sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", + "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==", + "dev": true + }, + "is-negative-zero": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.0.tgz", + "integrity": "sha1-lVOxIbD6wohp2p7UWeIMdUN4hGE=" + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true + }, + "is-regex": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.1.tgz", + "integrity": "sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg==", + "requires": { + "has-symbols": "^1.0.1" + } + }, + "is-set": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", + "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==", + "dev": true + }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "requires": { + "has-symbols": "^1.0.1" + } + }, + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "iterate-iterator": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/iterate-iterator/-/iterate-iterator-1.0.1.tgz", + "integrity": "sha512-3Q6tudGN05kbkDQDI4CqjaBf4qf85w6W6GnuZDtUVYwKgtC1q8yxYX7CZed7N+tLzQqS6roujWvszf13T+n9aw==", + "dev": true + }, + "iterate-value": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/iterate-value/-/iterate-value-1.0.2.tgz", + "integrity": "sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==", + "dev": true, + "requires": { + "es-get-iterator": "^1.0.2", + "iterate-iterator": "^1.0.1" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", + "integrity": 
"sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "log-symbols": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", + "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", + "dev": true, + "requires": { + "chalk": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + } + } + }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + }, + "dependencies": { + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + } + } + }, + "mocha": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.1.3.tgz", + "integrity": "sha512-ZbaYib4hT4PpF4bdSO2DohooKXIn4lDeiYqB+vTmCdr6l2woW0b6H3pf5x4sM5nwQMru9RvjjHYWVGltR50ZBw==", + "dev": true, + "requires": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.4.2", + "debug": "4.1.1", + "diff": "4.0.2", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.6", + "growl": "1.10.5", + "he": 
"1.2.0", + "js-yaml": "3.14.0", + "log-symbols": "4.0.0", + "minimatch": "3.0.4", + "ms": "2.1.2", + "object.assign": "4.1.0", + "promise.allsettled": "1.0.2", + "serialize-javascript": "4.0.0", + "strip-json-comments": "3.0.1", + "supports-color": "7.1.0", + "which": "2.0.2", + "wide-align": "1.1.3", + "workerpool": "6.0.0", + "yargs": "13.3.2", + "yargs-parser": "13.1.2", + "yargs-unparser": "1.6.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "object-inspect": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", + "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==" + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz", + "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true + }, + "promise.allsettled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/promise.allsettled/-/promise.allsettled-1.0.2.tgz", + "integrity": "sha512-UpcYW5S1RaNKT6pd+s9jp9K9rlQge1UXKskec0j6Mmuq7UJCvlS2J2/s/yuPN8ehftf9HXMxWlKiPbGGUzpoRg==", + "dev": true, + "requires": { + "array.prototype.map": "^1.0.1", + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "iterate-value": "^1.0.0" + } + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readdirp": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.4.0.tgz", + "integrity": "sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "semver": { + "version": "5.7.1", + 
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "serialize-javascript": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", + "integrity": "sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "string.prototype.trimend": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", + "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "string.prototype.trimstart": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", + "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "dev": true + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": 
"sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "ts-node": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-9.0.0.tgz", + "integrity": "sha512-/TqB4SnererCDR/vb4S/QvSZvzQMJN8daAslg7MeaiHvD8rDZsSfXmNeNumyZZzMned72Xoq/isQljYSt8Ynfg==", + "dev": true, + "requires": { + "arg": "^4.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "source-map-support": "^0.5.17", + "yn": "3.1.1" + } + }, + "tslib": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.13.0.tgz", + "integrity": "sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==", + "dev": true + }, + "tslint": { + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/tslint/-/tslint-5.20.1.tgz", + "integrity": "sha512-EcMxhzCFt8k+/UP5r8waCf/lzmeSyVlqxqMEDQE7rWYiQky8KpIBz1JAoYXfROHrPZ1XXd43q8yQnULOLiBRQg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "builtin-modules": "^1.1.1", + "chalk": "^2.3.0", + "commander": "^2.12.1", + "diff": "^4.0.1", + "glob": "^7.1.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "resolve": "^1.3.2", + "semver": "^5.3.0", + "tslib": "^1.8.0", + "tsutils": "^2.29.0" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + } + } + }, + "tsutils": { + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", + "integrity": "sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==", + "dev": true, + "requires": { + "tslib": "^1.8.1" + } + }, + "typescript": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.3.tgz", + "integrity": "sha512-tEu6DGxGgRJPb/mVPIZ48e69xCn2yRmCgYmDugAVwmJ6o+0u1RI18eO7E7WBTLYLaEVVOhwQmcdhQHweux/WPg==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2" + } + }, + "workerpool": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.0.0.tgz", + "integrity": "sha512-fU2OcNA/GVAJLLyKUoHkAgIhKb0JoCpSjLC/G2vYKxUjVmQwGbRVeoPJ1a8U4pnVofz4AQV5Y/NEw8oKqxEBtA==", + "dev": true + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "dependencies": { + "ansi-regex": 
{ + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true + }, + "yargs": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, + "yargs-unparser": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.1.tgz", + "integrity": "sha512-qZV14lK9MWsGCmcr7u5oXGH0dbGqZAIxTDrWXZDo5zUr6b6iUmelNKO6x6R1dQT24AH3LgRxJpr8meWy2unolA==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "decamelize": "^1.2.0", + "flat": "^4.1.0", + "is-plain-obj": "^1.1.0", + "yargs": "^14.2.3" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": 
"sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "yargs": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-14.2.3.tgz", + "integrity": "sha512-ZbotRWhF+lkjijC/VhmOT9wSgyBQ7+zr13+YLkhfsSiTriYsMzkTUFP18pFhWwBeMa5gUc1MzbhrO6/VB7c9Xg==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "decamelize": "^1.2.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^15.0.1" + } + }, + "yargs-parser": { + "version": "15.0.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-15.0.1.tgz", + "integrity": "sha512-0OAMV2mAZQrs3FkNpDQcBk1x5HXb8X4twADss4S0Iuk+2dGnLOE/fRHrsYm542GduMveyA77OF4wrNJuanRCWw==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true + } + } +} diff --git a/llparse-builder/package.json b/llparse-builder/package.json new file mode 100644 index 0000000..1c1ac54 --- /dev/null +++ b/llparse-builder/package.json @@ -0,0 +1,48 @@ +{ + "name": "llparse-builder", + "version": "1.5.2", + "description": "Build graph for consumption in LLParse", + "main": "lib/builder.js", + "types": "lib/builder.d.ts", + "files": [ + "lib", + "src" + ], + "scripts": { + "build": "tsc", + "clean": "rm -rf lib", + "prepare": "npm run clean && npm run build", + "lint": "tslint -c tslint.json src/*.ts src/**/*.ts src/**/**/*.ts test/*.ts", + "mocha": "mocha -r ts-node/register/type-check --reporter spec test/*-test.ts", + "test": "npm run mocha && npm run lint" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/indutny/llparse-builder.git" + }, + "keywords": [ + "llparse", + "builder", + "llvm", + "bitcode" + ], + "author": "Fedor Indutny (http://darksi.de/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/indutny/llparse-builder/issues" + }, + "homepage": "https://github.com/indutny/llparse-builder#readme", + "devDependencies": { + "@types/mocha": "^8.0.3", + "@types/node": "^14.11.8", + "mocha": "^8.1.3", + "ts-node": "^9.0.0", + "tslint": "^5.20.1", + "typescript": "^4.0.3" + }, + "dependencies": { + "@types/debug": "4.1.5 ", + "binary-search": "^1.3.6", + "debug": "^4.2.0" + } +} diff --git a/llparse-builder/src/builder.ts b/llparse-builder/src/builder.ts new file mode 100644 index 0000000..a335a85 --- /dev/null +++ b/llparse-builder/src/builder.ts @@ -0,0 +1,147 @@ +import * as code from './code'; +import * as node from './node'; +import { Property, PropertyType } from './property'; +import { Span } from './span'; +import * as transform from './transform'; + +export { code, node, transform, Property, PropertyType, Span }; +export { Edge } from './edge'; +export { LoopChecker } from './loop-checker'; +export { ISpanAllocatorResult, SpanAllocator } from './span-allocator'; +export { Reachability } from './reachability'; + +/** + * Construct parsing graph for later use in `llparse`. + */ +export class Builder { + /** + * API for creating external callbacks and intrinsic operations. 
+ */ + public readonly code: code.Creator = new code.Creator(); + + /** + * API for creating character transforms for use in nodes created with + * `builder.node()` + */ + public readonly transform: transform.Creator = new transform.Creator(); + + private readonly privProperties: Map<string, Property> = new Map(); + + // Various nodes + + /** + * Create regular node for matching characters and sequences. + * + * @param name Node name + */ + public node(name: string): node.Match { + return new node.Match(name); + } + + /** + * Create terminal error node. Returns error code to user, and sets reason + * in the parser's state object. + * + * This node does not consume any bytes upon execution. + * + * @param errorCode Integer error code + * @param reason Error description + */ + public error(errorCode: number, reason: string): node.Error { + return new node.Error(errorCode, reason); + } + + /** + * Create invoke node that calls either external user callback or an + * intrinsic operation. + * + * This node does not consume any bytes upon execution. + * + * NOTE: When `.invoke()` is a target of `node().select()` - callback must + * have signature that accepts `.select()`'s value, otherwise it must be of + * the signature that takes no such value. + * + * @param fn Code instance to invoke + * @param map Object with integer keys and `Node` values. Describes + * nodes that are visited upon receiving particular + * return integer value + * @param otherwise Convenience `Node` argument. Effect is the same as calling + * `p.invoke(...).otherwise(node)` + */ + public invoke(fn: code.Code, map?: node.IInvokeMap | node.Node, + otherwise?: node.Node): node.Invoke { + let res: node.Invoke; + + // `.invoke(name)` + if (map === undefined) { + res = new node.Invoke(fn, {}); + // `.invoke(name, otherwise)` + } else if (map instanceof node.Node) { + res = new node.Invoke(fn, {}); + otherwise = map; + } else { + res = new node.Invoke(fn, map as node.IInvokeMap); + } + + if (otherwise !== undefined) { + res.otherwise(otherwise); + } + return res; + } + + /** + * Create node that consumes number of bytes specified by value of the + * state's property with name in `field` argument. + * + * @param field Property name to use + */ + public consume(field: string): node.Consume { + return new node.Consume(field); + } + + /** + * Create non-terminal node that returns `errorCode` as error number to + * user, but still allows feeding more data to the parser. + * + * This node does not consume any bytes upon execution. + * + * @param errorCode Integer error code + * @param reason Error description + */ + public pause(errorCode: number, reason: string): node.Pause { + return new node.Pause(errorCode, reason); + } + + // Span + + /** + * Create Span with given `callback`. + * + * @param callback External span callback, must be result of + * `.code.span(...)` + */ + public span(callback: code.Span): Span { + return new Span(callback); + } + + // Custom property API + + /** + * Allocate space for property in parser's state. + */ + public property(ty: PropertyType, name: string): void { + if (this.privProperties.has(name)) { + throw new Error(`Duplicate property with a name: "${name}"`); + } + + const prop = new Property(ty, name); + this.privProperties.set(name, prop); + } + + /** + * Return list of all allocated properties in parser's state.
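The node constructors above are combined through the `node.Match` API (`.match()`, `.select()`, `.otherwise()`), which lives in `./node` and is not part of this hunk. A minimal, hypothetical sketch of how a small graph might be wired up; the property, node and error names are illustrative only:

import { Builder } from 'llparse-builder';

const b = new Builder();

// Reserve an 8-bit slot in the parser state for the selected value.
b.property('i8', 'method');

const start = b.node('start');
const rest = b.node('rest');

// `.select()` maps matched sequences to integer values, so its target
// invoke() must use a `value`-signature code, here the `store` intrinsic.
start.select({ 'GET': 1, 'HEAD': 2, 'POST': 3 },
  b.invoke(b.code.store('method'), rest));

// Unmatched input is routed through `.otherwise()` to terminal error nodes.
start.otherwise(b.error(1, 'Unexpected method'));
rest.otherwise(b.error(2, 'Not implemented'));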
+ */ + public get properties(): ReadonlyArray<Property> { + return Array.from(this.privProperties.values()); + } +} diff --git a/llparse-builder/src/code/and.ts b/llparse-builder/src/code/and.ts new file mode 100644 index 0000000..5f78675 --- /dev/null +++ b/llparse-builder/src/code/and.ts @@ -0,0 +1,7 @@ +import { FieldValue } from './field-value'; + +export class And extends FieldValue { + constructor(field: string, value: number) { + super('match', 'and', field, value); + } +} diff --git a/llparse-builder/src/code/base.ts b/llparse-builder/src/code/base.ts new file mode 100644 index 0000000..00b479f --- /dev/null +++ b/llparse-builder/src/code/base.ts @@ -0,0 +1,16 @@ +export type Signature = 'match' | 'value'; + +/** + * Base code class. + */ +export abstract class Code { + /** + * @param signature Code signature to be used. `match` means that code takes + * no input value (from `.select()`), otherwise it must be + * `value` + * @param name External function or intrinsic name. + */ + constructor(public readonly signature: Signature, + public readonly name: string) { + } +} diff --git a/llparse-builder/src/code/creator.ts b/llparse-builder/src/code/creator.ts new file mode 100644 index 0000000..98f9296 --- /dev/null +++ b/llparse-builder/src/code/creator.ts @@ -0,0 +1,184 @@ +import * as code from './'; + +/** + * API for creating external callbacks and intrinsic operations. + */ +export class Creator { + // Callbacks to external C functions + + /** + * Create an external callback that **has no** `value` argument. + * + * This callback can be used in all `Invoke` nodes except those that are + * targets of `.select()` method. + * + * C signature of callback must be: + * + * ```c + * int name(llparse_t* state, const char* p, const char* endp) + * ``` + * + * Where `llparse_t` is parser state's type name. + * + * @param name External function name. + */ + public match(name: string): code.Match { + return new code.Match(name); + } + + /** + * Create an external callback that **has** `value` argument. + * + * This callback can be used only in `Invoke` nodes that are targets of + * `.select()` method. + * + * C signature of callback must be: + * + * ```c + * int name(llparse_t* state, const char* p, const char* endp, int value) + * ``` + * + * Where `llparse_t` is parser state's type name. + * + * @param name External function name. + */ + public value(name: string): code.Value { + return new code.Value(name); + } + + /** + * Create an external span callback. + * + * This callback can be used only in `Span` constructor. + * + * C signature of callback must be: + * + * ```c + * int name(llparse_t* state, const char* p, const char* endp) + * ``` + * + * NOTE: non-zero return value is treated as resumable error. + * + * @param name External function name. + */ + public span(name: string): code.Span { + return new code.Span(name); + } + + // Helpers + + /** + * Intrinsic operation. Stores `value` from `.select()` node into the state's + * property with the name specified by `field`, returns zero. + * + * state[field] = value; + * return 0; + * + * @param field Property name + */ + public store(field: string): code.Store { + return new code.Store(field); + } + + /** + * Intrinsic operation. Loads and returns state's property with the name + * specified by `field`. + * + * The value of the property is either truncated or zero-extended to fit into + * 32-bit unsigned integer. + * + * return state[field]; + * + * @param field Property name.
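The `match`/`value` distinction documented above determines where a callback may be attached. A short sketch under assumed names (`on_complete` and `on_method` are hypothetical C functions that would have to be supplied when the generated parser is linked):

import { Builder } from 'llparse-builder';

const b = new Builder();
const start = b.node('start');
const next = b.node('next');

// `match` code takes no extra argument and may be attached to any invoke();
// the map routes specific return values, anything else goes to the error node.
const done = b.invoke(b.code.match('on_complete'),
  { 0: start }, b.error(1, 'on_complete failed'));

// `value` code receives the `.select()` value, so it is only valid where the
// invoke() is a `.select()` target.
start.select({ 'GET': 1, 'PUT': 2 }, b.invoke(b.code.value('on_method'), next));
start.otherwise(b.error(2, 'unknown method'));
next.otherwise(done);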
+ */ + public load(field: string): code.Load { + return new code.Load(field); + } + + /** + * Intrinsic operation. Takes `value` from `.select()`, state's property + * with the name `field` and does: + * + * field = state[field]; + * field *= options.base; + * field += value; + * state[field] = field; + * return 0; // or 1 on overflow + * + * Return values are: + * + * - 0 - success + * - 1 - overflow + * + * @param field Property name + * @param options See `code.MulAdd` documentation. + */ + public mulAdd(field: string, options: code.IMulAddOptions): code.MulAdd { + return new code.MulAdd(field, options); + } + + /** + * Intrinsic operation. Puts `value` integer into the state's property with + * the name specified by `field`. + * + * state[field] = value; + * return 0; + * + * @param field Property name + * @param value Integer value to be stored into the property. + */ + public update(field: string, value: number): code.Update { + return new code.Update(field, value); + } + + /** + * Intrinsic operation. Returns 1 if the integer `value` is equal to the + * state's property with the name specified by `field`. + * + * return state[field] === value ? 1 : 0; + * + * @param field Property name + * @param value Integer value to be checked against. + */ + public isEqual(field: string, value: number): code.IsEqual { + return new code.IsEqual(field, value); + } + + /** + * Intrinsic operation. + * + * state[field] &= value + * return 0; + * + * @param field Property name + * @param value Integer value + */ + public and(field: string, value: number): code.And { + return new code.And(field, value); + } + + /** + * Intrinsic operation. + * + * state[field] |= value + * return 0; + * + * @param field Property name + * @param value Integer value + */ + public or(field: string, value: number): code.Or { + return new code.Or(field, value); + } + + /** + * Intrinsic operation. + * + * return (state[field] & value) == value ? 
1 : 0; + * + * @param field Property name + * @param value Integer value + */ + public test(field: string, value: number): code.Test { + return new code.Test(field, value); + } +} diff --git a/llparse-builder/src/code/field-value.ts b/llparse-builder/src/code/field-value.ts new file mode 100644 index 0000000..2ceea69 --- /dev/null +++ b/llparse-builder/src/code/field-value.ts @@ -0,0 +1,9 @@ +import { Signature } from './base'; +import { Field } from './field'; + +export abstract class FieldValue extends Field { + constructor(signature: Signature, name: string, field: string, + public readonly value: number) { + super(signature, name, field); + } +} diff --git a/llparse-builder/src/code/field.ts b/llparse-builder/src/code/field.ts new file mode 100644 index 0000000..af58c84 --- /dev/null +++ b/llparse-builder/src/code/field.ts @@ -0,0 +1,10 @@ +import * as assert from 'assert'; +import { Code, Signature } from './base'; + +export abstract class Field extends Code { + constructor(signature: Signature, name: string, + public readonly field: string) { + super(signature, name + '_' + field); + assert(!/^_/.test(field), 'Can\'t access internal field from user code'); + } +} diff --git a/llparse-builder/src/code/index.ts b/llparse-builder/src/code/index.ts new file mode 100644 index 0000000..7a651e3 --- /dev/null +++ b/llparse-builder/src/code/index.ts @@ -0,0 +1,15 @@ +export { Code } from './base'; +export { Creator } from './creator'; +export { Field } from './field'; +export { FieldValue } from './field-value'; +export { IsEqual } from './is-equal'; +export { Load } from './load'; +export { Match } from './match'; +export { IMulAddOptions, MulAdd } from './mul-add'; +export { Or } from './or'; +export { And } from './and'; +export { Span } from './span'; +export { Store } from './store'; +export { Test } from './test'; +export { Update } from './update'; +export { Value } from './value'; diff --git a/llparse-builder/src/code/is-equal.ts b/llparse-builder/src/code/is-equal.ts new file mode 100644 index 0000000..91bb957 --- /dev/null +++ b/llparse-builder/src/code/is-equal.ts @@ -0,0 +1,7 @@ +import { FieldValue } from './field-value'; + +export class IsEqual extends FieldValue { + constructor(field: string, value: number) { + super('match', 'is_equal', field, value); + } +} diff --git a/llparse-builder/src/code/load.ts b/llparse-builder/src/code/load.ts new file mode 100644 index 0000000..9f3df2e --- /dev/null +++ b/llparse-builder/src/code/load.ts @@ -0,0 +1,7 @@ +import { Field } from './field'; + +export class Load extends Field { + constructor(field: string) { + super('match', 'load', field); + } +} diff --git a/llparse-builder/src/code/match.ts b/llparse-builder/src/code/match.ts new file mode 100644 index 0000000..631376a --- /dev/null +++ b/llparse-builder/src/code/match.ts @@ -0,0 +1,7 @@ +import { Code } from './base'; + +export class Match extends Code { + constructor(name: string) { + super('match', name); + } +} diff --git a/llparse-builder/src/code/mul-add.ts b/llparse-builder/src/code/mul-add.ts new file mode 100644 index 0000000..fd648ed --- /dev/null +++ b/llparse-builder/src/code/mul-add.ts @@ -0,0 +1,28 @@ +import { Field } from './field'; + +/** + * Options for `code.mulAdd()`. + */ +export interface IMulAddOptions { + /** Value to multiply the property with in the first step */ + readonly base: number; + + /** + * Maximum value of the property. If at any point of computation the + * intermediate result exceeds it - `mulAdd` returns 1 (overflow). 
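As an illustration of `base` and `max`, a sketch of a decimal accumulator built on `mulAdd()`; the builder `b`, the property and node names, and the digit map are hypothetical, and the `.select()`/`.otherwise()` methods come from the `node.Match` API defined elsewhere in this package:

import { Builder } from 'llparse-builder';

const b = new Builder();
const digits = b.node('digits');
const done = b.node('done');

b.property('i64', 'length');

// mulAdd() returns 0 on success and 1 on overflow: route overflow to an
// error node and loop back to `digits` to keep consuming digits otherwise.
const onDigit = b.invoke(
  b.code.mulAdd('length', { base: 10, max: 0xffffffff }),
  { 1: b.error(1, 'length overflow') },
  digits);

digits.select({ '0': 0, '1': 1, '2': 2, '3': 3, '4': 4,
                '5': 5, '6': 6, '7': 7, '8': 8, '9': 9 }, onDigit);
digits.otherwise(done);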
+ */ + readonly max?: number; + + /** + * If `true` - all arithmetic performed by `mulAdd` will be signed. + * + * Default value: `false` + */ + readonly signed?: boolean; +} + +export class MulAdd extends Field { + constructor(field: string, public readonly options: IMulAddOptions) { + super('value', 'mul_add', field); + } +} diff --git a/llparse-builder/src/code/or.ts b/llparse-builder/src/code/or.ts new file mode 100644 index 0000000..33bd402 --- /dev/null +++ b/llparse-builder/src/code/or.ts @@ -0,0 +1,7 @@ +import { FieldValue } from './field-value'; + +export class Or extends FieldValue { + constructor(field: string, value: number) { + super('match', 'or', field, value); + } +} diff --git a/llparse-builder/src/code/span.ts b/llparse-builder/src/code/span.ts new file mode 100644 index 0000000..b97e09e --- /dev/null +++ b/llparse-builder/src/code/span.ts @@ -0,0 +1,5 @@ +import { Match } from './match'; + +export class Span extends Match { + // no-op +} diff --git a/llparse-builder/src/code/store.ts b/llparse-builder/src/code/store.ts new file mode 100644 index 0000000..84abfef --- /dev/null +++ b/llparse-builder/src/code/store.ts @@ -0,0 +1,7 @@ +import { Field } from './field'; + +export class Store extends Field { + constructor(field: string) { + super('value', 'store', field); + } +} diff --git a/llparse-builder/src/code/test.ts b/llparse-builder/src/code/test.ts new file mode 100644 index 0000000..a9d0a22 --- /dev/null +++ b/llparse-builder/src/code/test.ts @@ -0,0 +1,7 @@ +import { FieldValue } from './field-value'; + +export class Test extends FieldValue { + constructor(field: string, value: number) { + super('match', 'test', field, value); + } +} diff --git a/llparse-builder/src/code/update.ts b/llparse-builder/src/code/update.ts new file mode 100644 index 0000000..de62476 --- /dev/null +++ b/llparse-builder/src/code/update.ts @@ -0,0 +1,7 @@ +import { FieldValue } from './field-value'; + +export class Update extends FieldValue { + constructor(field: string, value: number) { + super('match', 'update', field, value); + } +} diff --git a/llparse-builder/src/code/value.ts b/llparse-builder/src/code/value.ts new file mode 100644 index 0000000..06c6fd7 --- /dev/null +++ b/llparse-builder/src/code/value.ts @@ -0,0 +1,7 @@ +import { Code } from './base'; + +export class Value extends Code { + constructor(name: string) { + super('value', name); + } +} diff --git a/llparse-builder/src/edge.ts b/llparse-builder/src/edge.ts new file mode 100644 index 0000000..f6b55cc --- /dev/null +++ b/llparse-builder/src/edge.ts @@ -0,0 +1,54 @@ +import * as assert from 'assert'; + +import { Buffer } from 'buffer'; + +import { Invoke, Node } from './node'; + +/** + * This class represents an edge in the parser graph. + */ +export class Edge { + /** + * Comparator for `.sort()` function.
+ */ + public static compare(a: Edge, b: Edge): number { + if (typeof a.key === 'number') { + return a.key - (b.key as number); + } + return a.key!.compare(b.key as Buffer); + } + + /** + * @param node Edge target + * @param noAdvance If `true` - the parent should not consume bytes before + * moving to the target `node` + * @param key `Buffer` for `node.Match`, `number` for `node.Invoke`, + * `undefined` for edges created with `.otherwise()` + * @param value `.select()` value associated with the edge + */ + constructor(public readonly node: Node, + public readonly noAdvance: boolean, + public readonly key: Buffer | number | undefined, + public readonly value: number | undefined) { + if (node instanceof Invoke) { + if (value === undefined) { + assert.strictEqual(node.code.signature, 'match', + 'Invalid Invoke\'s code signature'); + } else { + assert.strictEqual(node.code.signature, 'value', + 'Invalid Invoke\'s code signature'); + } + } else { + assert.strictEqual(value, undefined, + 'Attempting to pass value to non-Invoke node'); + } + + if (Buffer.isBuffer(key)) { + assert(key.length > 0, 'Invalid edge buffer length'); + + if (noAdvance) { + assert.strictEqual(key.length, 1, + 'Only 1-char keys are allowed in `noAdvance` edges'); + } + } + } +} diff --git a/llparse-builder/src/loop-checker/index.ts b/llparse-builder/src/loop-checker/index.ts new file mode 100644 index 0000000..5751955 --- /dev/null +++ b/llparse-builder/src/loop-checker/index.ts @@ -0,0 +1,205 @@ +import * as assert from 'assert'; +import * as debugAPI from 'debug'; + +import { Node } from '../node'; +import { Reachability } from '../reachability'; +import { Lattice } from './lattice'; + +const debug = debugAPI('llparse-builder:loop-checker'); + +const EMPTY_VALUE = new Lattice('empty'); +const ANY_VALUE = new Lattice('any'); + +/** + * This class implements a loop checker pass. The goal of this pass is to verify + * that the graph doesn't contain infinite loops. + */ +export class LoopChecker { + private readonly lattice: Map<Node, Lattice> = new Map(); + + // Just a cache of terminated keys + private readonly terminatedCache: Map<Node, Lattice> = new Map(); + + /** + * Run loop checker pass on a graph starting from `root`. + * + * Throws on failure.
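A small, hypothetical example of the kind of graph this pass rejects: two nodes that `.peek()` at each other never consume the byte they agree on, so the parser could spin forever on a single 'x' (`.peek()` belongs to the `node.Match` API referenced in the comments below, and the exact error message is only approximate):

import { Builder, LoopChecker } from 'llparse-builder';

const b = new Builder();
const a = b.node('a');
const c = b.node('c');

// peek() creates `noAdvance` edges: the input position never moves.
a.peek('x', c).otherwise(b.error(1, 'fail'));
c.peek('x', a).otherwise(b.error(1, 'fail'));

// Throws roughly: Detected loop in "c" through chain "c" -> "a"
new LoopChecker().check(a);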
+ * + * @param root Graph root node + */ + public check(root: Node): void { + const r = new Reachability(); + + const nodes = r.build(root); + + for (const node of nodes) { + debug('checking loops starting from %j', node.name); + + // Set initial lattice value for all nodes + this.clear(nodes); + + // Mark root as reachable with any value + this.lattice.set(node, ANY_VALUE); + + // Raise lattice values + let changed: Set<Node> = new Set([ root ]); + while (changed.size !== 0) { + if (debug.enabled) { + debug('changed %j', Array.from(changed).map((other) => other.name)); + } + + const next: Set<Node> = new Set(); + for (const changedNode of changed) { + this.propagate(changedNode, next); + } + changed = next; + } + + debug('lattice stabilized'); + + // Visit nodes and walk through reachable edges to detect loops + this.visit(node, []); + } + } + + private clear(nodes: ReadonlyArray<Node>): void { + for (const node of nodes) { + this.lattice.set(node, EMPTY_VALUE); + } + } + + private propagate(node: Node, changed: Set<Node>): void { + let value: Lattice = this.lattice.get(node)!; + debug('propagate(%j), initial value %j', node.name, value); + + // Terminate values that are consumed by `match`/`select` + const terminated = this.terminate(node, value, changed); + if (!terminated.isEqual(EMPTY_VALUE)) { + debug('node %j terminates %j', node.name, terminated); + value = value.subtract(terminated); + if (value.isEqual(EMPTY_VALUE)) { + return; + } + } + + const keysByTarget: Map<Node, Lattice> = new Map(); + // Propagate value through `.peek()`/`.otherwise()` edges + for (const edge of node.getAllEdges()) { + if (!edge.noAdvance) { + continue; + } + + let targetValue: Lattice; + if (keysByTarget.has(edge.node)) { + targetValue = keysByTarget.get(edge.node)!; + } else { + targetValue = this.lattice.get(edge.node)!; + } + + // `otherwise` or `Invoke`'s edges + if (edge.key === undefined || typeof edge.key === 'number') { + targetValue = targetValue.union(value); + } else { + // `.peek()` + const edgeValue = new Lattice([ edge.key[0] ]).intersect(value); + if (edgeValue.isEqual(EMPTY_VALUE)) { + continue; + } + + targetValue = targetValue.union(edgeValue); + } + + keysByTarget.set(edge.node, targetValue); + } + + for (const [ child, childValue ] of keysByTarget) { + debug('node %j propagates %j to %j', node.name, childValue, + child.name); + this.update(child, childValue, changed); + } + } + + private update(node: Node, newValue: Lattice, changed: Set<Node>): boolean { + const value = this.lattice.get(node)!; + if (newValue.isEqual(value)) { + return false; + } + + this.lattice.set(node, newValue); + changed.add(node); + return true; + } + + private terminate(node: Node, value: Lattice, changed: Set<Node>): Lattice { + if (this.terminatedCache.has(node)) { + return this.terminatedCache.get(node)!; + } + + const terminated: number[] = []; + for (const edge of node.getAllEdges()) { + if (edge.noAdvance) { + continue; + } + + // Ignore `otherwise` and `Invoke`'s edges + if (edge.key === undefined || typeof edge.key === 'number') { + continue; + } + + terminated.push(edge.key[0]); + } + + const result = new Lattice(terminated); + this.terminatedCache.set(node, result); + return result; + } + + private visit(node: Node, path: ReadonlyArray<Node>): void { + let value = this.lattice.get(node)!; + debug('enter %j, value is %j', node.name, value); + + const terminated = this.terminatedCache.has(node) ? + this.terminatedCache.get(node)!
: EMPTY_VALUE; + if (!terminated.isEqual(EMPTY_VALUE)) { + debug('subtract terminated %j', terminated); + value = value.subtract(terminated); + if (value.isEqual(EMPTY_VALUE)) { + debug('terminated everything'); + return; + } + } + + for (const edge of node.getAllEdges()) { + if (!edge.noAdvance) { + continue; + } + + let edgeValue = value; + + // `otherwise` or `Invoke`'s edges + if (edge.key === undefined || typeof edge.key === 'number') { + // nothing to do + // `.peek()` + } else { + edgeValue = edgeValue.intersect(new Lattice([ edge.key[0] ])); + } + + // Ignore unreachable edges + if (edgeValue.isEqual(EMPTY_VALUE)) { + continue; + } + if (path.indexOf(edge.node) !== -1) { + if (path.length === 0) { + throw new Error( + `Detected loop in "${edge.node.name}" through "${edge.node.name}"`); + } + throw new Error( + `Detected loop in "${edge.node.name}" through chain ` + + `${path.map((parent) => '"' + parent.name + '"').join(' -> ')}`); + } + this.visit(edge.node, path.concat(edge.node)); + } + + debug('leave %j', node.name); + } +} diff --git a/llparse-builder/src/loop-checker/lattice.ts b/llparse-builder/src/loop-checker/lattice.ts new file mode 100644 index 0000000..8d2a7fe --- /dev/null +++ b/llparse-builder/src/loop-checker/lattice.ts @@ -0,0 +1,115 @@ +import * as assert from 'assert'; + +const MAX_VALUE = 256; +const WORD_SIZE = 32; +const SIZE = (MAX_VALUE / WORD_SIZE) | 0; +const WORD_FILL = -1 | 0; + +assert.strictEqual(MAX_VALUE % WORD_SIZE, 0); + +export type LatticeValue = 'empty' | ReadonlyArray | 'any'; + +/** + * A fixed-size bitfield, really + */ +export class Lattice { + protected readonly words: number[]; + + constructor(value: LatticeValue) { + this.words = new Array(SIZE).fill(value === 'any' ? WORD_FILL : 0); + + if (Array.isArray(value)) { + for (const single of value) { + this.add(single); + } + } + } + + public check(bit: number): boolean { + assert(0 <= bit && bit < MAX_VALUE, 'Invalid bit'); + const index = (bit / WORD_SIZE) | 0; + const off = bit % WORD_SIZE; + return (this.words[index] & (1 << off)) !== 0; + } + + public union(other: Lattice): Lattice { + const result = new Lattice('empty'); + + for (let i = 0; i < SIZE; i++) { + result.words[i] = this.words[i] | other.words[i]; + } + + return result; + } + + public intersect(other: Lattice): Lattice { + const result = new Lattice('empty'); + + for (let i = 0; i < SIZE; i++) { + result.words[i] = this.words[i] & other.words[i]; + } + + return result; + } + + public subtract(other: Lattice): Lattice { + const result = new Lattice('empty'); + + for (let i = 0; i < SIZE; i++) { + result.words[i] = this.words[i] & (~other.words[i]); + } + + return result; + } + + public isEqual(other: Lattice): boolean { + if (this === other) { + return true; + } + + for (let i = 0; i < SIZE; i++) { + if (this.words[i] !== other.words[i]) { + return false; + } + } + return true; + } + + public *[Symbol.iterator](): Iterator { + // TODO(indutny): improve speed if needed + for (let i = 0; i < MAX_VALUE; i++) { + if (this.check(i)) { + yield i; + } + } + } + + public toJSON(): any { + let isEmpty = true; + let isFull = true; + for (let i = 0; i < SIZE; i++) { + if (this.words[i] !== 0) { + isEmpty = false; + } + if (this.words[i] !== WORD_FILL) { + isFull = false; + } + } + if (isEmpty) { + return 'empty'; + } + if (isFull) { + return 'any'; + } + return Array.from(this); + } + + // Private + + private add(bit: number): void { + assert(0 <= bit && bit < MAX_VALUE, 'Invalid bit'); + const index = (bit / WORD_SIZE) | 0; + const off = 
bit % WORD_SIZE; + this.words[index] |= 1 << off; + } +} diff --git a/llparse-builder/src/node/base.ts b/llparse-builder/src/node/base.ts new file mode 100644 index 0000000..9840f16 --- /dev/null +++ b/llparse-builder/src/node/base.ts @@ -0,0 +1,96 @@ +import * as assert from 'assert'; +import binarySearch = require('binary-search'); +import { Edge } from '../edge'; + +/** + * Base class for all graph nodes. + */ +export abstract class Node { + private otherwiseEdge: Edge | undefined; + private privEdges: Edge[] = []; + + /** + * @param name Node name + */ + constructor(public readonly name: string) { + // no-op + } + + /** + * Create an otherwise edge to node `node`. + * + * This edge is executed when no other edges match current input. No + * characters are consumed upon transition. + * + * NOTE: At most one otherwise (skipping or not) edge can be set, most nodes + * except `Error` require it. + * + * @param node Target node + */ + public otherwise(node: Node): this { + if (this.otherwiseEdge !== undefined) { + throw new Error('Node already has `otherwise` or `skipTo`'); + } + + this.otherwiseEdge = new Edge(node, true, undefined, undefined); + return this; + } + + /** + * Create a skipping otherwise edge to node `node`. + * + * This edge is executed when no other edges match current input. Single + * character is consumed upon transition. + * + * NOTE: At most one otherwise (skipping or not) edge can be set, most nodes + * except `Error` require it. + * + * @param node Target node + */ + public skipTo(node: Node): this { + if (this.otherwiseEdge !== undefined) { + throw new Error('Node already has `otherwise` or `skipTo`'); + } + + this.otherwiseEdge = new Edge(node, false, undefined, undefined); + return this; + } + + // Limited public use + + /** Get otherwise edge. */ + public getOtherwiseEdge(): Edge | undefined { + return this.otherwiseEdge; + } + + /** Get list of all non-otherwise edges. */ + public getEdges(): ReadonlyArray { + return this.privEdges; + } + + /** Get list of all edges (including otherwise, if present). */ + public getAllEdges(): ReadonlyArray { + const res = this.privEdges; + if (this.otherwiseEdge === undefined) { + return res; + } else { + return res.concat(this.otherwiseEdge); + } + } + + /** Get iterator through all non-otherwise edges. */ + public *[Symbol.iterator](): Iterator { + yield* this.privEdges; + } + + // Internal + + protected addEdge(edge: Edge): void { + assert.notStrictEqual(edge.key, undefined); + + const index = binarySearch(this.privEdges, edge, Edge.compare); + assert(index < 0, 'Attempting to create duplicate edge'); + + this.privEdges.splice(-1 - index, 0, edge); + } +} diff --git a/llparse-builder/src/node/consume.ts b/llparse-builder/src/node/consume.ts new file mode 100644 index 0000000..eff4037 --- /dev/null +++ b/llparse-builder/src/node/consume.ts @@ -0,0 +1,19 @@ +import * as assert from 'assert'; +import { Node } from './base'; + +/** + * This node consumes number of characters specified by state's property with + * name `field` from the input, and forwards execution to `otherwise` node. 
+ */ +export class Consume extends Node { + /** + * @param field State's property name + */ + constructor(public readonly field: string) { + super('consume_' + field); + + if (/^_/.test(field)) { + throw new Error(`Can't use internal field in \`consume()\`: "${field}"`); + } + } +} diff --git a/llparse-builder/src/node/error.ts b/llparse-builder/src/node/error.ts new file mode 100644 index 0000000..393f566 --- /dev/null +++ b/llparse-builder/src/node/error.ts @@ -0,0 +1,24 @@ +import * as assert from 'assert'; +import { Node } from './base'; + +/** + * This node terminates the execution with an error + */ +class NodeError extends Node { + /** + * @param code Error code to return to user + * @param reason Error description to store in parser's state + */ + constructor(public readonly code: number, public readonly reason: string) { + super('error'); + assert.strictEqual(code, code | 0, 'code must be integer'); + } + + /** `.otherwise()` is not supported on this type of node */ + public otherwise(node: Node): this { throw new Error('Not supported'); } + + /** `.skipTo()` is not supported on this type of node */ + public skipTo(node: Node): this { throw new Error('Not supported'); } +} + +export { NodeError as Error }; diff --git a/llparse-builder/src/node/index.ts b/llparse-builder/src/node/index.ts new file mode 100644 index 0000000..e3d5fe5 --- /dev/null +++ b/llparse-builder/src/node/index.ts @@ -0,0 +1,8 @@ +export { Node } from './base'; +export { Consume } from './consume'; +export { Error } from './error'; +export { Invoke, IInvokeMap } from './invoke'; +export { Match } from './match'; +export { Pause } from './pause'; +export { SpanStart } from './span-start'; +export { SpanEnd } from './span-end'; diff --git a/llparse-builder/src/node/invoke.ts b/llparse-builder/src/node/invoke.ts new file mode 100644 index 0000000..d6791a7 --- /dev/null +++ b/llparse-builder/src/node/invoke.ts @@ -0,0 +1,39 @@ +import * as assert from 'assert'; + +import { Code } from '../code'; +import { Edge } from '../edge'; +import { Node } from './base'; + +/** + * Map of return codes of the callback. Each key is a return code, + * value is the target node that must be executed upon getting such return code. + */ +export interface IInvokeMap { + readonly [key: number]: Node; +} + +/** + * This node invokes either external callback or intrinsic code and passes the + * execution to either a target from a `map` (if the return code matches one of + * registered in it), or to `otherwise` node. + */ +export class Invoke extends Node { + /** + * @param code External callback or intrinsic code. Can be created with + * `builder.code.*()` methods. 
+ * @param map Map from callback return codes to target nodes + */ + constructor(public readonly code: Code, map: IInvokeMap) { + super('invoke_' + code.name); + + Object.keys(map).forEach((mapKey) => { + const numKey: number = parseInt(mapKey, 10); + const targetNode = map[numKey]!; + + assert.strictEqual(numKey, numKey | 0, + 'Invoke\'s map keys must be integers'); + + this.addEdge(new Edge(targetNode, true, numKey, undefined)); + }); + } +} diff --git a/llparse-builder/src/node/match.ts b/llparse-builder/src/node/match.ts new file mode 100644 index 0000000..617a659 --- /dev/null +++ b/llparse-builder/src/node/match.ts @@ -0,0 +1,162 @@ +import * as assert from 'assert'; +import { Buffer } from 'buffer'; + +import { Edge } from '../edge'; +import { Transform } from '../transform'; +import { toBuffer } from '../utils'; +import { Node } from './base'; + +/** + * Character/sequence to match. + * + * May have following types: + * + * * `number` - for single character + * * `string` - for printable character sequence + * * `Buffer` - for raw byte sequence + */ +export type MatchSingleValue = string | number | Buffer; + +/** + * Convenience type for passing several characters/sequences to match methods. + */ +export type MatchValue = MatchSingleValue | ReadonlyArray; + +/** + * A map from characters/sequences to `.select()`'s values. Used for specifying + * the value to be passed to `.select()'`s targets. + */ +export interface IMatchSelect { + readonly [key: string]: number; +} + +/** + * This node matches characters/sequences and forwards the execution according + * to matched character with optional attached value (See `.select()`). + */ +export class Match extends Node { + private transformFn: Transform | undefined; + + /** + * Set character transformation function. + * + * @param transform Transformation to apply. Can be created with + * `builder.transform.*()` methods. + */ + public transform(transformFn: Transform): this { + this.transformFn = transformFn; + return this; + } + + /** + * Match sequence/character and forward execution to `next` on success, + * consuming matched bytes of the input. + * + * No value is attached on such execution forwarding, and the target node + * **must not** be an `Invoke` node with a callback expecting the value. + * + * @param value Sequence/character to be matched + * @param next Target node to be executed on success. + */ + public match(value: MatchValue, next: Node): this { + if (Array.isArray(value)) { + for (const subvalue of value) { + this.match(subvalue, next); + } + return this; + } + + const buffer = toBuffer(value as MatchSingleValue); + const edge = new Edge(next, false, buffer, undefined); + this.addEdge(edge); + return this; + } + + /** + * Match character and forward execution to `next` on success + * without consuming one byte of the input. + * + * No value is attached on such execution forwarding, and the target node + * **must not** be an `Invoke` with a callback expecting the value. + * + * @param value Character to be matched + * @param next Target node to be executed on success. 
+ */ + public peek(value: MatchValue, next: Node): this { + if (Array.isArray(value)) { + for (const subvalue of value) { + this.peek(subvalue, next); + } + return this; + } + + const buffer = toBuffer(value as MatchSingleValue); + assert.strictEqual(buffer.length, 1, + '`.peek()` accepts only single character keys'); + + const edge = new Edge(next, true, buffer, undefined); + this.addEdge(edge); + return this; + } + + /** + * Match character/sequence and forward execution to `next` on success + * consumed matched bytes of the input. + * + * Value is attached on such execution forwarding, and the target node + * **must** be an `Invoke` with a callback expecting the value. + * + * Possible signatures: + * + * * `.select(key, value [, next ])` + * * `.select({ key: value } [, next])` + * + * @param keyOrMap Either a sequence to match, or a map from sequences to + * values + * @param valueOrNext Either an integer value to be forwarded to the target + * node, or an otherwise node + * @param next Convenience param. Same as calling `.otherwise(...)` + */ + public select(keyOrMap: MatchSingleValue | IMatchSelect, + valueOrNext?: number | Node, next?: Node): this { + // .select({ key: value, ... }, next) + if (typeof keyOrMap === 'object') { + assert(valueOrNext instanceof Node, + 'Invalid `next` argument of `.select()`'); + assert.strictEqual(next, undefined, + 'Invalid argument count of `.select()`'); + + const map: IMatchSelect = keyOrMap as IMatchSelect; + next = valueOrNext as Node | undefined; + + Object.keys(map).forEach((mapKey) => { + const numKey: number = mapKey as any; + + this.select(numKey, map[numKey]!, next); + }); + return this; + } + + // .select(key, value, next) + assert.strictEqual(typeof valueOrNext, 'number', + 'Invalid `value` argument of `.select()`'); + assert.notStrictEqual(next, undefined, + 'Invalid `next` argument of `.select()`'); + + const key = toBuffer(keyOrMap as MatchSingleValue); + const value = valueOrNext as number; + + const edge = new Edge(next!, false, key, value); + this.addEdge(edge); + return this; + } + + // Limited public use + + /** + * Get tranformation function + */ + public getTransform(): Transform | undefined { + return this.transformFn; + } +} diff --git a/llparse-builder/src/node/pause.ts b/llparse-builder/src/node/pause.ts new file mode 100644 index 0000000..2dcf5d1 --- /dev/null +++ b/llparse-builder/src/node/pause.ts @@ -0,0 +1,25 @@ +import * as assert from 'assert'; +import { Node } from './base'; + +/** + * This returns the specified error code, but makes the resumption to + * `otherwise` target possible. + */ +export class Pause extends Node { + /** + * @param code Error code to return + * @param reason Error description + */ + constructor(public readonly code: number, public readonly reason: string) { + super('pause'); + assert.strictEqual(code, code | 0, 'code must be integer'); + } + + /** + * `.skipTo()` is not supported on this type of node, please use + * `.otherwise()` + */ + public skipTo(node: Node): this { + throw new Error('Not supported, please use `pause.otherwise()`'); + } +} diff --git a/llparse-builder/src/node/span-end.ts b/llparse-builder/src/node/span-end.ts new file mode 100644 index 0000000..377cd73 --- /dev/null +++ b/llparse-builder/src/node/span-end.ts @@ -0,0 +1,19 @@ +import { Span } from '../span'; +import { Node } from './base'; + +/** + * Indicates span end. 
+ * + * A callback will be invoked with all input data since the most recent of: + * + * * Span start invocation + * * Parser execution + */ +export class SpanEnd extends Node { + /** + * @param span Span instance + */ + constructor(public readonly span: Span) { + super(`span_end_${span.callback.name}`); + } +} diff --git a/llparse-builder/src/node/span-start.ts b/llparse-builder/src/node/span-start.ts new file mode 100644 index 0000000..f81b432 --- /dev/null +++ b/llparse-builder/src/node/span-start.ts @@ -0,0 +1,16 @@ +import { Span } from '../span'; +import { Node } from './base'; + +/** + * Indicates span start. + * + * See `SpanEnd` for details on callback invocation. + */ +export class SpanStart extends Node { + /** + * @param span Span instance + */ + constructor(public readonly span: Span) { + super(`span_start_${span.callback.name}`); + } +} diff --git a/llparse-builder/src/property.ts b/llparse-builder/src/property.ts new file mode 100644 index 0000000..cf2fe4b --- /dev/null +++ b/llparse-builder/src/property.ts @@ -0,0 +1,12 @@ +export type PropertyType = 'i8' | 'i16' | 'i32' | 'i64' | 'ptr'; + +/** + * Class describing allocated property in parser's state + */ +export class Property { + constructor(public readonly ty: PropertyType, public readonly name: string) { + if (/^_/.test(name)) { + throw new Error(`Can't use internal property name: "${name}"`); + } + } +} diff --git a/llparse-builder/src/reachability.ts b/llparse-builder/src/reachability.ts new file mode 100644 index 0000000..88bcd65 --- /dev/null +++ b/llparse-builder/src/reachability.ts @@ -0,0 +1,31 @@ +import { Node } from './node'; + +/** + * This class finds all reachable nodes + */ +export class Reachability { + /** + * Build and return list of reachable nodes. + */ + public build(root: Node): ReadonlyArray { + const res = new Set(); + const queue = [ root ]; + while (queue.length !== 0) { + const node = queue.pop()!; + if (res.has(node)) { + continue; + } + res.add(node); + + for (const edge of node) { + queue.push(edge.node); + } + + const otherwise = node.getOtherwiseEdge(); + if (otherwise !== undefined) { + queue.push(otherwise.node); + } + } + return Array.from(res) as ReadonlyArray; + } +} diff --git a/llparse-builder/src/span-allocator.ts b/llparse-builder/src/span-allocator.ts new file mode 100644 index 0000000..b3e8f6b --- /dev/null +++ b/llparse-builder/src/span-allocator.ts @@ -0,0 +1,182 @@ +import * as assert from 'assert'; +import * as debugAPI from 'debug'; + +import { Node, SpanEnd, SpanStart } from './node'; +import { Reachability } from './reachability'; +import { Span } from './span'; + +const debug = debugAPI('llparse-builder:span-allocator'); + +type SpanSet = Set; + +interface ISpanActiveInfo { + readonly active: Map; + readonly spans: ReadonlyArray; +} + +type SpanOverlap = Map; + +export interface ISpanAllocatorResult { + readonly colors: ReadonlyMap; + readonly concurrency: ReadonlyArray >; + readonly max: number; +} + +function id(node: SpanStart | SpanEnd): Span { + return node.span; +} + +export class SpanAllocator { + public allocate(root: Node): ISpanAllocatorResult { + const r = new Reachability(); + const nodes = r.build(root); + const info = this.computeActive(nodes); + this.check(info); + const overlap = this.computeOverlap(info); + return this.color(info.spans, overlap); + } + + private computeActive(nodes: ReadonlyArray): ISpanActiveInfo { + const activeMap: Map = new Map(); + nodes.forEach((node) => activeMap.set(node, new Set())); + + const queue: Set = new Set(nodes); + 
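+    // Worklist pass: every reachable node starts out queued. Whenever a new
+    // span becomes active at an edge target, that target is put back on the
+    // queue, so the per-node `active` sets only grow and the loop below stops
+    // at a fixed point. A `SpanStart` adds its own span to the set; a
+    // `SpanEnd` keeps its own span from propagating any further.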
const spans: SpanSet = new Set(); + for (const node of queue) { + queue.delete(node); + + const active = activeMap.get(node)!; + + if (node instanceof SpanStart) { + const span = id(node); + spans.add(span); + active.add(span); + } + + active.forEach((span) => { + // Don't propagate span past the spanEnd + if (node instanceof SpanEnd && span === id(node)) { + return; + } + + node.getAllEdges().forEach((edge) => { + const edgeNode = edge.node; + + // Disallow loops + if (edgeNode instanceof SpanStart) { + assert.notStrictEqual(id(edgeNode), span, + `Detected loop in span "${span.callback.name}", started ` + + `at "${node.name}"`); + } + + const edgeActive = activeMap.get(edgeNode)!; + if (edgeActive.has(span)) { + return; + } + + edgeActive.add(span); + queue.add(edgeNode); + }); + }); + } + + return { active: activeMap, spans: Array.from(spans) }; + } + + private check(info: ISpanActiveInfo): void { + debug('check start'); + for (const [ node, spans ] of info.active) { + for (const edge of node.getAllEdges()) { + if (edge.node instanceof SpanStart) { + continue; + } + + // Skip terminal nodes + if (edge.node.getAllEdges().length === 0) { + continue; + } + + debug('checking edge from %j to %j', node.name, edge.node.name); + + const edgeSpans = info.active.get(edge.node)!; + for (const subSpan of edgeSpans) { + assert(spans.has(subSpan), + `Unmatched span end for "${subSpan.callback.name}" ` + + `at "${edge.node.name}", coming from "${node.name}"`); + } + + if (edge.node instanceof SpanEnd) { + const span = id(edge.node); + assert(spans.has(span), + `Unmatched span end for "${span.callback.name}"`); + } + } + } + } + + private computeOverlap(info: ISpanActiveInfo): SpanOverlap { + const active = info.active; + const overlap: SpanOverlap = new Map(); + + info.spans.forEach((span) => overlap.set(span, new Set())); + + active.forEach((spans) => { + spans.forEach((one) => { + const set = overlap.get(one)!; + spans.forEach((other) => { + if (other !== one) { + set.add(other); + } + }); + }); + }); + + return overlap; + } + + private color(spans: ReadonlyArray, overlapMap: SpanOverlap) + : ISpanAllocatorResult { + let max = -1; + const colors: Map = new Map(); + + const allocate = (span: Span): number => { + if (colors.has(span)) { + return colors.get(span)!; + } + + const overlap = overlapMap.get(span)!; + + // See which colors are already used + const used: Set = new Set(); + for (const subSpan of overlap) { + if (colors.has(subSpan)) { + used.add(colors.get(subSpan)!); + } + } + + // Find minimum available color + let i; + for (i = 0; used.has(i); i++) { + // no-op + } + + max = Math.max(max, i); + colors.set(span, i); + + return i; + }; + + const map: Map = new Map(); + + spans.forEach((span) => map.set(span, allocate(span))); + + const concurrency: Span[][] = new Array(max + 1); + for (let i = 0; i < concurrency.length; i++) { + concurrency[i] = []; + } + + spans.forEach((span) => concurrency[allocate(span)].push(span)); + + return { colors: map, concurrency, max }; + } +} diff --git a/llparse-builder/src/span.ts b/llparse-builder/src/span.ts new file mode 100644 index 0000000..99cafb0 --- /dev/null +++ b/llparse-builder/src/span.ts @@ -0,0 +1,57 @@ +import * as assert from 'assert'; + +import { Span as SpanCallback } from './code'; +import { Node, SpanEnd, SpanStart } from './node'; + +/** + * Spans are used for notifying parser user about matched data. Each byte after + * span start will be sent to the span callback until span end is called. 
+ */ +export class Span { + private readonly startCache: Map = new Map(); + private readonly endCache: Map = new Map(); + + /** + * @param callback External callback, must be `code.span(...)` result. + */ + constructor(public readonly callback: SpanCallback) { + } + + /** + * Create `SpanStart` that indicates the start of the span. + * + * @param otherwise Optional convenience value. Same as calling + * `span.start().otherwise(...)` + */ + public start(otherwise?: Node) { + if (otherwise !== undefined && this.startCache.has(otherwise)) { + return this.startCache.get(otherwise)!; + } + + const res = new SpanStart(this); + if (otherwise !== undefined) { + res.otherwise(otherwise); + this.startCache.set(otherwise, res); + } + return res; + } + + /** + * Create `SpanEnd` that indicates the end of the span. + * + * @param otherwise Optional convenience value. Same as calling + * `span.end().otherwise(...)` + */ + public end(otherwise?: Node) { + if (otherwise !== undefined && this.endCache.has(otherwise)) { + return this.endCache.get(otherwise)!; + } + + const res = new SpanEnd(this); + if (otherwise !== undefined) { + res.otherwise(otherwise); + this.endCache.set(otherwise, res); + } + return res; + } +} diff --git a/llparse-builder/src/transform/base.ts b/llparse-builder/src/transform/base.ts new file mode 100644 index 0000000..902199c --- /dev/null +++ b/llparse-builder/src/transform/base.ts @@ -0,0 +1,12 @@ +export type TransformName = 'to_lower_unsafe' | 'to_lower'; + +/** + * Character transformation. + */ +export abstract class Transform { + /** + * @param name Transform name + */ + constructor(public readonly name: TransformName) { + } +} diff --git a/llparse-builder/src/transform/creator.ts b/llparse-builder/src/transform/creator.ts new file mode 100644 index 0000000..eaf3d5c --- /dev/null +++ b/llparse-builder/src/transform/creator.ts @@ -0,0 +1,28 @@ +import { Transform } from './base'; +import { ToLower } from './to-lower'; +import { ToLowerUnsafe } from './to-lower-unsafe'; + +/** + * API for creating character transformations. + * + * The results of methods of this class can be used as an argument to: + * `p.node().transform(...)`. + */ +export class Creator { + /** + * Unsafe transform to lowercase. + * + * The operation of this transformation is equivalent to: + * `String.fromCharCode(input.charCodeAt(0) | 0x20)`. + */ + public toLowerUnsafe(): Transform { + return new ToLowerUnsafe(); + } + + /** + * Safe transform to lowercase. 
+ */ + public toLower(): Transform { + return new ToLower(); + } +} diff --git a/llparse-builder/src/transform/index.ts b/llparse-builder/src/transform/index.ts new file mode 100644 index 0000000..acdcf01 --- /dev/null +++ b/llparse-builder/src/transform/index.ts @@ -0,0 +1,3 @@ +export { Transform } from './base'; +export { Creator } from './creator'; +export { ToLowerUnsafe } from './to-lower-unsafe'; diff --git a/llparse-builder/src/transform/to-lower-unsafe.ts b/llparse-builder/src/transform/to-lower-unsafe.ts new file mode 100644 index 0000000..99d9618 --- /dev/null +++ b/llparse-builder/src/transform/to-lower-unsafe.ts @@ -0,0 +1,7 @@ +import { Transform } from './base'; + +export class ToLowerUnsafe extends Transform { + constructor() { + super('to_lower_unsafe'); + } +} diff --git a/llparse-builder/src/transform/to-lower.ts b/llparse-builder/src/transform/to-lower.ts new file mode 100644 index 0000000..b333fce --- /dev/null +++ b/llparse-builder/src/transform/to-lower.ts @@ -0,0 +1,7 @@ +import { Transform } from './base'; + +export class ToLower extends Transform { + constructor() { + super('to_lower'); + } +} diff --git a/llparse-builder/src/utils.ts b/llparse-builder/src/utils.ts new file mode 100644 index 0000000..3521b20 --- /dev/null +++ b/llparse-builder/src/utils.ts @@ -0,0 +1,19 @@ +import * as assert from 'assert'; +import { Buffer } from 'buffer'; + +/** + * Internal + */ +export function toBuffer(value: number | string | Buffer): Buffer { + let res: Buffer; + if (Buffer.isBuffer(value)) { + res = value; + } else if (typeof value === 'string') { + res = Buffer.from(value); + } else { + assert(0 <= value && value <= 0xff, 'Invalid byte value'); + res = Buffer.from([ value ]); + } + assert(res.length >= 1, 'Invalid key length'); + return res; +} diff --git a/llparse-builder/test/builder-test.ts b/llparse-builder/test/builder-test.ts new file mode 100644 index 0000000..82723ec --- /dev/null +++ b/llparse-builder/test/builder-test.ts @@ -0,0 +1,94 @@ +import * as assert from 'assert'; + +import { Builder } from '../src/builder'; + +describe('LLParse/Builder', () => { + let b: Builder; + beforeEach(() => { + b = new Builder(); + }); + + it('should build primitive graph', () => { + const start = b.node('start'); + const end = b.node('end'); + + start + .peek('e', end) + .match('a', start) + .otherwise(b.error(1, 'error')); + + end + .skipTo(start); + + const edges = start.getEdges(); + assert.strictEqual(edges.length, 2); + + assert(!edges[0].noAdvance); + assert.strictEqual(edges[0].node, start); + + assert(edges[1].noAdvance); + assert.strictEqual(edges[1].node, end); + }); + + it('should disallow duplicate edges', () => { + const start = b.node('start'); + + start.peek('e', start); + + assert.throws(() => { + start.peek('e', start); + }, /duplicate edge/); + }); + + it('should disallow select to non-invoke', () => { + const start = b.node('start'); + + assert.throws(() => { + start.select('a', 1, start); + }, /value to non-Invoke/); + }); + + it('should disallow select to match-invoke', () => { + const start = b.node('start'); + const invoke = b.invoke(b.code.match('something')); + + assert.throws(() => { + start.select('a', 1, invoke); + }, /Invalid.*code signature/); + }); + + it('should disallow peek to value-invoke', () => { + const start = b.node('start'); + const invoke = b.invoke(b.code.value('something')); + + assert.throws(() => { + start.peek('a', invoke); + }, /Invalid.*code signature/); + }); + + it('should allow select to value-invoke', () => { + const start = 
b.node('start'); + const invoke = b.invoke(b.code.value('something')); + + assert.doesNotThrow(() => { + start.select('a', 1, invoke); + }); + }); + + it('should create edges for Invoke', () => { + const start = b.node('start'); + const invoke = b.invoke(b.code.value('something'), { + '-1': start, + '1': start, + '10': start, + }); + + const edges = invoke.getEdges(); + const keys = edges.map((edge) => edge.key!); + assert.deepStrictEqual(keys, [ + -1, + 1, + 10, + ]); + }); +}); diff --git a/llparse-builder/test/loop-checker-test.ts b/llparse-builder/test/loop-checker-test.ts new file mode 100644 index 0000000..0df6064 --- /dev/null +++ b/llparse-builder/test/loop-checker-test.ts @@ -0,0 +1,118 @@ +import * as assert from 'assert'; + +import { Builder, LoopChecker } from '../src/builder'; + +describe('LLParse/LoopChecker', () => { + let b: Builder; + let lc: LoopChecker; + beforeEach(() => { + b = new Builder(); + lc = new LoopChecker(); + }); + + it('should detect shallow loops', () => { + const start = b.node('start'); + + start + .otherwise(start); + + assert.throws(() => { + lc.check(start); + }, /Detected loop in "start".*"start"/); + }); + + it('should detect loops', () => { + const start = b.node('start'); + const a = b.node('a'); + const invoke = b.invoke(b.code.match('nop'), { + 0: start, + }, b.error(1, 'error')); + + start + .peek('a', a) + .otherwise(b.error(1, 'error')); + + a.otherwise(invoke); + + assert.throws(() => { + lc.check(start); + }, /Detected loop in "a".*"a" -> "invoke_nop"/); + }); + + it('should detect seemingly unreachable keys', () => { + const start = b.node('start'); + const loop = b.node('loop'); + + start + .peek('a', loop) + .otherwise(b.error(1, 'error')); + + loop + .match('a', loop) + .otherwise(loop); + + assert.throws(() => { + lc.check(start); + }, /Detected loop in "loop" through.*"loop"/); + }); + + it('should ignore loops through `peek` to `match`', () => { + const start = b.node('start'); + const a = b.node('a'); + const invoke = b.invoke(b.code.match('nop'), { + 0: start, + }, b.error(1, 'error')); + + start + .peek('a', a) + .otherwise(b.error(1, 'error')); + + a + .match('abc', invoke) + .otherwise(start); + + assert.doesNotThrow(() => lc.check(start)); + }); + + it('should ignore irrelevant `peek`s', () => { + const start = b.node('start'); + const a = b.node('a'); + + start + .peek('a', a) + .otherwise(b.error(1, 'error')); + + a + .peek('b', start) + .otherwise(b.error(1, 'error')); + + assert.doesNotThrow(() => lc.check(start)); + }); + + it('should ignore loops with multi `peek`/`match`', () => { + const start = b.node('start'); + const another = b.node('another'); + + const NUM: ReadonlyArray = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', + ]; + + const ALPHA: ReadonlyArray = [ + 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', + 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', + 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', + 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', + ]; + + start + .match(ALPHA, start) + .peek(NUM, another) + .skipTo(start); + + another + .match(NUM, another) + .otherwise(start); + + assert.doesNotThrow(() => lc.check(start)); + }); +}); diff --git a/llparse-builder/test/span-allocator-test.ts b/llparse-builder/test/span-allocator-test.ts new file mode 100644 index 0000000..bc8f656 --- /dev/null +++ b/llparse-builder/test/span-allocator-test.ts @@ -0,0 +1,146 @@ +import * as assert from 'assert'; + +import { Builder, SpanAllocator } 
from '../src/builder'; + +describe('LLParse/LoopChecker', () => { + let b: Builder; + let sa: SpanAllocator; + beforeEach(() => { + b = new Builder(); + sa = new SpanAllocator(); + }); + + it('should allocate single span', () => { + const span = b.span(b.code.span('span')); + const start = b.node('start'); + const body = b.node('body'); + + start + .otherwise(span.start(body)); + + body + .skipTo(span.end(start)); + + const res = sa.allocate(start); + + assert.strictEqual(res.max, 0); + + assert.strictEqual(res.concurrency.length, 1); + assert.ok(res.concurrency[0].includes(span)); + + assert.strictEqual(res.colors.size, 1); + assert.strictEqual(res.colors.get(span), 0); + }); + + it('should allocate overlapping spans', () => { + const span1 = b.span(b.code.span('span1')); + const span2 = b.span(b.code.span('span2')); + + const start = b.node('start'); + const body1 = b.node('body1'); + const body2 = b.node('body2'); + + start + .otherwise(span1.start(body1)); + + body1 + .otherwise(span2.start(body2)); + + body2 + .skipTo(span2.end(span1.end(start))); + + const res = sa.allocate(start); + + assert.strictEqual(res.max, 1); + + assert.strictEqual(res.concurrency.length, 2); + assert.ok(res.concurrency[0].includes(span1)); + assert.ok(res.concurrency[1].includes(span2)); + + assert.strictEqual(res.colors.size, 2); + assert.strictEqual(res.colors.get(span1), 0); + assert.strictEqual(res.colors.get(span2), 1); + }); + + it('should allocate non-overlapping spans', () => { + const span1 = b.span(b.code.span('span1')); + const span2 = b.span(b.code.span('span2')); + + const start = b.node('start'); + const body1 = b.node('body1'); + const body2 = b.node('body2'); + + start + .match('a', span1.start(body1)) + .otherwise(span2.start(body2)); + + body1 + .skipTo(span1.end(start)); + + body2 + .skipTo(span2.end(start)); + + const res = sa.allocate(start); + + assert.strictEqual(res.max, 0); + + assert.strictEqual(res.concurrency.length, 1); + assert.ok(res.concurrency[0].includes(span1)); + assert.ok(res.concurrency[0].includes(span2)); + + assert.strictEqual(res.colors.size, 2); + assert.strictEqual(res.colors.get(span1), 0); + assert.strictEqual(res.colors.get(span2), 0); + }); + + it('should throw on loops', () => { + const span = b.span(b.code.span('span_name')); + + const start = b.node('start'); + + start + .otherwise(span.start(start)); + + assert.throws(() => { + sa.allocate(start); + }, /loop.*span_name/); + }); + + it('should throw on unmatched ends', () => { + const start = b.node('start'); + const span = b.span(b.code.span('on_data')); + + start.otherwise(span.end().skipTo(start)); + + assert.throws(() => sa.allocate(start), /unmatched.*on_data/i); + }); + + it('should throw on branched unmatched ends', () => { + const start = b.node('start'); + const end = b.node('end'); + const span = b.span(b.code.span('on_data')); + + start + .match('a', end) + .match('b', span.start(end)) + .otherwise(b.error(1, 'error')); + + end + .otherwise(span.end(start)); + + assert.throws(() => sa.allocate(start), /unmatched.*on_data/i); + }); + + it('should propagate through the Invoke map', () => { + const start = b.node('start'); + const span = b.span(b.code.span('llparse__on_data')); + + b.property('i8', 'custom'); + + start.otherwise(b.invoke(b.code.load('custom'), { + 0: span.end().skipTo(start), + }, span.end().skipTo(start))); + + assert.doesNotThrow(() => sa.allocate(span.start(start))); + }); +}); diff --git a/llparse-builder/tsconfig.json b/llparse-builder/tsconfig.json new file mode 100644 index 
0000000..01ec7c2 --- /dev/null +++ b/llparse-builder/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "strict": true, + "target": "es2017", + "module": "commonjs", + "moduleResolution": "node", + "outDir": "./lib", + "declaration": true, + "pretty": true, + "sourceMap": true + }, + "include": [ + "src/**/*.ts" + ] +} diff --git a/llparse-builder/tslint.json b/llparse-builder/tslint.json new file mode 100644 index 0000000..b0aaf97 --- /dev/null +++ b/llparse-builder/tslint.json @@ -0,0 +1,14 @@ +{ + "defaultSeverity": "error", + "extends": [ + "tslint:recommended" + ], + "jsRules": {}, + "rules": { + "no-bitwise": null, + "quotemark": [ + true, "single", "avoid-escape", "avoid-template" + ] + }, + "rulesDirectory": [] +} diff --git a/llparse-frontend/.gitignore b/llparse-frontend/.gitignore new file mode 100644 index 0000000..88edb62 --- /dev/null +++ b/llparse-frontend/.gitignore @@ -0,0 +1,2 @@ +node_modules/ +lib/ diff --git a/llparse-frontend/.travis.yml b/llparse-frontend/.travis.yml new file mode 100644 index 0000000..03f4af5 --- /dev/null +++ b/llparse-frontend/.travis.yml @@ -0,0 +1,6 @@ +sudo: false +language: node_js +node_js: + - "stable" +script: + npm test diff --git a/llparse-frontend/README.md b/llparse-frontend/README.md new file mode 100644 index 0000000..359dd9b --- /dev/null +++ b/llparse-frontend/README.md @@ -0,0 +1,30 @@ +# llparse-frontend +[![Build Status](https://secure.travis-ci.org/indutny/llparse-frontend.svg)](http://travis-ci.org/indutny/llparse-frontend) +[![NPM version](https://badge.fury.io/js/llparse-frontend.svg)](https://badge.fury.io/js/llparse-frontend) + +WIP + +#### LICENSE + +This software is licensed under the MIT License. + +Copyright Fedor Indutny, 2018. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. 
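// ---------------------------------------------------------------------------
// A minimal illustrative sketch of how the llparse-builder primitives vendored
// above compose into a small graph. It assumes only the API shown in the
// sources and tests above; the import path mirrors the test files under
// llparse-builder/test, and the callback names `on_method` and `on_url` are
// hypothetical.
// ---------------------------------------------------------------------------
import { Builder, LoopChecker, SpanAllocator } from '../src/builder';

const b = new Builder();

const method = b.node('method');
const beforeUrl = b.node('before_url');
const url = b.node('url');
const urlSpan = b.span(b.code.span('on_url'));

// `.select()` attaches a value to the transition, so its target must be a
// "value" Invoke node (see node/match.ts and edge.ts above).
const onMethod = b.invoke(b.code.value('on_method'), {}, beforeUrl);

method
  .select({ 'GET': 1, 'POST': 2 }, onMethod)
  .otherwise(b.error(1, 'Unknown method'));

// Every byte between `urlSpan.start()` and `urlSpan.end()` is reported to the
// `on_url` span callback; `.skipTo()` consumes exactly one byte per iteration.
beforeUrl
  .match(' ', beforeUrl)
  .otherwise(urlSpan.start(url));

url
  .peek(' ', urlSpan.end(beforeUrl))
  .skipTo(url);

// The passes added in this patch validate the graph: the loop checker rejects
// cycles that never consume input, and the span allocator assigns each span a
// concurrency slot ("color").
new LoopChecker().check(method);
new SpanAllocator().allocate(method);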
diff --git a/llparse-frontend/package-lock.json b/llparse-frontend/package-lock.json new file mode 100644 index 0000000..3cfef7a --- /dev/null +++ b/llparse-frontend/package-lock.json @@ -0,0 +1,1516 @@ +{ + "name": "llparse-frontend", + "version": "3.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@babel/code-frame": { + "version": "7.10.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.3.tgz", + "integrity": "sha512-fDx9eNW0qz0WkUeqL6tXEXzVlPh6Y5aCDEZesl0xBGA8ndRukX91Uk44ZqnkECp01NAZUdCAl+aiQNGi0k88Eg==", + "dev": true, + "requires": { + "@babel/highlight": "^7.10.3" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.10.3", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.3.tgz", + "integrity": "sha512-bU8JvtlYpJSBPuj1VUmKpFGaDZuLxASky3LhaKj3bmpSTY6VWooSM8msk+Z0CZoErFye2tlABF6yDkT3FOPAXw==", + "dev": true + }, + "@babel/highlight": { + "version": "7.10.3", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.3.tgz", + "integrity": "sha512-Ih9B/u7AtgEnySE2L2F0Xm0GaM729XqqLfHkalTsbjXGyqmf/6M0Cu0WpvqueUlW+xk88BHw9Nkpj49naU+vWw==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.10.3", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@types/debug": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.5.tgz", + "integrity": "sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==", + "dev": true + }, + "@types/mocha": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-8.0.3.tgz", + "integrity": "sha512-vyxR57nv8NfcU0GZu8EUXZLTbCMupIUwy95LJ6lllN+JRPG25CwMHoB1q5xKh8YKhQnHYRAn4yW2yuHbf/5xgg==", + "dev": true + }, + "@types/node": { + "version": "14.11.8", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.11.8.tgz", + "integrity": "sha512-KPcKqKm5UKDkaYPTuXSx8wEP7vE9GnuaXIZKijwRYcePpZFDVuy2a57LarFKiORbHOuTOOwYzxVxcUzsh2P2Pw==", + "dev": true + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": 
"sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "array.prototype.map": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.2.tgz", + "integrity": "sha512-Az3OYxgsa1g7xDYp86l0nnN4bcmuEITGe1rbdEBVkrqkzMgDcbdQ2R7r41pNzti+4NMces3H8gMmuioZUilLgw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "es-array-method-boxes-properly": "^1.0.0", + "is-string": "^1.0.4" + } + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "binary-extensions": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.1.0.tgz", + "integrity": "sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==", + "dev": true + }, + "binary-search": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/binary-search/-/binary-search-1.3.6.tgz", + "integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "chokidar": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.4.2.tgz", + "integrity": "sha512-IZHaDeBeI+sZJRX7lGcXsdzgvZqKv6sECqsbErJA4mHWfpRrD8B97kSFN4cQz6nGBGiuFia1MKR4d6c1o8Cv7A==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + 
"braces": "~3.0.2", + "fsevents": "~2.1.2", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.4.0" + } + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "commander": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.15.1.tgz", + "integrity": "sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "requires": { + "object-keys": "^1.0.12" + } + }, + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": 
"sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + }, + "object.assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.1.tgz", + "integrity": "sha512-VT/cxmx5yaoHSOTSyrCygIDFco+RsibY2NM0a4RdEeY/4KgqezwFtK1yr3U67xYhqJSlASm2pKhLVzPj2lr4bA==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.0", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } + } + } + } + }, + "es-array-method-boxes-properly": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", + "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==", + "dev": true + }, + "es-get-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", + "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", + "dev": true, + "requires": { + "es-abstract": "^1.17.4", + "has-symbols": "^1.0.1", + "is-arguments": "^1.0.4", + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-string": "^1.0.5", + "isarray": "^2.0.5" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "escape-string-regexp": 
{ + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", + "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", + "dev": true, + "requires": { + "is-buffer": "~2.0.3" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", + "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "glob": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", + "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", + "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==" + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true + }, + "is-arguments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", + "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-buffer": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==", + "dev": true + }, + "is-callable": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.2.tgz", + "integrity": "sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA==" + }, + "is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", + "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==", + "dev": true + }, + "is-negative-zero": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.0.tgz", + "integrity": "sha1-lVOxIbD6wohp2p7UWeIMdUN4hGE=" + }, + "is-number": { + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true + }, + "is-regex": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.1.tgz", + "integrity": "sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg==", + "requires": { + "has-symbols": "^1.0.1" + } + }, + "is-set": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", + "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==", + "dev": true + }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "requires": { + "has-symbols": "^1.0.1" + } + }, + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "iterate-iterator": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/iterate-iterator/-/iterate-iterator-1.0.1.tgz", + "integrity": "sha512-3Q6tudGN05kbkDQDI4CqjaBf4qf85w6W6GnuZDtUVYwKgtC1q8yxYX7CZed7N+tLzQqS6roujWvszf13T+n9aw==", + "dev": true + }, + "iterate-value": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/iterate-value/-/iterate-value-1.0.2.tgz", + "integrity": "sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==", + "dev": true, + "requires": { + "es-get-iterator": "^1.0.2", + "iterate-iterator": "^1.0.1" + } + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", + "integrity": "sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "llparse-builder": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/llparse-builder/-/llparse-builder-1.5.2.tgz", + "integrity": "sha512-i862UNC3YUEdlfK/NUCJxlKjtWjgAI9AJXDRgjcfRHfwFt4Sf8eFPTRsc91/2R9MBZ0kyFdfhi8SVhMsZf1gNQ==", + "requires": { + "@types/debug": "4.1.5 ", + "binary-search": "^1.3.6", + "debug": "^4.2.0" + }, + "dependencies": { + "@types/debug": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.5.tgz", + "integrity": 
"sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==" + }, + "debug": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "log-symbols": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", + "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", + "dev": true, + "requires": { + "chalk": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "mkdirp": { + "version": "0.5.5", + "resolved": 
"https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + }, + "dependencies": { + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + } + } + }, + "mocha": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.1.3.tgz", + "integrity": "sha512-ZbaYib4hT4PpF4bdSO2DohooKXIn4lDeiYqB+vTmCdr6l2woW0b6H3pf5x4sM5nwQMru9RvjjHYWVGltR50ZBw==", + "dev": true, + "requires": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.4.2", + "debug": "4.1.1", + "diff": "4.0.2", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.6", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "3.14.0", + "log-symbols": "4.0.0", + "minimatch": "3.0.4", + "ms": "2.1.2", + "object.assign": "4.1.0", + "promise.allsettled": "1.0.2", + "serialize-javascript": "4.0.0", + "strip-json-comments": "3.0.1", + "supports-color": "7.1.0", + "which": "2.0.2", + "wide-align": "1.1.3", + "workerpool": "6.0.0", + "yargs": "13.3.2", + "yargs-parser": "13.1.2", + "yargs-unparser": "1.6.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": 
"sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "object-inspect": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", + "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==" + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", + "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz", + "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true + }, + "promise.allsettled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/promise.allsettled/-/promise.allsettled-1.0.2.tgz", + "integrity": "sha512-UpcYW5S1RaNKT6pd+s9jp9K9rlQge1UXKskec0j6Mmuq7UJCvlS2J2/s/yuPN8ehftf9HXMxWlKiPbGGUzpoRg==", + "dev": true, + "requires": { + "array.prototype.map": "^1.0.1", + 
"define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "iterate-value": "^1.0.0" + } + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readdirp": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.4.0.tgz", + "integrity": "sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "serialize-javascript": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", + "integrity": "sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + 
"dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "string.prototype.trimend": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", + "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "string.prototype.trimstart": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", + "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "dev": true + }, + "supports-color": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz", + "integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "ts-node": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-9.0.0.tgz", + "integrity": "sha512-/TqB4SnererCDR/vb4S/QvSZvzQMJN8daAslg7MeaiHvD8rDZsSfXmNeNumyZZzMned72Xoq/isQljYSt8Ynfg==", + "dev": true, + "requires": { + "arg": "^4.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "source-map-support": "^0.5.17", + "yn": "3.1.1" + } + }, + "tslib": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.13.0.tgz", + "integrity": "sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==", + "dev": true + }, + "tslint": { + "version": "5.20.1", + "resolved": "https://registry.npmjs.org/tslint/-/tslint-5.20.1.tgz", + "integrity": "sha512-EcMxhzCFt8k+/UP5r8waCf/lzmeSyVlqxqMEDQE7rWYiQky8KpIBz1JAoYXfROHrPZ1XXd43q8yQnULOLiBRQg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "builtin-modules": "^1.1.1", + "chalk": "^2.3.0", + "commander": "^2.12.1", + "diff": "^4.0.1", + "glob": "^7.1.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "resolve": "^1.3.2", + "semver": "^5.3.0", + "tslib": "^1.8.0", + "tsutils": "^2.29.0" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + } + } + }, + "tsutils": { + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", + "integrity": 
"sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==", + "dev": true, + "requires": { + "tslib": "^1.8.1" + } + }, + "typescript": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.3.tgz", + "integrity": "sha512-tEu6DGxGgRJPb/mVPIZ48e69xCn2yRmCgYmDugAVwmJ6o+0u1RI18eO7E7WBTLYLaEVVOhwQmcdhQHweux/WPg==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2" + } + }, + "workerpool": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.0.0.tgz", + "integrity": "sha512-fU2OcNA/GVAJLLyKUoHkAgIhKb0JoCpSjLC/G2vYKxUjVmQwGbRVeoPJ1a8U4pnVofz4AQV5Y/NEw8oKqxEBtA==", + "dev": true + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true + }, + "yargs": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": 
"^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, + "yargs-unparser": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.1.tgz", + "integrity": "sha512-qZV14lK9MWsGCmcr7u5oXGH0dbGqZAIxTDrWXZDo5zUr6b6iUmelNKO6x6R1dQT24AH3LgRxJpr8meWy2unolA==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "decamelize": "^1.2.0", + "flat": "^4.1.0", + "is-plain-obj": "^1.1.0", + "yargs": "^14.2.3" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + 
"integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "yargs": { + "version": "14.2.3", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-14.2.3.tgz", + "integrity": "sha512-ZbotRWhF+lkjijC/VhmOT9wSgyBQ7+zr13+YLkhfsSiTriYsMzkTUFP18pFhWwBeMa5gUc1MzbhrO6/VB7c9Xg==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "decamelize": "^1.2.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^15.0.1" + } + }, + "yargs-parser": { + "version": "15.0.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-15.0.1.tgz", + "integrity": "sha512-0OAMV2mAZQrs3FkNpDQcBk1x5HXb8X4twADss4S0Iuk+2dGnLOE/fRHrsYm542GduMveyA77OF4wrNJuanRCWw==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true + } + } +} diff --git a/llparse-frontend/package.json b/llparse-frontend/package.json new file mode 100644 index 0000000..8afea88 --- /dev/null +++ b/llparse-frontend/package.json @@ -0,0 +1,43 @@ +{ + "name": "llparse-frontend", + "version": "3.0.0", + "description": "Frontend for LLParse compiler", + "main": "lib/frontend.js", + "types": "lib/frontend.d.ts", + "scripts": { + "build": "tsc", + "clean": "rm -rf lib", + "prepare": "npm run clean && npm run build", + "lint": "tslint -c tslint.json 
src/**/*.ts test/**/*.ts", + "fix-lint": "npm run lint -- --fix", + "mocha": "mocha --timeout=10000 -r ts-node/register/type-check --reporter spec test/*-test.ts", + "test": "npm run mocha && npm run lint" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/indutny/llparse-frontend.git" + }, + "keywords": [ + "llparse", + "frontend" + ], + "author": "Fedor Indutny (http://darksi.de/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/indutny/llparse-frontend/issues" + }, + "homepage": "https://github.com/indutny/llparse-frontend#readme", + "dependencies": { + "debug": "^3.2.6", + "llparse-builder": "^1.5.2" + }, + "devDependencies": { + "@types/debug": "^4.1.5", + "@types/mocha": "^8.0.3", + "@types/node": "^14.11.8", + "mocha": "^8.1.3", + "ts-node": "^9.0.0", + "tslint": "^5.20.1", + "typescript": "^4.0.3" + } +} diff --git a/llparse-frontend/src/code/and.ts b/llparse-frontend/src/code/and.ts new file mode 100644 index 0000000..54dc5fd --- /dev/null +++ b/llparse-frontend/src/code/and.ts @@ -0,0 +1,8 @@ +import { toCacheKey } from '../utils'; +import { FieldValue } from './field-value'; + +export class And extends FieldValue { + constructor(name: string, field: string, value: number) { + super('match', `and_${field}_${toCacheKey(value)}`, name, field, value); + } +} diff --git a/llparse-frontend/src/code/base.ts b/llparse-frontend/src/code/base.ts new file mode 100644 index 0000000..cde4b6d --- /dev/null +++ b/llparse-frontend/src/code/base.ts @@ -0,0 +1,8 @@ +export type Signature = 'match' | 'value' | 'span'; + +export abstract class Code { + constructor(public readonly signature: Signature, + public readonly cacheKey: string, + public readonly name: string) { + } +} diff --git a/llparse-frontend/src/code/external.ts b/llparse-frontend/src/code/external.ts new file mode 100644 index 0000000..f4254c1 --- /dev/null +++ b/llparse-frontend/src/code/external.ts @@ -0,0 +1,7 @@ +import { Code, Signature } from './base'; + +export abstract class External extends Code { + constructor(signature: Signature, name: string) { + super(signature, 'external_' + name, name); + } +} diff --git a/llparse-frontend/src/code/field-value.ts b/llparse-frontend/src/code/field-value.ts new file mode 100644 index 0000000..1c7c109 --- /dev/null +++ b/llparse-frontend/src/code/field-value.ts @@ -0,0 +1,13 @@ +import * as assert from 'assert'; + +import { Signature } from './base'; +import { Field } from './field'; + +export abstract class FieldValue extends Field { + constructor(signature: Signature, cacheKey: string, name: string, + field: string, public readonly value: number) { + super(signature, cacheKey, name, field); + + assert.strictEqual(value, value | 0, 'FieldValue `value` must be integer'); + } +} diff --git a/llparse-frontend/src/code/field.ts b/llparse-frontend/src/code/field.ts new file mode 100644 index 0000000..c60b8ef --- /dev/null +++ b/llparse-frontend/src/code/field.ts @@ -0,0 +1,8 @@ +import { Code, Signature } from './base'; + +export abstract class Field extends Code { + constructor(signature: Signature, cacheKey: string, name: string, + public readonly field: string) { + super(signature, cacheKey, name); + } +} diff --git a/llparse-frontend/src/code/index.ts b/llparse-frontend/src/code/index.ts new file mode 100644 index 0000000..c7d5c69 --- /dev/null +++ b/llparse-frontend/src/code/index.ts @@ -0,0 +1,15 @@ +export * from './and'; +export * from './base'; +export * from './external'; +export * from './field-value'; +export * from './field'; +export * from 
'./is-equal'; +export * from './load'; +export * from './match'; +export * from './mul-add'; +export * from './or'; +export * from './span'; +export * from './store'; +export * from './test'; +export * from './update'; +export * from './value'; diff --git a/llparse-frontend/src/code/is-equal.ts b/llparse-frontend/src/code/is-equal.ts new file mode 100644 index 0000000..16a2ee2 --- /dev/null +++ b/llparse-frontend/src/code/is-equal.ts @@ -0,0 +1,9 @@ +import { toCacheKey } from '../utils'; +import { FieldValue } from './field-value'; + +export class IsEqual extends FieldValue { + constructor(name: string, field: string, value: number) { + super('match', `is_equal_${field}_${toCacheKey(value)}`, name, field, + value); + } +} diff --git a/llparse-frontend/src/code/load.ts b/llparse-frontend/src/code/load.ts new file mode 100644 index 0000000..76b715a --- /dev/null +++ b/llparse-frontend/src/code/load.ts @@ -0,0 +1,7 @@ +import { Field } from './field'; + +export class Load extends Field { + constructor(name: string, field: string) { + super('match', `load_${field}`, name, field); + } +} diff --git a/llparse-frontend/src/code/match.ts b/llparse-frontend/src/code/match.ts new file mode 100644 index 0000000..819d2af --- /dev/null +++ b/llparse-frontend/src/code/match.ts @@ -0,0 +1,7 @@ +import { External } from './external'; + +export class Match extends External { + constructor(name: string) { + super('match', name); + } +} diff --git a/llparse-frontend/src/code/mul-add.ts b/llparse-frontend/src/code/mul-add.ts new file mode 100644 index 0000000..c99be0d --- /dev/null +++ b/llparse-frontend/src/code/mul-add.ts @@ -0,0 +1,26 @@ +import { toCacheKey } from '../utils'; +import { Field } from './field'; + +export interface IMulAddOptions { + readonly base: number; + readonly max?: number; + readonly signed: boolean; +} + +function toOptionsKey(options: IMulAddOptions): string { + let res = `base_${toCacheKey(options.base)}`; + if (options.max !== undefined) { + res += `_max_${toCacheKey(options.max)}`; + } + if (options.signed !== undefined) { + res += `_signed_${toCacheKey(options.signed)}`; + } + return res; +} + +export class MulAdd extends Field { + constructor(name: string, field: string, + public readonly options: IMulAddOptions) { + super('value', `mul_add_${field}_${toOptionsKey(options)}`, name, field); + } +} diff --git a/llparse-frontend/src/code/or.ts b/llparse-frontend/src/code/or.ts new file mode 100644 index 0000000..2328a9f --- /dev/null +++ b/llparse-frontend/src/code/or.ts @@ -0,0 +1,8 @@ +import { toCacheKey } from '../utils'; +import { FieldValue } from './field-value'; + +export class Or extends FieldValue { + constructor(name: string, field: string, value: number) { + super('match', `or_${field}_${toCacheKey(value)}`, name, field, value); + } +} diff --git a/llparse-frontend/src/code/span.ts b/llparse-frontend/src/code/span.ts new file mode 100644 index 0000000..6241e03 --- /dev/null +++ b/llparse-frontend/src/code/span.ts @@ -0,0 +1,7 @@ +import { External } from './external'; + +export class Span extends External { + constructor(name: string) { + super('span', name); + } +} diff --git a/llparse-frontend/src/code/store.ts b/llparse-frontend/src/code/store.ts new file mode 100644 index 0000000..c2cb9ea --- /dev/null +++ b/llparse-frontend/src/code/store.ts @@ -0,0 +1,7 @@ +import { Field } from './field'; + +export class Store extends Field { + constructor(name: string, field: string) { + super('value', `store_${field}`, name, field); + } +} diff --git 
a/llparse-frontend/src/code/test.ts b/llparse-frontend/src/code/test.ts new file mode 100644 index 0000000..21339e9 --- /dev/null +++ b/llparse-frontend/src/code/test.ts @@ -0,0 +1,8 @@ +import { toCacheKey } from '../utils'; +import { FieldValue } from './field-value'; + +export class Test extends FieldValue { + constructor(name: string, field: string, value: number) { + super('match', `test_${field}_${toCacheKey(value)}`, name, field, value); + } +} diff --git a/llparse-frontend/src/code/update.ts b/llparse-frontend/src/code/update.ts new file mode 100644 index 0000000..5fa5eec --- /dev/null +++ b/llparse-frontend/src/code/update.ts @@ -0,0 +1,8 @@ +import { toCacheKey } from '../utils'; +import { FieldValue } from './field-value'; + +export class Update extends FieldValue { + constructor(name: string, field: string, value: number) { + super('match', `update_${field}_${toCacheKey(value)}`, name, field, value); + } +} diff --git a/llparse-frontend/src/code/value.ts b/llparse-frontend/src/code/value.ts new file mode 100644 index 0000000..4f32ae8 --- /dev/null +++ b/llparse-frontend/src/code/value.ts @@ -0,0 +1,7 @@ +import { External } from './external'; + +export class Value extends External { + constructor(name: string) { + super('value', name); + } +} diff --git a/llparse-frontend/src/container/index.ts b/llparse-frontend/src/container/index.ts new file mode 100644 index 0000000..a62aac8 --- /dev/null +++ b/llparse-frontend/src/container/index.ts @@ -0,0 +1,84 @@ +import * as assert from 'assert'; + +import { ICodeImplementation } from '../implementation/code'; +import { IImplementation } from '../implementation/full'; +import { INodeImplementation } from '../implementation/node'; +import { ITransformImplementation } from '../implementation/transform'; +import { IWrap } from '../wrap'; +import { ContainerWrap } from './wrap'; + +export { ContainerWrap }; + +export class Container { + private readonly map: Map = new Map(); + + public add(key: string, impl: IImplementation): void { + assert(!this.map.has(key), `Duplicate implementation key: "${key}"`); + this.map.set(key, impl); + } + + public build(): IImplementation { + return { + code: this.buildCode(), + node: this.buildNode(), + transform: this.buildTransform(), + }; + } + + public buildCode(): ICodeImplementation { + return { + And: this.combine((impl) => impl.code.And), + IsEqual: this.combine((impl) => impl.code.IsEqual), + Load: this.combine((impl) => impl.code.Load), + Match: this.combine((impl) => impl.code.Match), + MulAdd: this.combine((impl) => impl.code.MulAdd), + Or: this.combine((impl) => impl.code.Or), + Span: this.combine((impl) => impl.code.Span), + Store: this.combine((impl) => impl.code.Store), + Test: this.combine((impl) => impl.code.Test), + Update: this.combine((impl) => impl.code.Update), + Value: this.combine((impl) => impl.code.Value), + }; + } + + public buildNode(): INodeImplementation { + return { + Consume: this.combine((impl) => impl.node.Consume), + Empty: this.combine((impl) => impl.node.Empty), + Error: this.combine((impl) => impl.node.Error), + Invoke: this.combine((impl) => impl.node.Invoke), + Pause: this.combine((impl) => impl.node.Pause), + Sequence: this.combine((impl) => impl.node.Sequence), + Single: this.combine((impl) => impl.node.Single), + SpanEnd: this.combine((impl) => impl.node.SpanEnd), + SpanStart: this.combine((impl) => impl.node.SpanStart), + TableLookup: this.combine((impl) => impl.node.TableLookup), + }; + } + + public buildTransform(): ITransformImplementation { + return { + ID: 
this.combine((impl) => impl.transform.ID), + ToLower: this.combine((impl) => impl.transform.ToLower), + ToLowerUnsafe: this.combine((impl) => impl.transform.ToLowerUnsafe), + }; + } + + private combine(gather: (impl: IImplementation) => new(n: T) => IWrap) + : new(n: T) => ContainerWrap { + const wraps: Map IWrap> = new Map(); + for (const [ key, impl ] of this.map) { + wraps.set(key, gather(impl)); + } + + return class ContainerWrapSingle extends ContainerWrap { + constructor(ref: T) { + super(ref); + + for (const [ key, impl ] of wraps) { + this.map.set(key, new impl(ref)); + } + } + }; + } +} diff --git a/llparse-frontend/src/container/wrap.ts b/llparse-frontend/src/container/wrap.ts new file mode 100644 index 0000000..f3b886c --- /dev/null +++ b/llparse-frontend/src/container/wrap.ts @@ -0,0 +1,15 @@ +import * as assert from 'assert'; + +import { IWrap } from '../wrap'; + +export class ContainerWrap { + protected readonly map: Map> = new Map(); + + constructor(public readonly ref: T) { + } + + public get>(key: string): R { + assert(this.map.has(key), `Unknown implementation key "${key}"`); + return this.map.get(key)! as R; + } +} diff --git a/llparse-frontend/src/enumerator.ts b/llparse-frontend/src/enumerator.ts new file mode 100644 index 0000000..f2940a2 --- /dev/null +++ b/llparse-frontend/src/enumerator.ts @@ -0,0 +1,23 @@ +import { Node } from './node'; +import { IWrap } from './wrap'; + +export class Enumerator { + public getAllNodes(root: IWrap): ReadonlyArray> { + const nodes: Set> = new Set(); + const queue = [ root ]; + + while (queue.length !== 0) { + const node = queue.pop()!; + for (const slot of node.ref.getSlots()) { + if (nodes.has(slot.node)) { + continue; + } + + nodes.add(slot.node); + queue.push(slot.node); + } + } + + return Array.from(nodes); + } +} diff --git a/llparse-frontend/src/frontend.ts b/llparse-frontend/src/frontend.ts new file mode 100644 index 0000000..91c5224 --- /dev/null +++ b/llparse-frontend/src/frontend.ts @@ -0,0 +1,513 @@ +import * as assert from 'assert'; +import * as debugAPI from 'debug'; +import * as source from 'llparse-builder'; + +import * as frontend from './namespace/frontend'; +import { Container, ContainerWrap } from './container'; +import { IImplementation } from './implementation'; +import { SpanField } from './span-field'; +import { Trie, TrieEmpty, TrieNode, TrieSequence, TrieSingle } from './trie'; +import { Identifier, IUniqueName } from './utils'; +import { IWrap } from './wrap'; +import { Enumerator } from './enumerator'; +import { Peephole } from './peephole'; + +const debug = debugAPI('llparse:translator'); + +export { code, node, transform } from './namespace/frontend'; + +export { + source, + Identifier, + IUniqueName, + IWrap, + SpanField, + Container, + ContainerWrap, +}; + +// Minimum number of cases of `single` node to make it eligable for +// `TableLookup` optimization +export const DEFAULT_MIN_TABLE_SIZE = 32; + +// Maximum width of entry in a table for a `TableLookup` optimization +export const DEFAULT_MAX_TABLE_WIDTH = 4; + +type WrappedNode = IWrap; +type WrappedCode = IWrap; + +export interface IFrontendLazyOptions { + readonly maxTableElemWidth?: number; + readonly minTableSize?: number; +} + +export interface IFrontendResult { + readonly prefix: string; + readonly properties: ReadonlyArray; + readonly root: IWrap; + readonly spans: ReadonlyArray; + readonly resumptionTargets: ReadonlySet; +} + +interface IFrontendOptions { + readonly maxTableElemWidth: number; + readonly minTableSize: number; +} + +type 
MatchChildren = WrappedNode[]; +type MatchResult = WrappedNode | ReadonlyArray; + +interface ITableLookupTarget { + readonly keys: number[]; + readonly noAdvance: boolean; + readonly trie: TrieEmpty; +} + +export class Frontend { + private readonly options: IFrontendOptions; + + private readonly id: Identifier = new Identifier(this.prefix + '__n_'); + private readonly codeId: Identifier = new Identifier(this.prefix + '__c_'); + private readonly map: Map = new Map(); + private readonly spanMap: Map = new Map(); + private readonly codeCache: Map = new Map(); + private readonly resumptionTargets: Set = new Set(); + + constructor(private readonly prefix: string, + private readonly implementation: IImplementation, + options: IFrontendLazyOptions = {}) { + this.options = { + maxTableElemWidth: options.maxTableElemWidth === undefined ? + DEFAULT_MAX_TABLE_WIDTH : options.maxTableElemWidth, + minTableSize: options.minTableSize === undefined ? + DEFAULT_MIN_TABLE_SIZE : options.minTableSize, + }; + + assert(0 < this.options.maxTableElemWidth, + 'Invalid `options.maxTableElemWidth`, must be positive'); + } + + public compile(root: source.node.Node, + properties: ReadonlyArray): IFrontendResult { + debug('checking loops'); + const lc = new source.LoopChecker(); + lc.check(root); + + debug('allocating spans'); + const spanAllocator = new source.SpanAllocator(); + const sourceSpans = spanAllocator.allocate(root); + + const spans = sourceSpans.concurrency.map((concurrent, index) => { + const span = new SpanField(index, concurrent.map((sourceSpan) => { + return this.translateSpanCode(sourceSpan.callback); + })); + + for (const sourceSpan of concurrent) { + this.spanMap.set(sourceSpan, span); + } + + return span; + }); + + debug('translating'); + let out = this.translate(root); + + debug('enumerating'); + const enumerator = new Enumerator(); + let nodes = enumerator.getAllNodes(out); + + debug('peephole optimization'); + const peephole = new Peephole(); + out = peephole.optimize(out, nodes); + + debug('re-enumerating'); + nodes = enumerator.getAllNodes(out); + + debug('registering resumption targets'); + this.resumptionTargets.add(out); + for (const node of nodes) { + this.registerNode(node); + } + + return { + prefix: this.prefix, + properties, + resumptionTargets: this.resumptionTargets, + root: out, + spans, + }; + } + + // TODO(indutny): remove this in the next major release + public getResumptionTargets(): ReadonlySet { + return this.resumptionTargets; + } + + private translate(node: source.node.Node): WrappedNode { + if (this.map.has(node)) { + return this.map.get(node)!; + } + + const id = () => this.id.id(node.name); + + const nodeImpl = this.implementation.node; + + // Instantiate target class + let result: MatchResult; + if (node instanceof source.node.Error) { + result = new nodeImpl.Error( + new frontend.node.Error(id(), node.code, node.reason)); + } else if (node instanceof source.node.Pause) { + result = new nodeImpl.Pause( + new frontend.node.Pause(id(), node.code, node.reason)); + } else if (node instanceof source.node.Consume) { + result = new nodeImpl.Consume( + new frontend.node.Consume(id(), node.field)); + } else if (node instanceof source.node.SpanStart) { + result = new nodeImpl.SpanStart( + new frontend.node.SpanStart(id(), this.spanMap.get(node.span)!, + this.translateSpanCode(node.span.callback))); + } else if (node instanceof source.node.SpanEnd) { + result = new nodeImpl.SpanEnd( + new frontend.node.SpanEnd(id(), this.spanMap.get(node.span)!, + 
this.translateSpanCode(node.span.callback))); + } else if (node instanceof source.node.Invoke) { + assert(node.code.signature === 'match' || node.code.signature === 'value', + 'Passing `span` callback to `invoke` is not allowed'); + result = new nodeImpl.Invoke( + new frontend.node.Invoke(id(), this.translateCode(node.code))); + } else if (node instanceof source.node.Match) { + result = this.translateMatch(node); + } else { + throw new Error(`Unknown node type for "${node.name}" ${node.constructor.toString()}`); + } + + // Initialize result + const otherwise = node.getOtherwiseEdge(); + + if (Array.isArray(result)) { + assert(node instanceof source.node.Match); + const match = node as source.node.Match; + + // TODO(indutny): move this to llparse-builder? + assert.notStrictEqual(otherwise, undefined, + `Node "${node.name}" has no \`.otherwise()\``); + + // Assign otherwise to every node of Trie + if (otherwise !== undefined) { + for (const child of result) { + if (!child.ref.otherwise) { + child.ref.setOtherwise(this.translate(otherwise.node), + otherwise.noAdvance); + } + } + } + + // Assign transform to every node of Trie + const transform = this.translateTransform(match.getTransform()); + for (const child of result) { + child.ref.setTransform(transform); + } + + assert(result.length >= 1); + return result[0]; + } else { + const single: WrappedNode = result as WrappedNode; + assert(single.ref instanceof frontend.node.Node); + + // Break loops + this.map.set(node, single); + + if (otherwise !== undefined) { + single.ref.setOtherwise(this.translate(otherwise.node), + otherwise.noAdvance); + } else { + // TODO(indutny): move this to llparse-builder? + assert(node instanceof source.node.Error, + `Node "${node.name}" has no \`.otherwise()\``); + } + + if (single.ref instanceof frontend.node.Invoke) { + for (const edge of node) { + single.ref.addEdge(edge.key as number, this.translate(edge.node)); + } + } else { + assert.strictEqual(Array.from(node).length, 0); + } + + return single; + } + } + + private registerNode(node: any): void { + const nodeImpl = this.implementation.node; + + // Nodes with prologue check (start_pos != end_pos) + if (node instanceof nodeImpl.Consume || + node instanceof nodeImpl.Empty || + node instanceof nodeImpl.Sequence || + node instanceof nodeImpl.Single || + node instanceof nodeImpl.SpanStart || + node instanceof nodeImpl.TableLookup) { + this.resumptionTargets.add(node); + + // Nodes that can interrupt the execution to be resumed at different node + } else if (node instanceof nodeImpl.Pause || + node instanceof nodeImpl.SpanEnd) { + this.resumptionTargets.add(node.ref.otherwise!.node); + } + } + + private translateMatch(node: source.node.Match): MatchResult { + const trie = new Trie(node.name); + + const otherwise = node.getOtherwiseEdge(); + const trieNode = trie.build(Array.from(node)); + if (trieNode === undefined) { + return new this.implementation.node.Empty( + new frontend.node.Empty(this.id.id(node.name))); + } + + const children: MatchChildren = []; + this.translateTrie(node, trieNode, children); + assert(children.length >= 1); + + return children; + } + + private translateTrie(node: source.node.Match, trie: TrieNode, + children: MatchChildren): WrappedNode { + if (trie instanceof TrieEmpty) { + assert(this.map.has(node)); + return this.translate(trie.node); + } else if (trie instanceof TrieSingle) { + return this.translateSingle(node, trie, children); + } else if (trie instanceof TrieSequence) { + return this.translateSequence(node, trie, children); + } 
else { + throw new Error('Unknown trie node'); + } + } + + private translateSingle(node: source.node.Match, trie: TrieSingle, + children: MatchChildren) + : IWrap { + // See if we can apply TableLookup optimization + const maybeTable = this.maybeTableLookup(node, trie, children); + if (maybeTable !== undefined) { + return maybeTable; + } + + const single = new this.implementation.node.Single( + new frontend.node.Single(this.id.id(node.name))); + children.push(single); + + // Break the loop + if (!this.map.has(node)) { + this.map.set(node, single); + } + for (const child of trie.children) { + const childNode = this.translateTrie(node, child.node, children); + + single.ref.addEdge({ + key: child.key, + noAdvance: child.noAdvance, + node: childNode, + value: child.node instanceof TrieEmpty ? child.node.value : undefined, + }); + } + + const otherwise = trie.otherwise; + if (otherwise) { + single.ref.setOtherwise( + this.translateTrie(node, otherwise, children), + true, + otherwise.value); + } + + return single; + } + + private maybeTableLookup(node: source.node.Match, trie: TrieSingle, + children: MatchChildren) + : IWrap | undefined { + if (trie.children.length < this.options.minTableSize) { + debug('not enough children of "%s" to allocate table, got %d need %d', + node.name, trie.children.length, this.options.minTableSize); + return undefined; + } + + const targets: Map = new Map(); + + const bailout = !trie.children.every((child) => { + if (!(child.node instanceof TrieEmpty)) { + debug('non-leaf trie child of "%s" prevents table allocation', + node.name); + return false; + } + + const empty: TrieEmpty = child.node; + + // We can't pass values from the table yet + if (empty.value !== undefined) { + debug('value passing trie leaf of "%s" prevents table allocation', + node.name); + return false; + } + + const target = empty.node; + if (!targets.has(target)) { + targets.set(target, { + keys: [ child.key ], + noAdvance: child.noAdvance, + trie: empty, + }); + return true; + } + + const existing = targets.get(target)!; + + // TODO(indutny): just use it as a sub-key? 
+ if (existing.noAdvance !== child.noAdvance) { + debug( + 'noAdvance mismatch in a trie leaf of "%s" prevents ' + + 'table allocation', + node.name); + return false; + } + + existing.keys.push(child.key); + return true; + }); + + if (bailout) { + return undefined; + } + + // We've width limit for this optimization + if (targets.size >= (1 << this.options.maxTableElemWidth)) { + debug('too many different trie targets of "%s" for a table allocation', + node.name); + return undefined; + } + + const table = new this.implementation.node.TableLookup( + new frontend.node.TableLookup(this.id.id(node.name))); + children.push(table); + + // Break the loop + if (!this.map.has(node)) { + this.map.set(node, table); + } + + targets.forEach((target) => { + const next = this.translateTrie(node, target.trie, children); + + table.ref.addEdge({ + keys: target.keys, + noAdvance: target.noAdvance, + node: next, + }); + }); + + debug('optimized "%s" to a table lookup node', node.name); + return table; + } + + private translateSequence(node: source.node.Match, trie: TrieSequence, + children: MatchChildren) + : IWrap { + const sequence = new this.implementation.node.Sequence( + new frontend.node.Sequence(this.id.id(node.name), trie.select)); + children.push(sequence); + + // Break the loop + if (!this.map.has(node)) { + this.map.set(node, sequence); + } + + const childNode = this.translateTrie(node, trie.child, children); + + const value = trie.child instanceof TrieEmpty ? + trie.child.value : undefined; + + sequence.ref.setEdge(childNode, value); + + return sequence; + } + + private translateCode(code: source.code.Code): WrappedCode { + const prefixed = this.codeId.id(code.name).name; + const codeImpl = this.implementation.code; + + let res: WrappedCode; + if (code instanceof source.code.IsEqual) { + res = new codeImpl.IsEqual( + new frontend.code.IsEqual(prefixed, code.field, code.value)); + } else if (code instanceof source.code.Load) { + res = new codeImpl.Load( + new frontend.code.Load(prefixed, code.field)); + } else if (code instanceof source.code.MulAdd) { + // TODO(indutny): verify property type + const m = new frontend.code.MulAdd(prefixed, code.field, { + base: code.options.base, + max: code.options.max, + signed: code.options.signed === undefined ? 
true : code.options.signed, + }); + res = new codeImpl.MulAdd(m); + } else if (code instanceof source.code.And) { + res = new codeImpl.And( + new frontend.code.Or(prefixed, code.field, code.value)); + } else if (code instanceof source.code.Or) { + res = new codeImpl.Or( + new frontend.code.Or(prefixed, code.field, code.value)); + } else if (code instanceof source.code.Store) { + res = new codeImpl.Store( + new frontend.code.Store(prefixed, code.field)); + } else if (code instanceof source.code.Test) { + res = new codeImpl.Test( + new frontend.code.Test(prefixed, code.field, code.value)); + } else if (code instanceof source.code.Update) { + res = new codeImpl.Update( + new frontend.code.Update(prefixed, code.field, code.value)); + + // External callbacks + } else if (code instanceof source.code.Span) { + res = new codeImpl.Span(new frontend.code.Span(code.name)); + } else if (code instanceof source.code.Match) { + res = new codeImpl.Match(new frontend.code.Match(code.name)); + } else if (code instanceof source.code.Value) { + res = new codeImpl.Value(new frontend.code.Value(code.name)); + } else { + throw new Error(`Unsupported code: "${code.name}"`); + } + + // Re-use instances to build them just once + if (this.codeCache.has(res.ref.cacheKey)) { + return this.codeCache.get(res.ref.cacheKey)!; + } + + this.codeCache.set(res.ref.cacheKey, res); + return res; + } + + private translateSpanCode(code: source.code.Span): IWrap { + return this.translateCode(code) as IWrap; + } + + private translateTransform(transform?: source.transform.Transform) + : IWrap { + const transformImpl = this.implementation.transform; + if (transform === undefined) { + return new transformImpl.ID(new frontend.transform.ID()); + } else if (transform.name === 'to_lower') { + return new transformImpl.ToLower( + new frontend.transform.ToLower()); + } else if (transform.name === 'to_lower_unsafe') { + return new transformImpl.ToLowerUnsafe( + new frontend.transform.ToLowerUnsafe()); + } else { + throw new Error(`Unsupported transform: "${transform.name}"`); + } + } +} diff --git a/llparse-frontend/src/implementation/code.ts b/llparse-frontend/src/implementation/code.ts new file mode 100644 index 0000000..c467ced --- /dev/null +++ b/llparse-frontend/src/implementation/code.ts @@ -0,0 +1,16 @@ +import * as code from '../code'; +import { IWrap } from '../wrap'; + +export interface ICodeImplementation { + readonly And: new(c: code.And) => IWrap; + readonly IsEqual: new(c: code.IsEqual) => IWrap; + readonly Load: new(c: code.Load) => IWrap; + readonly Match: new(c: code.Match) => IWrap; + readonly MulAdd: new(c: code.MulAdd) => IWrap; + readonly Or: new(c: code.Or) => IWrap; + readonly Span: new(c: code.Span) => IWrap; + readonly Store: new(c: code.Store) => IWrap; + readonly Test: new(c: code.Test) => IWrap; + readonly Update: new(c: code.Update) => IWrap; + readonly Value: new(c: code.Value) => IWrap; +} diff --git a/llparse-frontend/src/implementation/full.ts b/llparse-frontend/src/implementation/full.ts new file mode 100644 index 0000000..08c4c03 --- /dev/null +++ b/llparse-frontend/src/implementation/full.ts @@ -0,0 +1,9 @@ +import { ICodeImplementation } from './code'; +import { INodeImplementation } from './node'; +import { ITransformImplementation } from './transform'; + +export interface IImplementation { + readonly code: ICodeImplementation; + readonly node: INodeImplementation; + readonly transform: ITransformImplementation; +} diff --git a/llparse-frontend/src/implementation/index.ts 
b/llparse-frontend/src/implementation/index.ts new file mode 100644 index 0000000..2b5411b --- /dev/null +++ b/llparse-frontend/src/implementation/index.ts @@ -0,0 +1,4 @@ +export * from './code'; +export * from './full'; +export * from './node'; +export * from './transform'; diff --git a/llparse-frontend/src/implementation/node.ts b/llparse-frontend/src/implementation/node.ts new file mode 100644 index 0000000..af0b3df --- /dev/null +++ b/llparse-frontend/src/implementation/node.ts @@ -0,0 +1,15 @@ +import * as node from '../node'; +import { IWrap } from '../wrap'; + +export interface INodeImplementation { + readonly Consume: new(n: node.Consume) => IWrap; + readonly Empty: new(n: node.Empty) => IWrap; + readonly Error: new(n: node.Error) => IWrap; + readonly Invoke: new(n: node.Invoke) => IWrap; + readonly Pause: new(n: node.Pause) => IWrap; + readonly Sequence: new(n: node.Sequence) => IWrap; + readonly Single: new(n: node.Single) => IWrap; + readonly SpanEnd: new(n: node.SpanEnd) => IWrap; + readonly SpanStart: new(n: node.SpanStart) => IWrap; + readonly TableLookup: new(n: node.TableLookup) => IWrap; +} diff --git a/llparse-frontend/src/implementation/transform.ts b/llparse-frontend/src/implementation/transform.ts new file mode 100644 index 0000000..4382284 --- /dev/null +++ b/llparse-frontend/src/implementation/transform.ts @@ -0,0 +1,9 @@ +import * as transform from '../transform'; +import { IWrap } from '../wrap'; + +export interface ITransformImplementation { + readonly ID: new(t: transform.ID) => IWrap; + readonly ToLower: new(t: transform.ToLower) => IWrap; + readonly ToLowerUnsafe: new(t: transform.ToLowerUnsafe) + => IWrap; +} diff --git a/llparse-frontend/src/namespace/frontend.ts b/llparse-frontend/src/namespace/frontend.ts new file mode 100644 index 0000000..2f89093 --- /dev/null +++ b/llparse-frontend/src/namespace/frontend.ts @@ -0,0 +1,5 @@ +import * as code from '../code'; +import * as node from '../node'; +import * as transform from '../transform'; + +export { code, node, transform }; diff --git a/llparse-frontend/src/node/base.ts b/llparse-frontend/src/node/base.ts new file mode 100644 index 0000000..1e93c49 --- /dev/null +++ b/llparse-frontend/src/node/base.ts @@ -0,0 +1,46 @@ +import { IUniqueName } from '../utils'; +import { IWrap } from '../wrap'; +import { Slot } from './slot'; + +export interface IReadonlyOtherwiseEdge { + readonly node: IWrap; + readonly noAdvance: boolean; + readonly value: number | undefined; +} + +interface IOtherwiseEdge { + node: IWrap; + readonly noAdvance: boolean; + readonly value: number | undefined; +} + +export abstract class Node { + private privOtherwise: IOtherwiseEdge | undefined; + private privSlots: ReadonlyArray | undefined; + + constructor(public readonly id: IUniqueName) { + } + + public setOtherwise(node: IWrap, noAdvance: boolean, value?: number) { + this.privOtherwise = { node, noAdvance, value }; + } + + public get otherwise(): IReadonlyOtherwiseEdge | undefined { + return this.privOtherwise; + } + + public *getSlots() { + if (this.privSlots === undefined) { + this.privSlots = Array.from(this.buildSlots()); + } + + yield* this.privSlots; + } + + protected *buildSlots() { + const otherwise = this.privOtherwise; + if (otherwise !== undefined) { + yield new Slot(otherwise.node, (value) => otherwise.node = value); + } + } +} diff --git a/llparse-frontend/src/node/consume.ts b/llparse-frontend/src/node/consume.ts new file mode 100644 index 0000000..6ab49ac --- /dev/null +++ b/llparse-frontend/src/node/consume.ts @@ -0,0 +1,8 
@@ +import { IUniqueName } from '../utils'; +import { Node } from './base'; + +export class Consume extends Node { + constructor(id: IUniqueName, readonly field: string) { + super(id); + } +} diff --git a/llparse-frontend/src/node/empty.ts b/llparse-frontend/src/node/empty.ts new file mode 100644 index 0000000..45c552c --- /dev/null +++ b/llparse-frontend/src/node/empty.ts @@ -0,0 +1,4 @@ +import { Node } from './base'; + +export class Empty extends Node { +} diff --git a/llparse-frontend/src/node/error.ts b/llparse-frontend/src/node/error.ts new file mode 100644 index 0000000..c4e6faf --- /dev/null +++ b/llparse-frontend/src/node/error.ts @@ -0,0 +1,9 @@ +import { IUniqueName } from '../utils'; +import { Node } from './base'; + +export class Error extends Node { + constructor(id: IUniqueName, public readonly code: number, + public readonly reason: string) { + super(id); + } +} diff --git a/llparse-frontend/src/node/index.ts b/llparse-frontend/src/node/index.ts new file mode 100644 index 0000000..bd11015 --- /dev/null +++ b/llparse-frontend/src/node/index.ts @@ -0,0 +1,13 @@ +export * from './base'; +export * from './consume'; +export * from './empty'; +export * from './error'; +export * from './invoke'; +export * from './match'; +export * from './pause'; +export * from './sequence'; +export * from './single'; +export * from './slot'; +export * from './span-end'; +export * from './span-start'; +export * from './table-lookup'; diff --git a/llparse-frontend/src/node/invoke.ts b/llparse-frontend/src/node/invoke.ts new file mode 100644 index 0000000..ba6ef53 --- /dev/null +++ b/llparse-frontend/src/node/invoke.ts @@ -0,0 +1,39 @@ +import { Code } from '../code'; +import { IUniqueName } from '../utils'; +import { IWrap } from '../wrap'; +import { Node } from './base'; +import { Slot } from './slot'; + +interface IInvokeEdge { + readonly code: number; + node: IWrap; +} + +export interface IReadonlyInvokeEdge { + readonly code: number; + readonly node: IWrap; +} + +export class Invoke extends Node { + private readonly privEdges: IInvokeEdge[] = []; + + constructor(id: IUniqueName, public readonly code: IWrap) { + super(id); + } + + public addEdge(code: number, node: IWrap): void { + this.privEdges.push({ code, node }); + } + + public get edges(): ReadonlyArray { + return this.privEdges; + } + + protected *buildSlots() { + for (const edge of this.privEdges) { + yield new Slot(edge.node, (value) => edge.node = value); + } + + yield* super.buildSlots(); + } +} diff --git a/llparse-frontend/src/node/match.ts b/llparse-frontend/src/node/match.ts new file mode 100644 index 0000000..8a499d3 --- /dev/null +++ b/llparse-frontend/src/node/match.ts @@ -0,0 +1,11 @@ +import { Transform } from '../transform'; +import { IWrap } from '../wrap'; +import { Node } from './base'; + +export class Match extends Node { + public transform?: IWrap; + + public setTransform(transform: IWrap): void { + this.transform = transform; + } +} diff --git a/llparse-frontend/src/node/pause.ts b/llparse-frontend/src/node/pause.ts new file mode 100644 index 0000000..b9923d7 --- /dev/null +++ b/llparse-frontend/src/node/pause.ts @@ -0,0 +1,4 @@ +import { Error as ErrorNode } from './error'; + +export class Pause extends ErrorNode { +} diff --git a/llparse-frontend/src/node/sequence.ts b/llparse-frontend/src/node/sequence.ts new file mode 100644 index 0000000..c9105b3 --- /dev/null +++ b/llparse-frontend/src/node/sequence.ts @@ -0,0 +1,44 @@ +import * as assert from 'assert'; +import { Buffer } from 'buffer'; + +import { IUniqueName } 
from '../utils'; +import { IWrap } from '../wrap'; +import { Node } from './base'; +import { Match } from './match'; +import { Slot } from './slot'; + +interface ISequenceEdge { + node: IWrap; + readonly value: number | undefined; +} + +export interface IReadonlySequenceEdge { + readonly node: IWrap; + readonly value: number | undefined; +} + +export class Sequence extends Match { + private privEdge?: ISequenceEdge; + + constructor(id: IUniqueName, public readonly select: Buffer) { + super(id); + } + + public setEdge(node: IWrap, value?: number | undefined) { + assert.strictEqual(this.privEdge, undefined); + this.privEdge = { node, value }; + } + + public get edge(): IReadonlySequenceEdge | undefined { + return this.privEdge; + } + + protected *buildSlots() { + const edge = this.privEdge; + if (edge !== undefined) { + yield new Slot(edge.node, (value) => edge.node = value); + } + + yield* super.buildSlots(); + } +} diff --git a/llparse-frontend/src/node/single.ts b/llparse-frontend/src/node/single.ts new file mode 100644 index 0000000..0acf715 --- /dev/null +++ b/llparse-frontend/src/node/single.ts @@ -0,0 +1,46 @@ +import * as assert from 'assert'; + +import { IUniqueName } from '../utils'; +import { IWrap } from '../wrap'; +import { Node } from './base'; +import { Match } from './match'; +import { Slot } from './slot'; + +interface ISingleEdge { + readonly key: number; + node: IWrap; + readonly noAdvance: boolean; + readonly value: number | undefined; +} + +export interface IReadonlySingleEdge { + readonly key: number; + node: IWrap; + readonly noAdvance: boolean; + readonly value: number | undefined; +} + +export class Single extends Match { + private readonly privEdges: ISingleEdge[] = []; + + public addEdge(edge: IReadonlySingleEdge): void { + this.privEdges.push({ + key: edge.key, + noAdvance: edge.noAdvance, + node: edge.node, + value: edge.value, + }); + } + + public get edges(): ReadonlyArray { + return this.privEdges; + } + + protected *buildSlots() { + for (const edge of this.privEdges) { + yield new Slot(edge.node, (value) => edge.node = value); + } + + yield* super.buildSlots(); + } +} diff --git a/llparse-frontend/src/node/slot.ts b/llparse-frontend/src/node/slot.ts new file mode 100644 index 0000000..923da86 --- /dev/null +++ b/llparse-frontend/src/node/slot.ts @@ -0,0 +1,20 @@ +import { IWrap } from '../wrap'; +import { Node } from './base'; + +export class Slot { + private privNode: IWrap; + + constructor(node: IWrap, + private readonly privUpdate: (value: IWrap) => void) { + this.privNode = node; + } + + public get node(): IWrap { + return this.privNode; + } + + public set node(value: IWrap) { + this.privNode = value; + this.privUpdate(value); + } +} diff --git a/llparse-frontend/src/node/span-end.ts b/llparse-frontend/src/node/span-end.ts new file mode 100644 index 0000000..bf8d5cc --- /dev/null +++ b/llparse-frontend/src/node/span-end.ts @@ -0,0 +1,12 @@ +import { Span } from '../code'; +import { SpanField } from '../span-field'; +import { IUniqueName } from '../utils'; +import { IWrap } from '../wrap'; +import { Node } from './base'; + +export class SpanEnd extends Node { + constructor(id: IUniqueName, public readonly field: SpanField, + public readonly callback: IWrap) { + super(id); + } +} diff --git a/llparse-frontend/src/node/span-start.ts b/llparse-frontend/src/node/span-start.ts new file mode 100644 index 0000000..89690f1 --- /dev/null +++ b/llparse-frontend/src/node/span-start.ts @@ -0,0 +1,12 @@ +import { Span } from '../code'; +import { SpanField } from 
'../span-field'; +import { IUniqueName } from '../utils'; +import { IWrap } from '../wrap'; +import { Node } from './base'; + +export class SpanStart extends Node { + constructor(id: IUniqueName, public readonly field: SpanField, + public readonly callback: IWrap) { + super(id); + } +} diff --git a/llparse-frontend/src/node/table-lookup.ts b/llparse-frontend/src/node/table-lookup.ts new file mode 100644 index 0000000..9880fc7 --- /dev/null +++ b/llparse-frontend/src/node/table-lookup.ts @@ -0,0 +1,43 @@ +import * as assert from 'assert'; + +import { IUniqueName } from '../utils'; +import { IWrap } from '../wrap'; +import { Node } from './base'; +import { Match } from './match'; +import { Slot } from './slot'; + +interface ITableEdge { + readonly keys: ReadonlyArray; + node: IWrap; + readonly noAdvance: boolean; +} + +export interface IReadonlyTableEdge { + readonly keys: ReadonlyArray; + readonly node: IWrap; + readonly noAdvance: boolean; +} + +export class TableLookup extends Match { + private readonly privEdges: ITableEdge[] = []; + + public addEdge(edge: IReadonlyTableEdge): void { + this.privEdges.push({ + keys: edge.keys, + noAdvance: edge.noAdvance, + node: edge.node, + }); + } + + public get edges(): ReadonlyArray { + return this.privEdges; + } + + protected *buildSlots() { + for (const edge of this.privEdges) { + yield new Slot(edge.node, (value) => edge.node = value); + } + + yield* super.buildSlots(); + } +} diff --git a/llparse-frontend/src/peephole.ts b/llparse-frontend/src/peephole.ts new file mode 100644 index 0000000..19ac13f --- /dev/null +++ b/llparse-frontend/src/peephole.ts @@ -0,0 +1,52 @@ +import { Node, Empty } from './node'; +import { IWrap } from './wrap'; + +type WrapNode = IWrap; +type WrapList = ReadonlyArray; + +export class Peephole { + public optimize(root: WrapNode, nodes: WrapList): WrapNode { + let changed = new Set(nodes); + + while (changed.size !== 0) { + const previous = changed; + changed = new Set(); + + for (const node of previous) { + if (this.optimizeNode(node)) { + changed.add(node); + } + } + } + + while (root.ref instanceof Empty) { + if (!root.ref.otherwise!.noAdvance) { + break; + } + + root = root.ref.otherwise!.node; + } + + return root; + } + + public optimizeNode(node: WrapNode): boolean { + let changed = false; + for (const slot of node.ref.getSlots()) { + if (!(slot.node.ref instanceof Empty)) { + continue; + } + + const otherwise = slot.node.ref.otherwise!; + + // Node actively skips, can't optimize! 
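To illustrate what this pass removes: an Empty node whose only behavior is a non-advancing otherwise edge is a pure pass-through, so any edge pointing at it can be re-routed straight to its target. A small builder graph that collapses this way (node names are illustrative):

    import { Builder } from 'llparse-builder';

    const b = new Builder();
    const start = b.node('start');
    const hop = b.node('hop');
    const loop = b.node('loop');

    start.otherwise(hop);   // non-advancing pass-through
    hop.otherwise(loop);    // non-advancing pass-through
    loop.skipTo(start);     // consumes a byte, then starts over

    // After Peephole.optimize() the edges that pointed at 'start' and 'hop'
    // point directly at 'loop'; the pass repeats until a fixed point, and the
    // root itself is advanced past any leading pass-through nodes.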
+ if (!otherwise.noAdvance) { + continue; + } + + slot.node = otherwise.node; + changed = true; + } + return changed; + } +} diff --git a/llparse-frontend/src/span-field.ts b/llparse-frontend/src/span-field.ts new file mode 100644 index 0000000..0652f77 --- /dev/null +++ b/llparse-frontend/src/span-field.ts @@ -0,0 +1,8 @@ +import { Span } from './code'; +import { IWrap } from './wrap'; + +export class SpanField { + constructor(public readonly index: number, + public readonly callbacks: ReadonlyArray>) { + } +} diff --git a/llparse-frontend/src/transform/base.ts b/llparse-frontend/src/transform/base.ts new file mode 100644 index 0000000..5397326 --- /dev/null +++ b/llparse-frontend/src/transform/base.ts @@ -0,0 +1,4 @@ +export abstract class Transform { + constructor(public readonly name: string) { + } +} diff --git a/llparse-frontend/src/transform/id.ts b/llparse-frontend/src/transform/id.ts new file mode 100644 index 0000000..d86e3c1 --- /dev/null +++ b/llparse-frontend/src/transform/id.ts @@ -0,0 +1,7 @@ +import { Transform } from './base'; + +export class ID extends Transform { + constructor() { + super('id'); + } +} diff --git a/llparse-frontend/src/transform/index.ts b/llparse-frontend/src/transform/index.ts new file mode 100644 index 0000000..f103b3b --- /dev/null +++ b/llparse-frontend/src/transform/index.ts @@ -0,0 +1,4 @@ +export * from './base'; +export * from './id'; +export * from './to-lower'; +export * from './to-lower-unsafe'; diff --git a/llparse-frontend/src/transform/to-lower-unsafe.ts b/llparse-frontend/src/transform/to-lower-unsafe.ts new file mode 100644 index 0000000..99d9618 --- /dev/null +++ b/llparse-frontend/src/transform/to-lower-unsafe.ts @@ -0,0 +1,7 @@ +import { Transform } from './base'; + +export class ToLowerUnsafe extends Transform { + constructor() { + super('to_lower_unsafe'); + } +} diff --git a/llparse-frontend/src/transform/to-lower.ts b/llparse-frontend/src/transform/to-lower.ts new file mode 100644 index 0000000..b333fce --- /dev/null +++ b/llparse-frontend/src/transform/to-lower.ts @@ -0,0 +1,7 @@ +import { Transform } from './base'; + +export class ToLower extends Transform { + constructor() { + super('to_lower'); + } +} diff --git a/llparse-frontend/src/trie/empty.ts b/llparse-frontend/src/trie/empty.ts new file mode 100644 index 0000000..aba52ea --- /dev/null +++ b/llparse-frontend/src/trie/empty.ts @@ -0,0 +1,9 @@ +import { node as api } from 'llparse-builder'; +import { TrieNode } from './node'; + +export class TrieEmpty extends TrieNode { + constructor(public readonly node: api.Node, + public readonly value: number | undefined) { + super(); + } +} diff --git a/llparse-frontend/src/trie/index.ts b/llparse-frontend/src/trie/index.ts new file mode 100644 index 0000000..391c6a3 --- /dev/null +++ b/llparse-frontend/src/trie/index.ts @@ -0,0 +1,136 @@ +import * as assert from 'assert'; +import { Buffer } from 'buffer'; +import { Edge, node as api } from 'llparse-builder'; + +import { TrieEmpty } from './empty'; +import { TrieNode } from './node'; +import { TrieSequence } from './sequence'; +import { ITrieSingleChild, TrieSingle } from './single'; + +export { TrieEmpty, TrieNode, TrieSequence, TrieSingle }; + +interface IEdge { + readonly key: Buffer; + readonly node: api.Node; + readonly noAdvance: boolean; + readonly value: number | undefined; +} + +type Path = ReadonlyArray; +type EdgeArray = ReadonlyArray; + +export class Trie { + constructor(private readonly name: string) { + } + + public build(edges: ReadonlyArray): undefined | TrieNode { + 
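The trie builder below reduces a node's match edges to one of three shapes: a TrieEmpty leaf once a key is exhausted, a TrieSequence when the remaining keys share a common prefix of two or more bytes, and a TrieSingle branch on the next byte otherwise. As a worked example, two keys 'abc' and 'abd' pointing at the same target reduce to the structure hand-built here with the constructors from this directory ('target' stands in for any builder node):

    import { Buffer } from 'buffer';
    import { Builder } from 'llparse-builder';

    const target = new Builder().node('target');   // placeholder leaf target

    // The shared "ab" prefix becomes a sequence, followed by a single-byte
    // branch on 'c' vs 'd', each ending in a value-less leaf.
    const leafC = new TrieEmpty(target, undefined);
    const leafD = new TrieEmpty(target, undefined);
    const branch = new TrieSingle([
      { key: 0x63, noAdvance: false, node: leafC },   // 'c'
      { key: 0x64, noAdvance: false, node: leafD },   // 'd'
    ], undefined);
    const trie = new TrieSequence(Buffer.from('ab'), branch);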
if (edges.length === 0) { + return undefined; + } + + const internalEdges: IEdge[] = []; + for (const edge of edges) { + internalEdges.push({ + key: edge.key as Buffer, + noAdvance: edge.noAdvance, + node: edge.node, + value: edge.value, + }); + } + + return this.level(internalEdges, []); + } + + private level(edges: EdgeArray, path: Path): TrieNode { + const first = edges[0].key; + const last = edges[edges.length - 1].key; + + // Leaf + if (edges.length === 1 && edges[0].key.length === 0) { + return new TrieEmpty(edges[0].node, edges[0].value); + } + + // Find the longest common sub-string + let common = 0; + for (; common < first.length; common++) { + if (first[common] !== last[common]) { + break; + } + } + + // Sequence + if (common > 1) { + return this.sequence(edges, first.slice(0, common), path); + } + + // Single + return this.single(edges, path); + } + + private slice(edges: EdgeArray, off: number): EdgeArray { + return edges.map((edge) => { + return { + key: edge.key.slice(off), + noAdvance: edge.noAdvance, + node: edge.node, + value: edge.value, + }; + }).sort((a, b) => { + return a.key.compare(b.key); + }); + } + + private sequence(edges: EdgeArray, prefix: Buffer, path: Path): TrieNode { + const sliced = this.slice(edges, prefix.length); + const noAdvance = sliced.some((edge) => edge.noAdvance); + assert(!noAdvance); + const child = this.level(sliced, path.concat(prefix)); + + return new TrieSequence(prefix, child); + } + + private single(edges: EdgeArray, path: Path): TrieNode { + // Check for duplicates + if (edges[0].key.length === 0) { + assert(path.length !== 0, `Empty root entry at "${this.name}"`); + assert(edges.length === 1 || edges[1].key.length !== 0, + `Duplicate entries in "${this.name}" at [ ${path.join(', ')} ]`); + } + + let otherwise: TrieEmpty | undefined; + const keys: Map = new Map(); + for (const edge of edges) { + if (edge.key.length === 0) { + otherwise = new TrieEmpty(edge.node, edge.value); + continue; + } + const key = edge.key[0]; + + if (keys.has(key)) { + keys.get(key)!.push(edge); + } else { + keys.set(key, [ edge ]); + } + } + + const children: ITrieSingleChild[] = []; + keys.forEach((subEdges, key) => { + const sliced = this.slice(subEdges, 1); + const subpath = path.concat(Buffer.from([ key ])); + + const noAdvance = subEdges.some((edge) => edge.noAdvance); + const allSame = subEdges.every((edge) => edge.noAdvance === noAdvance); + + assert(allSame || subEdges.length === 0, + 'Conflicting `.peek()` and `.match()` entries in ' + + `"${this.name}" at [ ${subpath.join(', ')} ]`); + + children.push({ + key, + noAdvance, + node: this.level(sliced, subpath), + }); + }); + return new TrieSingle(children, otherwise); + } +} diff --git a/llparse-frontend/src/trie/node.ts b/llparse-frontend/src/trie/node.ts new file mode 100644 index 0000000..31f327c --- /dev/null +++ b/llparse-frontend/src/trie/node.ts @@ -0,0 +1,2 @@ +export abstract class TrieNode { +} diff --git a/llparse-frontend/src/trie/sequence.ts b/llparse-frontend/src/trie/sequence.ts new file mode 100644 index 0000000..6b17e02 --- /dev/null +++ b/llparse-frontend/src/trie/sequence.ts @@ -0,0 +1,9 @@ +import { node as api } from 'llparse-builder'; +import { TrieNode } from './node'; + +export class TrieSequence extends TrieNode { + constructor(public readonly select: Buffer, + public readonly child: TrieNode) { + super(); + } +} diff --git a/llparse-frontend/src/trie/single.ts b/llparse-frontend/src/trie/single.ts new file mode 100644 index 0000000..c984af0 --- /dev/null +++ 
b/llparse-frontend/src/trie/single.ts @@ -0,0 +1,16 @@ +import { node as api } from 'llparse-builder'; +import { TrieEmpty } from './empty'; +import { TrieNode } from './node'; + +export interface ITrieSingleChild { + readonly key: number; + readonly noAdvance: boolean; + readonly node: TrieNode; +} + +export class TrieSingle extends TrieNode { + constructor(public readonly children: ReadonlyArray, + public readonly otherwise: TrieEmpty | undefined) { + super(); + } +} diff --git a/llparse-frontend/src/utils/identifier.ts b/llparse-frontend/src/utils/identifier.ts new file mode 100644 index 0000000..c9ba6ad --- /dev/null +++ b/llparse-frontend/src/utils/identifier.ts @@ -0,0 +1,32 @@ +export interface IUniqueName { + readonly name: string; + readonly originalName: string; +} + +export class Identifier { + private readonly ns: Set = new Set(); + + constructor(private readonly prefix: string = '', + private readonly postfix: string = '') { + } + + public id(name: string): IUniqueName { + let target = this.prefix + name + this.postfix; + if (this.ns.has(target)) { + let i = 1; + for (; i < this.ns.size; i++) { + if (!this.ns.has(target + '_' + i)) { + break; + } + } + + target += '_' + i; + } + + this.ns.add(target); + return { + name: target, + originalName: name, + }; + } +} diff --git a/llparse-frontend/src/utils/index.ts b/llparse-frontend/src/utils/index.ts new file mode 100644 index 0000000..06e86f1 --- /dev/null +++ b/llparse-frontend/src/utils/index.ts @@ -0,0 +1,19 @@ +export { Identifier, IUniqueName } from './identifier'; + +export function toCacheKey(value: number | boolean): string { + if (typeof value === 'number') { + if (value < 0) { + return 'm' + (-value); + } else { + return value.toString(); + } + } else if (typeof value === 'boolean') { + if (value === true) { + return 'true'; + } else { + return 'false'; + } + } else { + throw new Error(`Unsupported value: "${value}"`); + } +} diff --git a/llparse-frontend/src/wrap.ts b/llparse-frontend/src/wrap.ts new file mode 100644 index 0000000..013adb3 --- /dev/null +++ b/llparse-frontend/src/wrap.ts @@ -0,0 +1,3 @@ +export interface IWrap { + readonly ref: T; +} diff --git a/llparse-frontend/test/container-test.ts b/llparse-frontend/test/container-test.ts new file mode 100644 index 0000000..28b7f1b --- /dev/null +++ b/llparse-frontend/test/container-test.ts @@ -0,0 +1,46 @@ +import * as assert from 'assert'; + +import { Builder } from 'llparse-builder'; + +import { Container, ContainerWrap, Frontend, node } from '../src/frontend'; +import implementation from './fixtures/a-implementation'; +import { Node } from './fixtures/implementation/node/base'; + +describe('llparse-frontend/Container', () => { + let b: Builder; + beforeEach(() => { + b = new Builder(); + }); + + it('should translate nodes to implementation', () => { + const comb = new Container(); + comb.add('a', implementation); + comb.add('b', implementation); + + const f = new Frontend('llparse', comb.build()); + + const root = b.node('root'); + + root.match('ab', root); + root.match('acd', root); + root.match('efg', root); + root.otherwise(b.error(123, 'hello')); + + const fRoot = f.compile(root, []).root as ContainerWrap; + + const out: string[] = []; + (fRoot.get('a') as Node).build(out); + + assert.deepStrictEqual(out, [ + '', + '', + '', + '', + '', + ]); + }); +}); diff --git a/llparse-frontend/test/fixtures/a-implementation/code/and.ts b/llparse-frontend/test/fixtures/a-implementation/code/and.ts new file mode 100644 index 0000000..c1df821 --- /dev/null +++ 
b/llparse-frontend/test/fixtures/a-implementation/code/and.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class And extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/base.ts b/llparse-frontend/test/fixtures/a-implementation/code/base.ts new file mode 100644 index 0000000..d9a7ace --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/base.ts @@ -0,0 +1,6 @@ +export abstract class Code { + constructor(public readonly ref: T) { + } + + public abstract build(): string; +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/index.ts b/llparse-frontend/test/fixtures/a-implementation/code/index.ts new file mode 100644 index 0000000..855a5cf --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/index.ts @@ -0,0 +1,15 @@ +import { And } from './and'; +import { IsEqual } from './is-equal'; +import { Load } from './load'; +import { Match } from './match'; +import { MulAdd } from './mul-add'; +import { Or } from './or'; +import { Span } from './span'; +import { Store } from './store'; +import { Test } from './test'; +import { Update } from './update'; +import { Value } from './value'; + +export default { + And, IsEqual, Load, Match, MulAdd, Or, Span, Store, Test, Update, Value, +}; diff --git a/llparse-frontend/test/fixtures/a-implementation/code/is-equal.ts b/llparse-frontend/test/fixtures/a-implementation/code/is-equal.ts new file mode 100644 index 0000000..13a1737 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/is-equal.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class IsEqual extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/load.ts b/llparse-frontend/test/fixtures/a-implementation/code/load.ts new file mode 100644 index 0000000..bc97f27 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/load.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Load extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/match.ts b/llparse-frontend/test/fixtures/a-implementation/code/match.ts new file mode 100644 index 0000000..e933a71 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/match.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Match extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/mul-add.ts b/llparse-frontend/test/fixtures/a-implementation/code/mul-add.ts new file mode 100644 index 0000000..e06a217 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/mul-add.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class MulAdd extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/or.ts b/llparse-frontend/test/fixtures/a-implementation/code/or.ts new file mode 100644 index 0000000..a569db4 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/or.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class 
Or extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/span.ts b/llparse-frontend/test/fixtures/a-implementation/code/span.ts new file mode 100644 index 0000000..46fc410 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/span.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Span extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/store.ts b/llparse-frontend/test/fixtures/a-implementation/code/store.ts new file mode 100644 index 0000000..7a1ca9f --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/store.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Store extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/test.ts b/llparse-frontend/test/fixtures/a-implementation/code/test.ts new file mode 100644 index 0000000..4fc8ddb --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/test.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Test extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/update.ts b/llparse-frontend/test/fixtures/a-implementation/code/update.ts new file mode 100644 index 0000000..16b20e2 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/update.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Update extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/code/value.ts b/llparse-frontend/test/fixtures/a-implementation/code/value.ts new file mode 100644 index 0000000..8e76e2a --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/code/value.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Value extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/index.ts b/llparse-frontend/test/fixtures/a-implementation/index.ts new file mode 100644 index 0000000..1d8d29a --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/index.ts @@ -0,0 +1,5 @@ +import code from './code'; +import node from './node'; +import transform from './transform'; + +export default { code, node, transform }; diff --git a/llparse-frontend/test/fixtures/a-implementation/node/base.ts b/llparse-frontend/test/fixtures/a-implementation/node/base.ts new file mode 100644 index 0000000..04c8285 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/base.ts @@ -0,0 +1,38 @@ +import { ContainerWrap, node } from '../../../../src/frontend'; + +export abstract class Node { + private built: boolean = false; + + constructor(public readonly ref: T) { + } + + public build(out: string[]): void { + if (this.built) { + return; + } + + this.built = true; + this.doBuild(out); + + if (this.ref.otherwise !== undefined) { + const cwrap = this.ref.otherwise.node as ContainerWrap; + const otherwise = cwrap.get>('a'); + otherwise.build(out); + } + } + + protected format(value: string): string { + let otherwise: string = ''; + if (this.ref.otherwise !== 
undefined) { + const otherwiseRef = this.ref.otherwise.node.ref; + otherwise = ' otherwise' + + `${this.ref.otherwise.noAdvance ? '-no_adv' : ''}=` + + `${otherwiseRef.id.name}`; + } + + return `<${this.constructor.name} name=${this.ref.id.name} ` + + `${value}${otherwise}/>`; + } + + protected abstract doBuild(out: string[]): void; +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/consume.ts b/llparse-frontend/test/fixtures/a-implementation/node/consume.ts new file mode 100644 index 0000000..cdc6cef --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/consume.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Consume extends Node { + protected doBuild(out: string[]): void { + out.push(this.format(`field=${this.ref.field}`)); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/empty.ts b/llparse-frontend/test/fixtures/a-implementation/node/empty.ts new file mode 100644 index 0000000..ef1499b --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/empty.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Empty extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/error.ts b/llparse-frontend/test/fixtures/a-implementation/node/error.ts new file mode 100644 index 0000000..1a4f31d --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/error.ts @@ -0,0 +1,10 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +class ErrorNode extends Node { + protected doBuild(out: string[]): void { + out.push(this.format(`code=${this.ref.code} reason="${this.ref.reason}"`)); + } +} + +export { ErrorNode as Error }; diff --git a/llparse-frontend/test/fixtures/a-implementation/node/index.ts b/llparse-frontend/test/fixtures/a-implementation/node/index.ts new file mode 100644 index 0000000..31dbc5e --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/index.ts @@ -0,0 +1,15 @@ +import { Consume } from './consume'; +import { Empty } from './empty'; +import { Error } from './error'; +import { Invoke } from './invoke'; +import { Pause } from './pause'; +import { Sequence } from './sequence'; +import { Single } from './single'; +import { SpanEnd } from './span-end'; +import { SpanStart } from './span-start'; +import { TableLookup } from './table-lookup'; + +export default { + Consume, Empty, Error, Invoke, Pause, Sequence, Single, SpanEnd, + SpanStart, TableLookup, +}; diff --git a/llparse-frontend/test/fixtures/a-implementation/node/invoke.ts b/llparse-frontend/test/fixtures/a-implementation/node/invoke.ts new file mode 100644 index 0000000..674be5f --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/invoke.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Invoke extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/pause.ts b/llparse-frontend/test/fixtures/a-implementation/node/pause.ts new file mode 100644 index 0000000..94da63c --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/pause.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Pause extends Node { + 
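Each node in this fixture renders itself through format() above as a small XML-like snapshot of the form '<ClassName name=... details... otherwise[-no_adv]=target/>'; the assertions in container-test.ts compare arrays of such strings. For example, a Consume node named 'llparse__n_root' over the field 'counter', with a non-advancing otherwise edge back to itself, would be emitted as (names here are illustrative):

    '<Consume name=llparse__n_root field=counter otherwise-no_adv=llparse__n_root/>'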
protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/sequence.ts b/llparse-frontend/test/fixtures/a-implementation/node/sequence.ts new file mode 100644 index 0000000..13fd336 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/sequence.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Sequence extends Node { + protected doBuild(out: string[]): void { + out.push(this.format(`select="${this.ref.select.toString('hex')}"`)); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/single.ts b/llparse-frontend/test/fixtures/a-implementation/node/single.ts new file mode 100644 index 0000000..d7bcc72 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/single.ts @@ -0,0 +1,18 @@ +import { ContainerWrap, node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Single extends Node { + protected doBuild(out: string[]): void { + const edges: string[] = []; + for (const edge of this.ref.edges) { + edges.push(`k${edge.key}${edge.noAdvance ? '-no_adv-' : ''}=` + + `${edge.node.ref.id.name}`); + } + out.push(this.format(edges.join(' '))); + + for (const edge of this.ref.edges) { + const edgeNode = edge.node as ContainerWrap; + edgeNode.get>('a').build(out); + } + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/span-end.ts b/llparse-frontend/test/fixtures/a-implementation/node/span-end.ts new file mode 100644 index 0000000..dc79b81 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/span-end.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class SpanEnd extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/span-start.ts b/llparse-frontend/test/fixtures/a-implementation/node/span-start.ts new file mode 100644 index 0000000..32e373c --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/span-start.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class SpanStart extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/node/table-lookup.ts b/llparse-frontend/test/fixtures/a-implementation/node/table-lookup.ts new file mode 100644 index 0000000..e6166d0 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/node/table-lookup.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class TableLookup extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/transform/base.ts b/llparse-frontend/test/fixtures/a-implementation/transform/base.ts new file mode 100644 index 0000000..96dc27d --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/transform/base.ts @@ -0,0 +1,6 @@ +export abstract class Transform { + constructor(public readonly ref: T) { + } + + public abstract build(): string; +} diff --git a/llparse-frontend/test/fixtures/a-implementation/transform/id.ts b/llparse-frontend/test/fixtures/a-implementation/transform/id.ts new file mode 100644 index 0000000..e6c1adc --- /dev/null +++ 
b/llparse-frontend/test/fixtures/a-implementation/transform/id.ts @@ -0,0 +1,8 @@ +import { transform } from '../../../../src/frontend'; +import { Transform } from './base'; + +export class ID extends Transform { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/transform/index.ts b/llparse-frontend/test/fixtures/a-implementation/transform/index.ts new file mode 100644 index 0000000..bed8bc9 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/transform/index.ts @@ -0,0 +1,5 @@ +import { ID } from './id'; +import { ToLower } from './to-lower'; +import { ToLowerUnsafe } from './to-lower-unsafe'; + +export default { ID, ToLower, ToLowerUnsafe }; diff --git a/llparse-frontend/test/fixtures/a-implementation/transform/to-lower-unsafe.ts b/llparse-frontend/test/fixtures/a-implementation/transform/to-lower-unsafe.ts new file mode 100644 index 0000000..9d175a9 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/transform/to-lower-unsafe.ts @@ -0,0 +1,8 @@ +import { transform } from '../../../../src/frontend'; +import { Transform } from './base'; + +export class ToLowerUnsafe extends Transform { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/a-implementation/transform/to-lower.ts b/llparse-frontend/test/fixtures/a-implementation/transform/to-lower.ts new file mode 100644 index 0000000..cbe6456 --- /dev/null +++ b/llparse-frontend/test/fixtures/a-implementation/transform/to-lower.ts @@ -0,0 +1,8 @@ +import { transform } from '../../../../src/frontend'; +import { Transform } from './base'; + +export class ToLower extends Transform { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/and.ts b/llparse-frontend/test/fixtures/implementation/code/and.ts new file mode 100644 index 0000000..c1df821 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/and.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class And extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/base.ts b/llparse-frontend/test/fixtures/implementation/code/base.ts new file mode 100644 index 0000000..d9a7ace --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/base.ts @@ -0,0 +1,6 @@ +export abstract class Code { + constructor(public readonly ref: T) { + } + + public abstract build(): string; +} diff --git a/llparse-frontend/test/fixtures/implementation/code/index.ts b/llparse-frontend/test/fixtures/implementation/code/index.ts new file mode 100644 index 0000000..855a5cf --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/index.ts @@ -0,0 +1,15 @@ +import { And } from './and'; +import { IsEqual } from './is-equal'; +import { Load } from './load'; +import { Match } from './match'; +import { MulAdd } from './mul-add'; +import { Or } from './or'; +import { Span } from './span'; +import { Store } from './store'; +import { Test } from './test'; +import { Update } from './update'; +import { Value } from './value'; + +export default { + And, IsEqual, Load, Match, MulAdd, Or, Span, Store, Test, Update, Value, +}; diff --git a/llparse-frontend/test/fixtures/implementation/code/is-equal.ts b/llparse-frontend/test/fixtures/implementation/code/is-equal.ts new file mode 100644 index 0000000..13a1737 --- /dev/null +++ 
b/llparse-frontend/test/fixtures/implementation/code/is-equal.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class IsEqual extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/load.ts b/llparse-frontend/test/fixtures/implementation/code/load.ts new file mode 100644 index 0000000..bc97f27 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/load.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Load extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/match.ts b/llparse-frontend/test/fixtures/implementation/code/match.ts new file mode 100644 index 0000000..e933a71 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/match.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Match extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/mul-add.ts b/llparse-frontend/test/fixtures/implementation/code/mul-add.ts new file mode 100644 index 0000000..e06a217 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/mul-add.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class MulAdd extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/or.ts b/llparse-frontend/test/fixtures/implementation/code/or.ts new file mode 100644 index 0000000..a569db4 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/or.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Or extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/span.ts b/llparse-frontend/test/fixtures/implementation/code/span.ts new file mode 100644 index 0000000..46fc410 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/span.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Span extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/store.ts b/llparse-frontend/test/fixtures/implementation/code/store.ts new file mode 100644 index 0000000..7a1ca9f --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/store.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Store extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/test.ts b/llparse-frontend/test/fixtures/implementation/code/test.ts new file mode 100644 index 0000000..4fc8ddb --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/test.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Test extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/update.ts b/llparse-frontend/test/fixtures/implementation/code/update.ts new file mode 100644 index 0000000..16b20e2 --- /dev/null +++ 
b/llparse-frontend/test/fixtures/implementation/code/update.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Update extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/code/value.ts b/llparse-frontend/test/fixtures/implementation/code/value.ts new file mode 100644 index 0000000..8e76e2a --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/code/value.ts @@ -0,0 +1,8 @@ +import { code } from '../../../../src/frontend'; +import { Code } from './base'; + +export class Value extends Code { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/index.ts b/llparse-frontend/test/fixtures/implementation/index.ts new file mode 100644 index 0000000..1d8d29a --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/index.ts @@ -0,0 +1,5 @@ +import code from './code'; +import node from './node'; +import transform from './transform'; + +export default { code, node, transform }; diff --git a/llparse-frontend/test/fixtures/implementation/node/base.ts b/llparse-frontend/test/fixtures/implementation/node/base.ts new file mode 100644 index 0000000..c9fd589 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/base.ts @@ -0,0 +1,39 @@ +import { node } from '../../../../src/frontend'; + +export abstract class Node { + private built: boolean = false; + + constructor(public readonly ref: T) { + } + + public build(out: string[]): void { + if (this.built) { + return; + } + + this.built = true; + this.doBuild(out); + + if (this.ref.otherwise !== undefined) { + (this.ref.otherwise.node as Node).build(out); + } + } + + protected format(value: string): string { + let otherwise: string = ''; + if (this.ref.otherwise !== undefined) { + const otherwiseRef = this.ref.otherwise.node.ref; + otherwise = ' otherwise' + + `${this.ref.otherwise.noAdvance ? 
'-no_adv' : ''}=` + + `${otherwiseRef.id.name}`; + if (this.ref.otherwise.value !== undefined) { + otherwise += `:${this.ref.otherwise.value}`; + } + } + + return `<${this.constructor.name} name=${this.ref.id.name} ` + + `${value}${otherwise}/>`; + } + + protected abstract doBuild(out: string[]): void; +} diff --git a/llparse-frontend/test/fixtures/implementation/node/consume.ts b/llparse-frontend/test/fixtures/implementation/node/consume.ts new file mode 100644 index 0000000..cdc6cef --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/consume.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Consume extends Node { + protected doBuild(out: string[]): void { + out.push(this.format(`field=${this.ref.field}`)); + } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/empty.ts b/llparse-frontend/test/fixtures/implementation/node/empty.ts new file mode 100644 index 0000000..ef1499b --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/empty.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Empty extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/error.ts b/llparse-frontend/test/fixtures/implementation/node/error.ts new file mode 100644 index 0000000..1a4f31d --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/error.ts @@ -0,0 +1,10 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +class ErrorNode extends Node { + protected doBuild(out: string[]): void { + out.push(this.format(`code=${this.ref.code} reason="${this.ref.reason}"`)); + } +} + +export { ErrorNode as Error }; diff --git a/llparse-frontend/test/fixtures/implementation/node/index.ts b/llparse-frontend/test/fixtures/implementation/node/index.ts new file mode 100644 index 0000000..31dbc5e --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/index.ts @@ -0,0 +1,15 @@ +import { Consume } from './consume'; +import { Empty } from './empty'; +import { Error } from './error'; +import { Invoke } from './invoke'; +import { Pause } from './pause'; +import { Sequence } from './sequence'; +import { Single } from './single'; +import { SpanEnd } from './span-end'; +import { SpanStart } from './span-start'; +import { TableLookup } from './table-lookup'; + +export default { + Consume, Empty, Error, Invoke, Pause, Sequence, Single, SpanEnd, + SpanStart, TableLookup, +}; diff --git a/llparse-frontend/test/fixtures/implementation/node/invoke.ts b/llparse-frontend/test/fixtures/implementation/node/invoke.ts new file mode 100644 index 0000000..674be5f --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/invoke.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Invoke extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/pause.ts b/llparse-frontend/test/fixtures/implementation/node/pause.ts new file mode 100644 index 0000000..94da63c --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/pause.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Pause extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); 
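This fixture is exercised end-to-end by frontend-test.ts further below. The driving pattern, sketched here with illustrative node names and with type parameters omitted for brevity, is: build a graph with llparse-builder, hand the fixture implementation to Frontend, compile, and let the wrapped root render itself:

    import * as source from 'llparse-builder';

    import { Frontend } from '../src/frontend';
    import implementation from './fixtures/implementation';
    import { Node } from './fixtures/implementation/node/base';

    const b = new source.Builder();
    const f = new Frontend('llparse', implementation);

    const root = b.node('root');
    root.match('ab', root);
    root.otherwise(b.error(123, 'hello'));

    // compile() translates the builder graph, runs the peephole pass and
    // returns the wrapped root plus auxiliary information.
    const fRoot = f.compile(root, []).root as Node;

    const out: string[] = [];
    fRoot.build(out);   // one snapshot string per node reachable from the root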
+ } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/sequence.ts b/llparse-frontend/test/fixtures/implementation/node/sequence.ts new file mode 100644 index 0000000..bb745f5 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/sequence.ts @@ -0,0 +1,15 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Sequence extends Node { + protected doBuild(out: string[]): void { + let str = `select="${this.ref.select.toString('hex')}" ` + + `edge="${this.ref.edge!.node.ref.id.name}"`; + if (this.ref.edge!.value !== undefined) { + str += `:${this.ref.edge!.value}`; + } + out.push(this.format(str)); + const edgeNode = this.ref.edge!.node as Node; + edgeNode.build(out); + } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/single.ts b/llparse-frontend/test/fixtures/implementation/node/single.ts new file mode 100644 index 0000000..b24ef93 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/single.ts @@ -0,0 +1,22 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class Single extends Node { + protected doBuild(out: string[]): void { + const edges: string[] = []; + for (const edge of this.ref.edges) { + let str = `k${edge.key}${edge.noAdvance ? '-no_adv-' : ''}=` + + `${edge.node.ref.id.name}`; + if (edge.value !== undefined) { + str += `:${edge.value}`; + } + edges.push(str); + } + out.push(this.format(edges.join(' '))); + + for (const edge of this.ref.edges) { + const edgeNode = edge.node as Node; + edgeNode.build(out); + } + } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/span-end.ts b/llparse-frontend/test/fixtures/implementation/node/span-end.ts new file mode 100644 index 0000000..dc79b81 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/span-end.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class SpanEnd extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/span-start.ts b/llparse-frontend/test/fixtures/implementation/node/span-start.ts new file mode 100644 index 0000000..32e373c --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/span-start.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class SpanStart extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/implementation/node/table-lookup.ts b/llparse-frontend/test/fixtures/implementation/node/table-lookup.ts new file mode 100644 index 0000000..e6166d0 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/node/table-lookup.ts @@ -0,0 +1,8 @@ +import { node } from '../../../../src/frontend'; +import { Node } from './base'; + +export class TableLookup extends Node { + protected doBuild(out: string[]): void { + out.push(this.format('')); + } +} diff --git a/llparse-frontend/test/fixtures/implementation/transform/base.ts b/llparse-frontend/test/fixtures/implementation/transform/base.ts new file mode 100644 index 0000000..96dc27d --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/transform/base.ts @@ -0,0 +1,6 @@ +export abstract class Transform { + constructor(public readonly ref: T) { + } + + public abstract build(): string; +} diff --git 
diff --git a/llparse-frontend/test/fixtures/implementation/transform/id.ts b/llparse-frontend/test/fixtures/implementation/transform/id.ts new file mode 100644 index 0000000..e6c1adc --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/transform/id.ts @@ -0,0 +1,8 @@ +import { transform } from '../../../../src/frontend'; +import { Transform } from './base'; + +export class ID extends Transform { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/transform/index.ts b/llparse-frontend/test/fixtures/implementation/transform/index.ts new file mode 100644 index 0000000..bed8bc9 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/transform/index.ts @@ -0,0 +1,5 @@ +import { ID } from './id'; +import { ToLower } from './to-lower'; +import { ToLowerUnsafe } from './to-lower-unsafe'; + +export default { ID, ToLower, ToLowerUnsafe }; diff --git a/llparse-frontend/test/fixtures/implementation/transform/to-lower-unsafe.ts b/llparse-frontend/test/fixtures/implementation/transform/to-lower-unsafe.ts new file mode 100644 index 0000000..9d175a9 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/transform/to-lower-unsafe.ts @@ -0,0 +1,8 @@ +import { transform } from '../../../../src/frontend'; +import { Transform } from './base'; + +export class ToLowerUnsafe extends Transform { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/fixtures/implementation/transform/to-lower.ts b/llparse-frontend/test/fixtures/implementation/transform/to-lower.ts new file mode 100644 index 0000000..cbe6456 --- /dev/null +++ b/llparse-frontend/test/fixtures/implementation/transform/to-lower.ts @@ -0,0 +1,8 @@ +import { transform } from '../../../../src/frontend'; +import { Transform } from './base'; + +export class ToLower extends Transform { + public build(): string { + return ''; + } +} diff --git a/llparse-frontend/test/frontend-test.ts b/llparse-frontend/test/frontend-test.ts new file mode 100644 index 0000000..69e075c --- /dev/null +++ b/llparse-frontend/test/frontend-test.ts @@ -0,0 +1,187 @@ +import * as assert from 'assert'; + +import * as source from 'llparse-builder'; + +import { Frontend, node } from '../src/frontend'; +import implementation from './fixtures/implementation'; +import { Node } from './fixtures/implementation/node/base'; + +function checkNodes(f: Frontend, root: source.node.Node, + expected: ReadonlyArray<string>) { + const fRoot = f.compile(root, []).root as Node; + + const out: string[] = []; + fRoot.build(out); + + assert.deepStrictEqual(out, expected); + + return fRoot; +} + +function checkResumptionTargets(f: Frontend, expected: ReadonlyArray<string>) { + const targets = Array.from(f.getResumptionTargets()).map((t) => { + return t.ref.id.name; + }); + + assert.deepStrictEqual(targets, expected); +} + +describe('llparse-frontend', () => { + let b: source.Builder; + let f: Frontend; + beforeEach(() => { + b = new source.Builder(); + f = new Frontend('llparse', implementation); + }); + + it('should translate nodes to implementation', () => { + const root = b.node('root'); + + root.match('ab', root); + root.match('acd', root); + root.match('efg', root); + root.otherwise(b.error(123, 'hello')); + + checkNodes(f, root, [ + '', + '', + '', + '', + '', + ]); + + checkResumptionTargets(f, [ + 'llparse__n_root', + 'llparse__n_root_1', + 'llparse__n_root_3', + 'llparse__n_root_2', + ]); + }); + + it('should do peephole optimization', () => { + const root = b.node('root'); + const root1 = b.node('a'); + const
root2 = b.node('b'); + const node1 = b.node('c'); + const node2 = b.node('d'); + + root.otherwise(root1); + root1.otherwise(root2); + root2.skipTo(node1); + node1.otherwise(node2); + node2.otherwise(root); + + checkNodes(f, root, [ + '', + ]); + + checkResumptionTargets(f, [ + 'llparse__n_b', + ]); + }); + + it('should generate proper resumption targets', () => { + b.property('i64', 'counter'); + + const root = b.node('root'); + const end = b.node('end'); + const store = b.invoke(b.code.store('counter')); + + root.select({ a: 1, b: 2 }, store); + root.otherwise(b.error(1, 'okay')); + + store.otherwise(end); + + end.match('ohai', root); + end.match('paus', b.pause(1, 'paused').otherwise( + b.pause(2, 'paused').otherwise(root))); + end.otherwise(b.error(2, 'ohai')); + + checkNodes(f, root, [ + '', + '', + '', + '', + '', + '', + '', + '', + '', + ]); + + checkResumptionTargets(f, [ + 'llparse__n_root', + 'llparse__n_end', + 'llparse__n_end_1', + 'llparse__n_end_2', + 'llparse__n_pause_1', + ]); + }); + + it('should translate Span code into Span', () => { + const root = b.invoke(b.code.span('my_span')); + root.otherwise(b.error(1, 'okay')); + + const fRoot = checkNodes(f, root, [ + '', + '', + ]); + + assert((fRoot.ref as any).code instanceof implementation.code.Span); + }); + + it('should translate overlapping matches', () => { + const root = b.node('root'); + + root.match('ab', root); + root.match('abc', root); + root.otherwise(b.error(123, 'hello')); + + checkNodes(f, root, [ + '', + '', + '', + ]); + + checkResumptionTargets(f, [ + 'llparse__n_root', + 'llparse__n_root_1', + ]); + }); + + it('should translate overlapping matches with values', () => { + const root = b.node('root'); + const store = b.invoke(b.code.store('counter')); + + root.select({ + ab: 1, + abc: 2, + }, store); + store.otherwise(root); + root.otherwise(b.error(123, 'hello')); + + checkNodes(f, root, [ + '', + '', + '', + '', + ]); + + checkResumptionTargets(f, [ + 'llparse__n_root', + 'llparse__n_root_1', + ]); + }); +}); diff --git a/llparse-frontend/tsconfig.json b/llparse-frontend/tsconfig.json new file mode 100644 index 0000000..01ec7c2 --- /dev/null +++ b/llparse-frontend/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "strict": true, + "target": "es2017", + "module": "commonjs", + "moduleResolution": "node", + "outDir": "./lib", + "declaration": true, + "pretty": true, + "sourceMap": true + }, + "include": [ + "src/**/*.ts" + ] +} diff --git a/llparse-frontend/tslint.json b/llparse-frontend/tslint.json new file mode 100644 index 0000000..24fec09 --- /dev/null +++ b/llparse-frontend/tslint.json @@ -0,0 +1,16 @@ +{ + "defaultSeverity": "error", + "extends": [ + "tslint:recommended" + ], + "jsRules": {}, + "rules": { + "no-bitwise": null, + "max-line-length": [true, 80], + "max-classes-per-file": [true, 1, "exclude-class-expressions"], + "quotemark": [ + true, "single", "avoid-escape", "avoid-template" + ] + }, + "rulesDirectory": [] +} diff --git a/llparse/.gitignore b/llparse/.gitignore new file mode 100644 index 0000000..88b2771 --- /dev/null +++ b/llparse/.gitignore @@ -0,0 +1,4 @@ +node_modules/ +npm-debug.log +lib/ +test/tmp/ diff --git a/llparse/.travis.yml b/llparse/.travis.yml new file mode 100644 index 0000000..b381e1b --- /dev/null +++ b/llparse/.travis.yml @@ -0,0 +1,6 @@ +sudo: false +language: node_js +node_js: + - "stable" +script: + CFLAGS="-O0" npm test diff --git a/llparse/CNAME b/llparse/CNAME new file mode 100644 index 0000000..e39566e --- /dev/null +++ b/llparse/CNAME @@ -0,0 +1 @@ 
+llparse.org \ No newline at end of file diff --git a/llparse/CODE_OF_CONDUCT.md b/llparse/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..4c21140 --- /dev/null +++ b/llparse/CODE_OF_CONDUCT.md @@ -0,0 +1,4 @@ +# Code of Conduct + +* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md) +* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/master/Moderation-Policy.md) diff --git a/llparse/LICENSE-MIT b/llparse/LICENSE-MIT new file mode 100644 index 0000000..6c1512d --- /dev/null +++ b/llparse/LICENSE-MIT @@ -0,0 +1,22 @@ +This software is licensed under the MIT License. + +Copyright Fedor Indutny, 2018. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/llparse/README.md b/llparse/README.md new file mode 100644 index 0000000..afbe4aa --- /dev/null +++ b/llparse/README.md @@ -0,0 +1,86 @@ +# llparse +[![Build Status](https://secure.travis-ci.org/nodejs/llparse.svg)](http://travis-ci.org/nodejs/llparse) +[![NPM version](https://badge.fury.io/js/llparse.svg)](https://badge.fury.io/js/llparse) + +An API for compiling an incremental parser into a C output. + +## Usage + +```ts +import { LLParse } from 'llparse'; + +const p = new LLParse('http_parser'); + +const method = p.node('method'); +const beforeUrl = p.node('before_url'); +const urlSpan = p.span(p.code.span('on_url')); +const url = p.node('url'); +const http = p.node('http'); + +// Add custom uint8_t property to the state +p.property('i8', 'method'); + +// Store method inside a custom property +const onMethod = p.invoke(p.code.store('method'), beforeUrl); + +// Invoke custom C function +const complete = p.invoke(p.code.match('on_complete'), { + // Restart + 0: method +}, p.error(4, '`on_complete` error')); + +method + .select({ + 'HEAD': 0, 'GET': 1, 'POST': 2, 'PUT': 3, + 'DELETE': 4, 'OPTIONS': 5, 'CONNECT': 6, + 'TRACE': 7, 'PATCH': 8 + }, onMethod) + .otherwise(p.error(5, 'Expected method')); + +beforeUrl + .match(' ', beforeUrl) + .otherwise(urlSpan.start(url)); + +url + .peek(' ', urlSpan.end(http)) + .skipTo(url); + +http + .match(' HTTP/1.1\r\n\r\n', complete) + .otherwise(p.error(6, 'Expected HTTP/1.1 and two newlines')); + +const artifacts = p.build(method); +console.log('----- C -----'); +console.log(artifacts.c); // string +console.log('----- C END -----'); +console.log('----- HEADER -----'); +console.log(artifacts.header); +console.log('----- HEADER END -----'); +``` + +#### LICENSE + +This software is licensed under the MIT License. + +Copyright Fedor Indutny, 2020. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. + +[3]: https://llvm.org/docs/LangRef.html diff --git a/llparse/_config.yml b/llparse/_config.yml new file mode 100644 index 0000000..1885487 --- /dev/null +++ b/llparse/_config.yml @@ -0,0 +1 @@ +theme: jekyll-theme-midnight \ No newline at end of file diff --git a/llparse/examples/http/.gitignore b/llparse/examples/http/.gitignore new file mode 100644 index 0000000..fcfe02e --- /dev/null +++ b/llparse/examples/http/.gitignore @@ -0,0 +1,6 @@ +http +*.c +*.ll +*.h +*.o +*.dSYM diff --git a/llparse/examples/http/Makefile b/llparse/examples/http/Makefile new file mode 100644 index 0000000..323d2e3 --- /dev/null +++ b/llparse/examples/http/Makefile @@ -0,0 +1,11 @@ +CC ?= clang + +all: http + +http: main.c http_parser.bc + $(CC) -g3 -flto -Os -fvisibility=hidden -Wall -I. 
http_parser.c main.c -o $@ + +http_parser.bc: index.ts + npx ts-node $< + +.PHONY = all diff --git a/llparse/examples/http/index.ts b/llparse/examples/http/index.ts new file mode 100644 index 0000000..dc7f28a --- /dev/null +++ b/llparse/examples/http/index.ts @@ -0,0 +1,51 @@ +import { LLParse } from '../../src/api'; + +const p = new LLParse('http_parser'); + +const method = p.node('method'); +const beforeUrl = p.node('before_url'); +const urlSpan = p.span(p.code.span('on_url')); +const url = p.node('url'); +const http = p.node('http'); + +// Add custom uint8_t property to the state +p.property('i8', 'method'); + +// Store method inside a custom property +const onMethod = p.invoke(p.code.store('method'), beforeUrl); + +// Invoke custom C function +const complete = p.invoke(p.code.match('on_complete'), { + // Restart + 0: method +}, p.error(4, '`on_complete` error')); + +method + .select({ + 'HEAD': 0, 'GET': 1, 'POST': 2, 'PUT': 3, + 'DELETE': 4, 'OPTIONS': 5, 'CONNECT': 6, + 'TRACE': 7, 'PATCH': 8 + }, onMethod) + .otherwise(p.error(5, 'Expected method')); + +beforeUrl + .match(' ', beforeUrl) + .otherwise(urlSpan.start(url)); + +url + .peek(' ', urlSpan.end(http)) + .skipTo(url); + +http + .match(' HTTP/1.1\r\n\r\n', complete) + .match(' HTTP/1.1\n\n', complete) + .otherwise(p.error(6, 'Expected HTTP/1.1 and two newlines')); + +// Build + +const fs = require('fs'); +const path = require('path'); + +const artifacts = p.build(method); +fs.writeFileSync(path.join(__dirname, 'http_parser.h'), artifacts.header); +fs.writeFileSync(path.join(__dirname, 'http_parser.c'), artifacts.c); diff --git a/llparse/examples/http/main.c b/llparse/examples/http/main.c new file mode 100644 index 0000000..4721a19 --- /dev/null +++ b/llparse/examples/http/main.c @@ -0,0 +1,48 @@ +#include +#include +#include +#include + +#include "http_parser.h" + +int on_url(http_parser_t* s, const char* p, const char* endp) { + if (p == endp) + return 0; + + fprintf(stdout, "method=%d url_part=\"%.*s\"\n", s->method, + (int) (endp - p), p); + return 0; +} + + +int on_complete(http_parser_t* s, const char* p, const char* endp) { + fprintf(stdout, "on_complete\n"); + return 0; +} + + +int main(int argc, char** argv) { + http_parser_t s; + + http_parser_init(&s); + + for (;;) { + char buf[16384]; + const char* input; + const char* endp; + int code; + + input = fgets(buf, sizeof(buf), stdin); + if (input == NULL) + break; + + endp = input + strlen(input); + code = http_parser_execute(&s, input, endp); + if (code != 0) { + fprintf(stderr, "code=%d error=%d reason=%s\n", code, s.error, s.reason); + return -1; + } + } + + return 0; +} diff --git a/llparse/package-lock.json b/llparse/package-lock.json new file mode 100644 index 0000000..e4c2b29 --- /dev/null +++ b/llparse/package-lock.json @@ -0,0 +1,1802 @@ +{ + "name": "llparse", + "version": "7.1.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "@babel/code-frame": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz", + "integrity": "sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==", + "dev": true, + "requires": { + "@babel/highlight": "^7.10.4" + } + }, + "@babel/helper-validator-identifier": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz", + "integrity": "sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==", + "dev": 
true + }, + "@babel/highlight": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.4.tgz", + "integrity": "sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.10.4", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@types/color-name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", + "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", + "dev": true + }, + "@types/debug": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.5.tgz", + "integrity": "sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==", + "dev": true + }, + "@types/mocha": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-8.0.3.tgz", + "integrity": "sha512-vyxR57nv8NfcU0GZu8EUXZLTbCMupIUwy95LJ6lllN+JRPG25CwMHoB1q5xKh8YKhQnHYRAn4yW2yuHbf/5xgg==", + "dev": true + }, + "@types/node": { + "version": "14.11.8", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.11.8.tgz", + "integrity": "sha512-KPcKqKm5UKDkaYPTuXSx8wEP7vE9GnuaXIZKijwRYcePpZFDVuy2a57LarFKiORbHOuTOOwYzxVxcUzsh2P2Pw==", + "dev": true + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "array.prototype.map": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.2.tgz", + "integrity": "sha512-Az3OYxgsa1g7xDYp86l0nnN4bcmuEITGe1rbdEBVkrqkzMgDcbdQ2R7r41pNzti+4NMces3H8gMmuioZUilLgw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "es-array-method-boxes-properly": "^1.0.0", + "is-string": "^1.0.4" + } + }, + "balanced-match": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "binary-extensions": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.1.0.tgz", + "integrity": "sha512-1Yj8h9Q+QDF5FzhMs/c9+6UntbD5MkRfRwac8DoEm9ZfUBZ7tZ55YcGVAzEe4bXsdQHEk+s9S5wsOKVdZrw0tQ==", + "dev": true + }, + "binary-search": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/binary-search/-/binary-search-1.3.6.tgz", + "integrity": "sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==" + }, + "bitcode": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/bitcode/-/bitcode-1.2.0.tgz", + "integrity": "sha512-cWgZK/ri/1ZUJ+UKEwP9Cqw10WY5wHz+boMxVO4vvc0btmxa2tMc2m2Zk9HYdCyx4b5+sgQM1/NCJPTIPO1XOw==", + "dev": true, + "requires": { + "bitcode-builder": "^1.2.0" + } + }, + "bitcode-builder": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/bitcode-builder/-/bitcode-builder-1.2.0.tgz", + "integrity": "sha512-biuJIhrog5d1IFMaKtHMJ8PJ1L3zxiWdclwYErjOBWf8Gwyqa4XwflvMufzcQw/OUeAArO1AqOrqsOFsWJ94OA==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "dependencies": { + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, + "chokidar": { + "version": "3.4.2", + "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-3.4.2.tgz", + "integrity": "sha512-IZHaDeBeI+sZJRX7lGcXsdzgvZqKv6sECqsbErJA4mHWfpRrD8B97kSFN4cQz6nGBGiuFia1MKR4d6c1o8Cv7A==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.1.2", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.4.0" + } + }, + "cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + } + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "debug": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "requires": { + "ms": "2.1.2" + } + }, + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + 
"integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true + }, + "define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "requires": { + "object-keys": "^1.0.12" + } + }, + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + }, + "object.assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.1.tgz", + "integrity": "sha512-VT/cxmx5yaoHSOTSyrCygIDFco+RsibY2NM0a4RdEeY/4KgqezwFtK1yr3U67xYhqJSlASm2pKhLVzPj2lr4bA==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.0", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } + } + } + } + }, + "es-array-method-boxes-properly": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", + "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==", + "dev": true + }, + "es-get-iterator": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", + "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", + "dev": true, + "requires": { + "es-abstract": "^1.17.4", + "has-symbols": "^1.0.1", + "is-arguments": "^1.0.4", + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-string": "^1.0.5", + "isarray": "^2.0.5" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true + }, + "esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/flat/-/flat-4.1.0.tgz", + "integrity": "sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==", + "dev": true, + "requires": { + "is-buffer": "~2.0.3" + } + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.3.tgz", + "integrity": "sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ==", + "dev": true, + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "glob": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, + "requires": 
{ + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", + "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==" + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-arguments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", + "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-buffer": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.4.tgz", + "integrity": "sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==", + "dev": true + }, + "is-callable": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.2.tgz", + "integrity": "sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA==" + }, + "is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": 
"sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", + "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==", + "dev": true + }, + "is-negative-zero": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.0.tgz", + "integrity": "sha1-lVOxIbD6wohp2p7UWeIMdUN4hGE=" + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true + }, + "is-regex": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.1.tgz", + "integrity": "sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg==", + "requires": { + "has-symbols": "^1.0.1" + } + }, + "is-set": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", + "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==", + "dev": true + }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, + "is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "requires": { + "has-symbols": "^1.0.1" + } + }, + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "iterate-iterator": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/iterate-iterator/-/iterate-iterator-1.0.1.tgz", + "integrity": "sha512-3Q6tudGN05kbkDQDI4CqjaBf4qf85w6W6GnuZDtUVYwKgtC1q8yxYX7CZed7N+tLzQqS6roujWvszf13T+n9aw==", + "dev": true + }, + "iterate-value": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/iterate-value/-/iterate-value-1.0.2.tgz", + "integrity": "sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==", + "dev": true, + "requires": { + "es-get-iterator": "^1.0.2", + "iterate-iterator": "^1.0.1" + } + }, + "js-tokens": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "llparse": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/llparse/-/llparse-6.4.0.tgz", + "integrity": "sha512-ySA+bj2wOLXrKmohAVMw0Nq84oHDPLdg+sUx4+VeSk1U72MEKfKAXS7zh82n15BRjWc/cVgWBN9RQAFdgk0g5Q==", + "dev": true, + "requires": { + "bitcode": "^1.2.0", + "debug": "^3.2.6", + "llparse-frontend": "^1.4.0" + }, + "dependencies": { + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "llparse-frontend": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/llparse-frontend/-/llparse-frontend-1.4.0.tgz", + "integrity": "sha512-lUpGvGU9MDPb3k4Wbb0S7FgpceCirXVeFQQZjsYWB3fIEGU0Q6IEiTO91J6MLLN75gsxvGiWZaKVnmcHb7jh6g==", + "dev": true, + "requires": { + "debug": "^3.2.6", + "llparse-builder": "^1.3.2" + } + } + } + }, + "llparse-builder": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/llparse-builder/-/llparse-builder-1.4.0.tgz", + "integrity": "sha512-mu0/zgAc1KdD6r+tjmRvF+YgoToQvBun4iXISRfSmx66b5qurckRpYjzBUYpHn0XVqKPRrGg86gMQKv8ogY3Rw==", + "dev": true, + "requires": { + "@types/debug": "0.0.30", + "binary-search": "^1.3.6", + "debug": "^3.2.6" + }, + "dependencies": { + "@types/debug": { + "version": "0.0.30", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-0.0.30.tgz", + "integrity": "sha512-orGL5LXERPYsLov6CWs3Fh6203+dXzJkR7OnddIr2514Hsecwc8xRpzCapshBbKFImCsvS/mk6+FWiN5LyZJAQ==", + "dev": true + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "llparse-frontend": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/llparse-frontend/-/llparse-frontend-3.0.0.tgz", + "integrity": "sha512-G/o0Po2C+G5OtP8MJeQDjDf5qwDxcO7K6x4r6jqGsJwxk7yblbJnRqpmye7G/lZ8dD0Hv5neY4/KB5BhDmEc9Q==", + "requires": { + "debug": "^3.2.6", + "llparse-builder": "^1.5.2" + }, + "dependencies": { + "@types/debug": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.5.tgz", + "integrity": "sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==" + }, + "debug": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "llparse-builder": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/llparse-builder/-/llparse-builder-1.5.2.tgz", + "integrity": "sha512-i862UNC3YUEdlfK/NUCJxlKjtWjgAI9AJXDRgjcfRHfwFt4Sf8eFPTRsc91/2R9MBZ0kyFdfhi8SVhMsZf1gNQ==", + "requires": { + "@types/debug": "4.1.5 ", + 
"binary-search": "^1.3.6", + "debug": "^4.2.0" + }, + "dependencies": { + "debug": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "requires": { + "ms": "2.1.2" + } + } + } + } + } + }, + "llparse-test-fixture": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/llparse-test-fixture/-/llparse-test-fixture-5.0.1.tgz", + "integrity": "sha512-BrnS70lxODcTXttLkfoSqn8DPbNuuSLFR48JnwxLimFkr8QRNBVbUku+bumIIo5Z7gAbIGNQXDOiSi2crMzS8Q==", + "dev": true, + "requires": { + "esm": "^3.2.25", + "llparse": "^6.4.0", + "yargs": "^15.4.1" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "log-symbols": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", + "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", + "dev": true, + "requires": { + "chalk": "^4.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + } + } + }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + 
"minimist": "^1.2.5" + } + }, + "mocha": { + "version": "8.1.3", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.1.3.tgz", + "integrity": "sha512-ZbaYib4hT4PpF4bdSO2DohooKXIn4lDeiYqB+vTmCdr6l2woW0b6H3pf5x4sM5nwQMru9RvjjHYWVGltR50ZBw==", + "dev": true, + "requires": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.4.2", + "debug": "4.1.1", + "diff": "4.0.2", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.6", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "3.14.0", + "log-symbols": "4.0.0", + "minimatch": "3.0.4", + "ms": "2.1.2", + "object.assign": "4.1.0", + "promise.allsettled": "1.0.2", + "serialize-javascript": "4.0.0", + "strip-json-comments": "3.0.1", + "supports-color": "7.1.0", + "which": "2.0.2", + "wide-align": "1.1.3", + "workerpool": "6.0.0", + "yargs": "13.3.2", + "yargs-parser": "13.1.2", + "yargs-unparser": "1.6.1" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "js-yaml": { + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", + "integrity": "sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": 
"sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + } + }, + "yargs": { + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + } + } + }, + "yargs-parser": { + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "object-inspect": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.8.0.tgz", + "integrity": "sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA==" + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + }, + "object.assign": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", + "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "function-bind": "^1.1.1", 
+ "has-symbols": "^1.0.0", + "object-keys": "^1.0.11" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", + "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + }, + "dependencies": { + "p-limit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz", + "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + } + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "path-parse": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true + }, + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true + }, + "promise.allsettled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/promise.allsettled/-/promise.allsettled-1.0.2.tgz", + "integrity": "sha512-UpcYW5S1RaNKT6pd+s9jp9K9rlQge1UXKskec0j6Mmuq7UJCvlS2J2/s/yuPN8ehftf9HXMxWlKiPbGGUzpoRg==", + "dev": true, + "requires": { + "array.prototype.map": "^1.0.1", + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "iterate-value": "^1.0.0" + } + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readdirp": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.4.0.tgz", + "integrity": "sha512-0xe001vZBnJEK+uKcj8qOhyAKPzIT+gStxWr3LCB0DwcXR5NZJ3IaC+yGnHCYzB/S7ov3m3EEbZI2zeNvX+hGQ==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": 
"sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true + }, + "resolve": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz", + "integrity": "sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + }, + "serialize-javascript": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", + "integrity": "sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + } + }, + "string.prototype.trimend": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", + "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "string.prototype.trimstart": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", + "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "strip-ansi": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "dev": true + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + }, + "dependencies": { + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + } + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "ts-node": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-9.0.0.tgz", + "integrity": "sha512-/TqB4SnererCDR/vb4S/QvSZvzQMJN8daAslg7MeaiHvD8rDZsSfXmNeNumyZZzMned72Xoq/isQljYSt8Ynfg==", + "dev": true, + "requires": { + "arg": "^4.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "source-map-support": "^0.5.17", + "yn": "3.1.1" + } + }, + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "tslint": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/tslint/-/tslint-6.1.3.tgz", + "integrity": "sha512-IbR4nkT96EQOvKE2PW/djGz8iGNeJ4rF2mBfiYaR/nvUWYKJhLwimoJKgjIFEIDibBtOevj7BqCRL4oHeWWUCg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "builtin-modules": "^1.1.1", + "chalk": "^2.3.0", + "commander": "^2.12.1", + "diff": "^4.0.1", + "glob": "^7.1.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.3", + "resolve": "^1.3.2", + "semver": "^5.3.0", + "tslib": "^1.13.0", + "tsutils": "^2.29.0" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true + } + } + }, + "tsutils": { + "version": "2.29.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", + "integrity": "sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==", + "dev": true, + "requires": { + "tslib": "^1.8.1" + } + }, + "typescript": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.3.tgz", + "integrity": "sha512-tEu6DGxGgRJPb/mVPIZ48e69xCn2yRmCgYmDugAVwmJ6o+0u1RI18eO7E7WBTLYLaEVVOhwQmcdhQHweux/WPg==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", 
+ "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true + }, + "wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2" + } + }, + "workerpool": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.0.0.tgz", + "integrity": "sha512-fU2OcNA/GVAJLLyKUoHkAgIhKb0JoCpSjLC/G2vYKxUjVmQwGbRVeoPJ1a8U4pnVofz4AQV5Y/NEw8oKqxEBtA==", + "dev": true + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "dev": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + } + } + }, + "wrappy": { + 
"version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "y18n": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", + "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", + "dev": true + }, + "yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dev": true, + "requires": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { 
+ "ansi-regex": "^5.0.0" + } + } + } + }, + "yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + }, + "yargs-unparser": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-1.6.1.tgz", + "integrity": "sha512-qZV14lK9MWsGCmcr7u5oXGH0dbGqZAIxTDrWXZDo5zUr6b6iUmelNKO6x6R1dQT24AH3LgRxJpr8meWy2unolA==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "decamelize": "^1.2.0", + "flat": "^4.1.0", + "is-plain-obj": "^1.1.0", + "yargs": "^14.2.3" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + } + }, + "yargs": { + "version": "14.2.3", + "resolved": 
"https://registry.npmjs.org/yargs/-/yargs-14.2.3.tgz", + "integrity": "sha512-ZbotRWhF+lkjijC/VhmOT9wSgyBQ7+zr13+YLkhfsSiTriYsMzkTUFP18pFhWwBeMa5gUc1MzbhrO6/VB7c9Xg==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "decamelize": "^1.2.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^15.0.1" + } + }, + "yargs-parser": { + "version": "15.0.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-15.0.1.tgz", + "integrity": "sha512-0OAMV2mAZQrs3FkNpDQcBk1x5HXb8X4twADss4S0Iuk+2dGnLOE/fRHrsYm542GduMveyA77OF4wrNJuanRCWw==", + "dev": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true + } + } +} diff --git a/llparse/package.json b/llparse/package.json new file mode 100644 index 0000000..ee35dc4 --- /dev/null +++ b/llparse/package.json @@ -0,0 +1,49 @@ +{ + "name": "llparse", + "version": "7.1.1", + "description": "Compile incremental parsers to C code", + "main": "lib/api.js", + "types": "lib/api.d.ts", + "files": [ + "lib", + "src" + ], + "scripts": { + "build": "tsc", + "clean": "rm -rf lib", + "prepare": "npm run clean && npm run build", + "lint": "tslint -c tslint.json src/**/*.ts test/**/*.ts", + "fix-lint": "npm run lint -- --fix", + "mocha": "mocha --timeout=10000 -r ts-node/register/type-check --reporter spec test/*-test.ts", + "test": "npm run mocha && npm run lint" + }, + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/nodejs/llparse.git" + }, + "keywords": [ + "llparse", + "compiler" + ], + "author": "Fedor Indutny (http://darksi.de/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/nodejs/llparse/issues" + }, + "homepage": "https://github.com/nodejs/llparse#readme", + "devDependencies": { + "@types/debug": "^4.1.5", + "@types/mocha": "^8.0.3", + "@types/node": "^14.11.8", + "esm": "^3.2.25", + "llparse-test-fixture": "^5.0.1", + "mocha": "^8.1.3", + "ts-node": "^9.0.0", + "tslint": "^6.1.3", + "typescript": "^4.0.3" + }, + "dependencies": { + "debug": "^4.2.0", + "llparse-frontend": "^3.0.0" + } +} diff --git a/llparse/src/api.ts b/llparse/src/api.ts new file mode 100644 index 0000000..a34f5bc --- /dev/null +++ b/llparse/src/api.ts @@ -0,0 +1,47 @@ +import * as frontend from 'llparse-frontend'; + +import source = frontend.source; + +import { Compiler, ICompilerOptions, ICompilerResult } from './compiler'; + +export { source, ICompilerOptions, ICompilerResult }; + +// TODO(indutny): API for disabling/short-circuiting spans + +/** + * LLParse graph builder and compiler. + */ +export class LLParse extends source.Builder { + /** + * The prefix controls the names of methods and state struct in generated + * public C headers: + * + * ```c + * // state struct + * struct PREFIX_t { + * ... + * } + * + * int PREFIX_init(PREFIX_t* state); + * int PREFIX_execute(PREFIX_t* state, const char* p, const char* endp); + * ``` + * + * @param prefix Prefix to be used when generating public API. 
+ */ + constructor(private readonly prefix: string = 'llparse') { + super(); + } + + /** + * Compile LLParse graph to the C code and C headers + * + * @param root Root node of the parse graph (see `.node()`) + * @param options Compiler options. + */ + public build(root: source.node.Node, options: ICompilerOptions = {}) + : ICompilerResult { + const c = new Compiler(this.prefix, options); + + return c.compile(root, this.properties); + } +} diff --git a/llparse/src/compiler/header-builder.ts b/llparse/src/compiler/header-builder.ts new file mode 100644 index 0000000..9f5bee7 --- /dev/null +++ b/llparse/src/compiler/header-builder.ts @@ -0,0 +1,80 @@ +import * as frontend from 'llparse-frontend'; +import source = frontend.source; + +export interface IHeaderBuilderOptions { + readonly prefix: string; + readonly headerGuard?: string; + readonly properties: ReadonlyArray<source.Property>; + readonly spans: ReadonlyArray<frontend.SpanField>; +} + +export class HeaderBuilder { + public build(options: IHeaderBuilderOptions): string { + let res = ''; + const PREFIX = options.prefix.toUpperCase().replace(/[^a-z]/gi, '_'); + const DEFINE = options.headerGuard === undefined ? + `INCLUDE_${PREFIX}_H_` : options.headerGuard; + + res += `#ifndef ${DEFINE}\n`; + res += `#define ${DEFINE}\n`; + res += '#ifdef __cplusplus\n'; + res += 'extern "C" {\n'; + res += '#endif\n'; + res += '\n'; + + res += '#include <stdint.h>\n'; + res += '\n'; + + // Structure + res += `typedef struct ${options.prefix}_s ${options.prefix}_t;\n`; + res += `struct ${options.prefix}_s {\n`; + res += ' int32_t _index;\n'; + + for (const [ index, field ] of options.spans.entries()) { + res += ` void* _span_pos${index};\n`; + if (field.callbacks.length > 1) { + res += ` void* _span_cb${index};\n`; + } + } + + res += ' int32_t error;\n'; + res += ' const char* reason;\n'; + res += ' const char* error_pos;\n'; + res += ' void* data;\n'; + res += ' void* _current;\n'; + + for (const prop of options.properties) { + let ty: string; + if (prop.ty === 'i8') { + ty = 'uint8_t'; + } else if (prop.ty === 'i16') { + ty = 'uint16_t'; + } else if (prop.ty === 'i32') { + ty = 'uint32_t'; + } else if (prop.ty === 'i64') { + ty = 'uint64_t'; + } else if (prop.ty === 'ptr') { + ty = 'void*'; + } else { + throw new Error( + `Unknown state property type: "${prop.ty}"`); + } + res += ` ${ty} ${prop.name};\n`; + } + res += '};\n'; + + res += '\n'; + + res += `int ${options.prefix}_init(${options.prefix}_t* s);\n`; + res += `int ${options.prefix}_execute(${options.prefix}_t* s, ` + + 'const char* p, const char* endp);\n'; + + res += '\n'; + res += '#ifdef __cplusplus\n'; + res += '} /* extern "C" *\/\n'; + res += '#endif\n'; + res += `#endif /* ${DEFINE} *\/\n`; + + return res; + } +} diff --git a/llparse/src/compiler/index.ts b/llparse/src/compiler/index.ts new file mode 100644 index 0000000..89c258a --- /dev/null +++ b/llparse/src/compiler/index.ts @@ -0,0 +1,88 @@ +import * as debugAPI from 'debug'; +import * as frontend from 'llparse-frontend'; + +import source = frontend.source; + +import * as cImpl from '../implementation/c'; +import { HeaderBuilder } from './header-builder'; + +const debug = debugAPI('llparse:compiler'); + +export interface ICompilerOptions { + /** + * Debug method name + * + * The method must have the following signature: + * + * ```c + * void debug(llparse_t* state, const char* p, const char* endp, + * const char* msg); + * ``` + * + * Where `llparse_t` is a parser state type. + */ + readonly debug?: string; + + /** + * What guard define to use in `#ifndef` in C headers.
+ * + * Default value: `prefix` argument + */ + readonly headerGuard?: string; + + /** Optional frontend configuration */ + readonly frontend?: frontend.IFrontendLazyOptions; + + /** Optional C-backend configuration */ + readonly c?: cImpl.ICPublicOptions; +} + +export interface ICompilerResult { + /** + * Textual C code + */ + readonly c: string; + + /** + * Textual C header file + */ + readonly header: string; +} + +export class Compiler { + constructor(public readonly prefix: string, + public readonly options: ICompilerOptions) { + } + + public compile(root: source.node.Node, + properties: ReadonlyArray): ICompilerResult { + debug('Combining implementations'); + const container = new frontend.Container(); + + const c = new cImpl.CCompiler(container, Object.assign({ + debug: this.options.debug, + }, this.options.c)); + + debug('Running frontend pass'); + const f = new frontend.Frontend(this.prefix, + container.build(), + this.options.frontend); + const info = f.compile(root, properties); + + debug('Building header'); + const hb = new HeaderBuilder(); + + const header = hb.build({ + headerGuard: this.options.headerGuard, + prefix: this.prefix, + properties, + spans: info.spans, + }); + + debug('Building C'); + return { + header, + c: c.compile(info), + }; + } +} diff --git a/llparse/src/implementation/c/code/and.ts b/llparse/src/implementation/c/code/and.ts new file mode 100644 index 0000000..fdd5434 --- /dev/null +++ b/llparse/src/implementation/c/code/and.ts @@ -0,0 +1,11 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Field } from './field'; + +export class And extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + out.push(`${this.field(ctx)} &= ${this.ref.value};`); + out.push('return 0;'); + } +} diff --git a/llparse/src/implementation/c/code/base.ts b/llparse/src/implementation/c/code/base.ts new file mode 100644 index 0000000..888330d --- /dev/null +++ b/llparse/src/implementation/c/code/base.ts @@ -0,0 +1,12 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; + +export abstract class Code { + protected cachedDecl: string | undefined; + + constructor(public readonly ref: T) { + } + + public abstract build(ctx: Compilation, out: string[]): void; +} diff --git a/llparse/src/implementation/c/code/external.ts b/llparse/src/implementation/c/code/external.ts new file mode 100644 index 0000000..494fc5a --- /dev/null +++ b/llparse/src/implementation/c/code/external.ts @@ -0,0 +1,19 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Code } from './base'; + +export abstract class External + extends Code { + + public build(ctx: Compilation, out: string[]): void { + out.push(`int ${this.ref.name}(`); + out.push(` ${ctx.prefix}_t* s, const unsigned char* p,`); + if (this.ref.signature === 'value') { + out.push(' const unsigned char* endp,'); + out.push(' int value);'); + } else { + out.push(' const unsigned char* endp);'); + } + } +} diff --git a/llparse/src/implementation/c/code/field.ts b/llparse/src/implementation/c/code/field.ts new file mode 100644 index 0000000..51f4439 --- /dev/null +++ b/llparse/src/implementation/c/code/field.ts @@ -0,0 +1,28 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Code } from './base'; + +export abstract class Field extends Code { + public build(ctx: Compilation, out: string[]): void { + out.push(`int 
${this.ref.name}(`); + out.push(` ${ctx.prefix}_t* ${ctx.stateArg()},`); + out.push(` const unsigned char* ${ctx.posArg()},`); + if (this.ref.signature === 'value') { + out.push(` const unsigned char* ${ctx.endPosArg()},`); + out.push(` int ${ctx.matchVar()}) {`); + } else { + out.push(` const unsigned char* ${ctx.endPosArg()}) {`); + } + const tmp: string[] = []; + this.doBuild(ctx, tmp); + ctx.indent(out, tmp, ' '); + out.push('}'); + } + + protected abstract doBuild(ctx: Compilation, out: string[]): void; + + protected field(ctx: Compilation): string { + return `${ctx.stateArg()}->${this.ref.field}`; + } +} diff --git a/llparse/src/implementation/c/code/index.ts b/llparse/src/implementation/c/code/index.ts new file mode 100644 index 0000000..0de5de5 --- /dev/null +++ b/llparse/src/implementation/c/code/index.ts @@ -0,0 +1,27 @@ +import * as frontend from 'llparse-frontend'; + +import { And } from './and'; +import { External } from './external'; +import { IsEqual } from './is-equal'; +import { Load } from './load'; +import { MulAdd } from './mul-add'; +import { Or } from './or'; +import { Store } from './store'; +import { Test } from './test'; +import { Update } from './update'; + +export * from './base'; + +export default { + And, + IsEqual, + Load, + Match: class Match extends External {}, + MulAdd, + Or, + Span: class Span extends External {}, + Store, + Test, + Update, + Value: class Value extends External {}, +}; diff --git a/llparse/src/implementation/c/code/is-equal.ts b/llparse/src/implementation/c/code/is-equal.ts new file mode 100644 index 0000000..f76c2c1 --- /dev/null +++ b/llparse/src/implementation/c/code/is-equal.ts @@ -0,0 +1,10 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Field } from './field'; + +export class IsEqual extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + out.push(`return ${this.field(ctx)} == ${this.ref.value};`); + } +} diff --git a/llparse/src/implementation/c/code/load.ts b/llparse/src/implementation/c/code/load.ts new file mode 100644 index 0000000..b913f23 --- /dev/null +++ b/llparse/src/implementation/c/code/load.ts @@ -0,0 +1,10 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Field } from './field'; + +export class Load extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + out.push(`return ${this.field(ctx)};`); + } +} diff --git a/llparse/src/implementation/c/code/mul-add.ts b/llparse/src/implementation/c/code/mul-add.ts new file mode 100644 index 0000000..fd5ce8c --- /dev/null +++ b/llparse/src/implementation/c/code/mul-add.ts @@ -0,0 +1,67 @@ +import * as assert from 'assert'; +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { SIGNED_LIMITS, UNSIGNED_LIMITS, SIGNED_TYPES } from '../constants'; +import { Field } from './field'; + +export class MulAdd extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + const options = this.ref.options; + const ty = ctx.getFieldType(this.ref.field); + + let field = this.field(ctx); + if (options.signed) { + assert(SIGNED_TYPES.has(ty), `Unexpected mulAdd type "${ty}"`); + const targetTy = SIGNED_TYPES.get(ty)!; + out.push(`${targetTy}* field = (${targetTy}*) &${field};`); + field = '(*field)'; + } + + const match = ctx.matchVar(); + + const limits = options.signed ? 
SIGNED_LIMITS : UNSIGNED_LIMITS; + assert(limits.has(ty), `Unexpected mulAdd type "${ty}"`); + const [ min, max ] = limits.get(ty)!; + + const mulMax = `${max} / ${options.base}`; + const mulMin = `${min} / ${options.base}`; + + out.push('/* Multiplication overflow */'); + out.push(`if (${field} > ${mulMax}) {`); + out.push(' return 1;'); + out.push('}'); + if (options.signed) { + out.push(`if (${field} < ${mulMin}) {`); + out.push(' return 1;'); + out.push('}'); + } + out.push(''); + + out.push(`${field} *= ${options.base};`); + out.push(''); + + out.push('/* Addition overflow */'); + out.push(`if (${match} >= 0) {`); + out.push(` if (${field} > ${max} - ${match}) {`); + out.push(' return 1;'); + out.push(' }'); + out.push('} else {'); + out.push(` if (${field} < ${min} - ${match}) {`); + out.push(' return 1;'); + out.push(' }'); + out.push('}'); + + out.push(`${field} += ${match};`); + + if (options.max !== undefined) { + out.push(''); + out.push('/* Enforce maximum */'); + out.push(`if (${field} > ${options.max}) {`); + out.push(' return 1;'); + out.push('}'); + } + + out.push('return 0;'); + } +} diff --git a/llparse/src/implementation/c/code/or.ts b/llparse/src/implementation/c/code/or.ts new file mode 100644 index 0000000..76b16f9 --- /dev/null +++ b/llparse/src/implementation/c/code/or.ts @@ -0,0 +1,11 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Field } from './field'; + +export class Or extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + out.push(`${this.field(ctx)} |= ${this.ref.value};`); + out.push('return 0;'); + } +} diff --git a/llparse/src/implementation/c/code/store.ts b/llparse/src/implementation/c/code/store.ts new file mode 100644 index 0000000..a37d963 --- /dev/null +++ b/llparse/src/implementation/c/code/store.ts @@ -0,0 +1,11 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Field } from './field'; + +export class Store extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + out.push(`${this.field(ctx)} = ${ctx.matchVar()};`); + out.push('return 0;'); + } +} diff --git a/llparse/src/implementation/c/code/test.ts b/llparse/src/implementation/c/code/test.ts new file mode 100644 index 0000000..36126f5 --- /dev/null +++ b/llparse/src/implementation/c/code/test.ts @@ -0,0 +1,11 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Field } from './field'; + +export class Test extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + const value = this.ref.value; + out.push(`return (${this.field(ctx)} & ${value}) == ${value};`); + } +} diff --git a/llparse/src/implementation/c/code/update.ts b/llparse/src/implementation/c/code/update.ts new file mode 100644 index 0000000..89efedf --- /dev/null +++ b/llparse/src/implementation/c/code/update.ts @@ -0,0 +1,11 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Field } from './field'; + +export class Update extends Field { + protected doBuild(ctx: Compilation, out: string[]): void { + out.push(`${this.field(ctx)} = ${this.ref.value};`); + out.push('return 0;'); + } +} diff --git a/llparse/src/implementation/c/compilation.ts b/llparse/src/implementation/c/compilation.ts new file mode 100644 index 0000000..4df05a6 --- /dev/null +++ b/llparse/src/implementation/c/compilation.ts @@ -0,0 +1,336 @@ +import * as assert from 'assert'; 
+import { Buffer } from 'buffer'; +import * as frontend from 'llparse-frontend'; + +import { + CONTAINER_KEY, STATE_ERROR, + ARG_STATE, ARG_POS, ARG_ENDPOS, + VAR_MATCH, + STATE_PREFIX, LABEL_PREFIX, BLOB_PREFIX, + SEQUENCE_COMPLETE, SEQUENCE_MISMATCH, SEQUENCE_PAUSE, +} from './constants'; +import { Code } from './code'; +import { Node } from './node'; +import { Transform } from './transform'; +import { MatchSequence } from './helpers/match-sequence'; + +// Number of hex words per line of blob declaration +const BLOB_GROUP_SIZE = 11; + +type WrappedNode = frontend.IWrap; + +interface IBlob { + readonly alignment: number | undefined; + readonly buffer: Buffer; + readonly name: string; +} + +// TODO(indutny): deduplicate +export interface ICompilationOptions { + readonly debug?: string; +} + +// TODO(indutny): deduplicate +export interface ICompilationProperty { + readonly name: string; + readonly ty: string; +} + +export class Compilation { + private readonly stateMap: Map> = new Map(); + private readonly blobs: Map = new Map(); + private readonly codeMap: Map> = new Map(); + private readonly matchSequence: + Map = new Map(); + private readonly resumptionTargets: Set = new Set(); + + constructor(public readonly prefix: string, + private readonly properties: ReadonlyArray, + resumptionTargets: ReadonlySet, + private readonly options: ICompilationOptions) { + for (const node of resumptionTargets) { + this.resumptionTargets.add(STATE_PREFIX + node.ref.id.name); + } + } + + private buildStateEnum(out: string[]): void { + out.push('enum llparse_state_e {'); + out.push(` ${STATE_ERROR},`); + for (const stateName of this.stateMap.keys()) { + if (this.resumptionTargets.has(stateName)) { + out.push(` ${stateName},`); + } + } + out.push('};'); + out.push('typedef enum llparse_state_e llparse_state_t;'); + } + + private buildBlobs(out: string[]): void { + if (this.blobs.size === 0) { + return; + } + + for (const blob of this.blobs.values()) { + const buffer = blob.buffer; + let align = ''; + if (blob.alignment) { + align = ` ALIGN(${blob.alignment})`; + } + + if (blob.alignment) { + out.push('#ifdef __SSE4_2__'); + } + out.push(`static const unsigned char${align} ${blob.name}[] = {`); + + for (let i = 0; i < buffer.length; i += BLOB_GROUP_SIZE) { + const limit = Math.min(buffer.length, i + BLOB_GROUP_SIZE); + const hex: string[] = []; + for (let j = i; j < limit; j++) { + const value = buffer[j] as number; + + const ch = String.fromCharCode(value); + // `'`, `\` + if (value === 0x27 || value === 0x5c) { + hex.push(`'\\${ch}'`); + } else if (value >= 0x20 && value <= 0x7e) { + hex.push(`'${ch}'`); + } else { + hex.push(`0x${value.toString(16)}`); + } + } + let line = ' ' + hex.join(', '); + if (limit !== buffer.length) { + line += ','; + } + out.push(line); + } + + out.push(`};`); + if (blob.alignment) { + out.push('#endif /* __SSE4_2__ */'); + } + } + out.push(''); + } + + private buildMatchSequence(out: string[]): void { + if (this.matchSequence.size === 0) { + return; + } + + MatchSequence.buildGlobals(out); + out.push(''); + + for (const match of this.matchSequence.values()) { + match.build(this, out); + out.push(''); + } + } + + public reserveSpans(spans: ReadonlyArray): void { + for (const span of spans) { + for (const callback of span.callbacks) { + this.buildCode(this.unwrapCode(callback)); + } + } + } + + public debug(out: string[], message: string): void { + if (this.options.debug === undefined) { + return; + } + + const args = [ + this.stateArg(), + `(const char*) ${this.posArg()}`, + 
`(const char*) ${this.endPosArg()}`, + ]; + + out.push(`${this.options.debug}(${args.join(', ')},`); + out.push(` ${this.cstring(message)});`); + } + + public buildGlobals(out: string[]): void { + if (this.options.debug !== undefined) { + out.push(`void ${this.options.debug}(`); + out.push(` ${this.prefix}_t* s, const char* p, const char* endp,`); + out.push(' const char* msg);'); + } + + this.buildBlobs(out); + this.buildMatchSequence(out); + this.buildStateEnum(out); + + for (const code of this.codeMap.values()) { + out.push(''); + code.build(this, out); + } + } + + public buildResumptionStates(out: string[]): void { + this.stateMap.forEach((lines, name) => { + if (!this.resumptionTargets.has(name)) { + return; + } + out.push(`case ${name}:`); + out.push(`${LABEL_PREFIX}${name}: {`); + lines.forEach((line) => out.push(` ${line}`)); + out.push(' /* UNREACHABLE */;'); + out.push(' abort();'); + out.push('}'); + }); + } + + public buildInternalStates(out: string[]): void { + this.stateMap.forEach((lines, name) => { + if (this.resumptionTargets.has(name)) { + return; + } + out.push(`${LABEL_PREFIX}${name}: {`); + lines.forEach((line) => out.push(` ${line}`)); + out.push(' /* UNREACHABLE */;'); + out.push(' abort();'); + out.push('}'); + }); + } + + public addState(state: string, lines: ReadonlyArray): void { + assert(!this.stateMap.has(state)); + this.stateMap.set(state, lines); + } + + public buildCode(code: Code): string { + if (this.codeMap.has(code.ref.name)) { + assert.strictEqual(this.codeMap.get(code.ref.name)!, code, + `Code name conflict for "${code.ref.name}"`); + } else { + this.codeMap.set(code.ref.name, code); + } + return code.ref.name; + } + + public getFieldType(field: string): string { + for (const property of this.properties) { + if (property.name === field) { + return property.ty; + } + } + throw new Error(`Field "${field}" not found`); + } + + // Helpers + + public unwrapCode(code: frontend.IWrap) + : Code { + const container = code as frontend.ContainerWrap; + return container.get(CONTAINER_KEY); + } + + public unwrapNode(node: WrappedNode): Node { + const container = node as frontend.ContainerWrap; + return container.get(CONTAINER_KEY); + } + + public unwrapTransform(node: frontend.IWrap) + : Transform { + const container = + node as frontend.ContainerWrap; + return container.get(CONTAINER_KEY); + } + + public indent(out: string[], lines: ReadonlyArray, pad: string) { + for (const line of lines) { + out.push(`${pad}${line}`); + } + } + + // MatchSequence cache + + public getMatchSequence( + transform: frontend.IWrap, select: Buffer) + : string { + const wrap = this.unwrapTransform(transform); + + let res: MatchSequence; + if (this.matchSequence.has(wrap.ref.name)) { + res = this.matchSequence.get(wrap.ref.name)!; + } else { + res = new MatchSequence(wrap); + this.matchSequence.set(wrap.ref.name, res); + } + + return res.getName(); + } + + // Arguments + + public stateArg(): string { + return ARG_STATE; + } + + public posArg(): string { + return ARG_POS; + } + + public endPosArg(): string { + return ARG_ENDPOS; + } + + public matchVar(): string { + return VAR_MATCH; + } + + // State fields + + public indexField(): string { + return this.stateField('_index'); + } + + public currentField(): string { + return this.stateField('_current'); + } + + public errorField(): string { + return this.stateField('error'); + } + + public reasonField(): string { + return this.stateField('reason'); + } + + public errorPosField(): string { + return this.stateField('error_pos'); + } + + 
public spanPosField(index: number): string { + return this.stateField(`_span_pos${index}`); + } + + public spanCbField(index: number): string { + return this.stateField(`_span_cb${index}`); + } + + public stateField(name: string): string { + return `${this.stateArg()}->${name}`; + } + + // Globals + + public cstring(value: string): string { + return JSON.stringify(value); + } + + public blob(value: Buffer, alignment?: number): string { + if (this.blobs.has(value)) { + return this.blobs.get(value)!.name; + } + + const res = BLOB_PREFIX + this.blobs.size; + this.blobs.set(value, { + alignment, + buffer: value, + name: res, + }); + return res; + } +} diff --git a/llparse/src/implementation/c/constants.ts b/llparse/src/implementation/c/constants.ts new file mode 100644 index 0000000..bfd5be3 --- /dev/null +++ b/llparse/src/implementation/c/constants.ts @@ -0,0 +1,45 @@ +export const CONTAINER_KEY = 'c'; + +export const LABEL_PREFIX = ''; +export const STATE_PREFIX = 's_n_'; +export const STATE_ERROR = 's_error'; + +export const BLOB_PREFIX = 'llparse_blob'; + +export const ARG_STATE = 'state'; +export const ARG_POS = 'p'; +export const ARG_ENDPOS = 'endp'; + +export const VAR_MATCH = 'match'; + +// MatchSequence + +export const SEQUENCE_COMPLETE = 'kMatchComplete'; +export const SEQUENCE_MISMATCH = 'kMatchMismatch'; +export const SEQUENCE_PAUSE = 'kMatchPause'; + +export const SIGNED_LIMITS: Map<string, [ string, string ]> = new Map(); +SIGNED_LIMITS.set('i8', [ '-0x80', '0x7f' ]); +SIGNED_LIMITS.set('i16', [ '-0x8000', '0x7fff' ]); +SIGNED_LIMITS.set('i32', [ '(-0x7fffffff - 1)', '0x7fffffff' ]); +SIGNED_LIMITS.set('i64', [ '(-0x7fffffffffffffffLL - 1)', + '0x7fffffffffffffffLL' ]); + +export const UNSIGNED_LIMITS: Map<string, [ string, string ]> = new Map(); +UNSIGNED_LIMITS.set('i8', [ '0', '0xff' ]); +UNSIGNED_LIMITS.set('i16', [ '0', '0xffff' ]); +UNSIGNED_LIMITS.set('i32', [ '0', '0xffffffff' ]); +UNSIGNED_LIMITS.set('i64', [ '0ULL', '0xffffffffffffffffULL' ]); + +export const UNSIGNED_TYPES: Map<string, string> = new Map(); +UNSIGNED_TYPES.set('i8', 'uint8_t'); +UNSIGNED_TYPES.set('i16', 'uint16_t'); +UNSIGNED_TYPES.set('i32', 'uint32_t'); +UNSIGNED_TYPES.set('i64', 'uint64_t'); + +export const SIGNED_TYPES: Map<string, string> = new Map(); +SIGNED_TYPES.set('i8', 'int8_t'); +SIGNED_TYPES.set('i16', 'int16_t'); +SIGNED_TYPES.set('i32', 'int32_t'); +SIGNED_TYPES.set('i64', 'int64_t'); diff --git a/llparse/src/implementation/c/helpers/match-sequence.ts b/llparse/src/implementation/c/helpers/match-sequence.ts new file mode 100644 index 0000000..278f4b5 --- /dev/null +++ b/llparse/src/implementation/c/helpers/match-sequence.ts @@ -0,0 +1,75 @@ +import * as assert from 'assert'; +import { Buffer } from 'buffer'; +import * as frontend from 'llparse-frontend'; + +import { + SEQUENCE_COMPLETE, SEQUENCE_MISMATCH, SEQUENCE_PAUSE, +} from '../constants'; +import { Transform } from '../transform'; +import { Compilation } from '../compilation'; + +type TransformWrap = Transform<frontend.transform.Transform>; + +export class MatchSequence { + constructor(private readonly transform: TransformWrap) { + } + + public static buildGlobals(out: string[]): void { + out.push('enum llparse_match_status_e {'); + out.push(` ${SEQUENCE_COMPLETE},`); + out.push(` ${SEQUENCE_PAUSE},`); + out.push(` ${SEQUENCE_MISMATCH}`); + out.push('};'); + out.push('typedef enum llparse_match_status_e llparse_match_status_t;'); + out.push(''); + out.push('struct llparse_match_s {'); + out.push(' llparse_match_status_t status;'); + out.push(' const unsigned char* current;'); + out.push('};'); +
out.push('typedef struct llparse_match_s llparse_match_t;'); + } + + public getName(): string { + return `llparse__match_sequence_${this.transform.ref.name}`; + } + + public build(ctx: Compilation, out: string[]): void { + out.push(`static llparse_match_t ${this.getName()}(`); + out.push(` ${ctx.prefix}_t* s, const unsigned char* p,`); + out.push(' const unsigned char* endp,'); + out.push(' const unsigned char* seq, uint32_t seq_len) {'); + + // Vars + out.push(' uint32_t index;'); + out.push(' llparse_match_t res;'); + out.push(''); + + // Body + out.push(' index = s->_index;'); + out.push(' for (; p != endp; p++) {'); + out.push(' unsigned char current;'); + out.push(''); + out.push(` current = ${this.transform.build(ctx, '*p')};`); + out.push(' if (current == seq[index]) {'); + out.push(' if (++index == seq_len) {'); + out.push(` res.status = ${SEQUENCE_COMPLETE};`); + out.push(' goto reset;'); + out.push(' }'); + out.push(' } else {'); + out.push(` res.status = ${SEQUENCE_MISMATCH};`); + out.push(' goto reset;'); + out.push(' }'); + out.push(' }'); + + out.push(' s->_index = index;'); + out.push(` res.status = ${SEQUENCE_PAUSE};`); + out.push(' res.current = p;'); + out.push(' return res;'); + + out.push('reset:'); + out.push(' s->_index = 0;'); + out.push(' res.current = p;'); + out.push(' return res;'); + out.push('}'); + } +} diff --git a/llparse/src/implementation/c/index.ts b/llparse/src/implementation/c/index.ts new file mode 100644 index 0000000..ae94d34 --- /dev/null +++ b/llparse/src/implementation/c/index.ts @@ -0,0 +1,199 @@ +import * as frontend from 'llparse-frontend'; + +import { + ARG_STATE, ARG_POS, ARG_ENDPOS, + STATE_ERROR, + VAR_MATCH, + CONTAINER_KEY, +} from './constants'; +import { Compilation } from './compilation'; +import code from './code'; +import node from './node'; +import { Node } from './node'; +import transform from './transform'; + +export interface ICCompilerOptions { + readonly debug?: string; + readonly header?: string; +} + +export interface ICPublicOptions { + readonly header?: string; +} + +export class CCompiler { + constructor(container: frontend.Container, + public readonly options: ICCompilerOptions) { + container.add(CONTAINER_KEY, { code, node, transform }); + } + + public compile(info: frontend.IFrontendResult): string { + const compilation = new Compilation(info.prefix, info.properties, + info.resumptionTargets, this.options); + const out: string[] = []; + + out.push('#include <stdlib.h>'); + out.push('#include <stdint.h>'); + out.push('#include <string.h>'); + out.push(''); + + // NOTE: Inspired by https://github.com/h2o/picohttpparser + // TODO(indutny): Windows support for SSE4.2.
+ // See: https://github.com/nodejs/llparse/pull/24#discussion_r299789676 + // (There is no `__SSE4_2__` define for MSVC) + out.push('#ifdef __SSE4_2__'); + out.push(' #ifdef _MSC_VER'); + out.push(' #include <nmmintrin.h>'); + out.push(' #else /* !_MSC_VER */'); + out.push(' #include <x86intrin.h>'); + out.push(' #endif /* _MSC_VER */'); + out.push('#endif /* __SSE4_2__ */'); + out.push(''); + + out.push('#ifdef _MSC_VER'); + out.push(' #define ALIGN(n) _declspec(align(n))'); + out.push('#else /* !_MSC_VER */'); + out.push(' #define ALIGN(n) __attribute__((aligned(n)))'); + out.push('#endif /* _MSC_VER */'); + + out.push(''); + out.push(`#include "${this.options.header || info.prefix}.h"`); + out.push(``); + out.push(`typedef int (*${info.prefix}__span_cb)(`); + out.push(` ${info.prefix}_t*, const char*, const char*);`); + out.push(''); + + // Queue span callbacks to be built before `executeSpans()` code gets called + // below. + compilation.reserveSpans(info.spans); + + const root = info.root as frontend.ContainerWrap<frontend.node.Node>; + const rootState = root.get<Node<frontend.node.Node>>(CONTAINER_KEY) + .build(compilation); + + compilation.buildGlobals(out); + out.push(''); + + out.push(`int ${info.prefix}_init(${info.prefix}_t* ${ARG_STATE}) {`); + out.push(` memset(${ARG_STATE}, 0, sizeof(*${ARG_STATE}));`); + out.push(` ${ARG_STATE}->_current = (void*) (intptr_t) ${rootState};`); + out.push(' return 0;'); + out.push('}'); + out.push(''); + + out.push(`static llparse_state_t ${info.prefix}__run(`); + out.push(` ${info.prefix}_t* ${ARG_STATE},`); + out.push(` const unsigned char* ${ARG_POS},`); + out.push(` const unsigned char* ${ARG_ENDPOS}) {`); + out.push(` int ${VAR_MATCH};`); + out.push(` switch ((llparse_state_t) (intptr_t) ` + + `${compilation.currentField()}) {`); + + let tmp: string[] = []; + compilation.buildResumptionStates(tmp); + compilation.indent(out, tmp, ' '); + + out.push(' default:'); + out.push(' /* UNREACHABLE */'); + out.push(' abort();'); + out.push(' }'); + + tmp = []; + compilation.buildInternalStates(tmp); + compilation.indent(out, tmp, ' '); + + out.push('}'); + out.push(''); + + + out.push(`int ${info.prefix}_execute(${info.prefix}_t* ${ARG_STATE}, ` + + `const char* ${ARG_POS}, const char* ${ARG_ENDPOS}) {`); + out.push(' llparse_state_t next;'); + out.push(''); + + out.push(' /* check lingering errors */'); + out.push(` if (${compilation.errorField()} != 0) {`); + out.push(` return ${compilation.errorField()};`); + out.push(' }'); + out.push(''); + + tmp = []; + this.restartSpans(compilation, info, tmp); + compilation.indent(out, tmp, ' '); + + const args = [ + compilation.stateArg(), + `(const unsigned char*) ${compilation.posArg()}`, + `(const unsigned char*) ${compilation.endPosArg()}`, + ]; + out.push(` next = ${info.prefix}__run(${args.join(', ')});`); + out.push(` if (next == ${STATE_ERROR}) {`); + out.push(` return ${compilation.errorField()};`); + out.push(' }'); + out.push(` ${compilation.currentField()} = (void*) (intptr_t) next;`); + out.push(''); + + tmp = []; + this.executeSpans(compilation, info, tmp); + compilation.indent(out, tmp, ' '); + + out.push(' return 0;'); + out.push('}'); + + return out.join('\n'); + } + + private restartSpans(ctx: Compilation, info: frontend.IFrontendResult, + out: string[]): void { + if (info.spans.length === 0) { + return; + } + + out.push('/* restart spans */'); + for (const span of info.spans) { + const posField = ctx.spanPosField(span.index); + + out.push(`if (${posField} != NULL) {`); + out.push(` ${posField} = (void*) ${ctx.posArg()};`); + out.push('}'); + } +
out.push(''); + } + + private executeSpans(ctx: Compilation, info: frontend.IFrontendResult, + out: string[]): void { + if (info.spans.length === 0) { + return; + } + + out.push('/* execute spans */'); + for (const span of info.spans) { + const posField = ctx.spanPosField(span.index); + let callback: string; + if (span.callbacks.length === 1) { + callback = ctx.buildCode(ctx.unwrapCode(span.callbacks[0])); + } else { + callback = `(${info.prefix}__span_cb) ` + ctx.spanCbField(span.index); + callback = `(${callback})`; + } + + const args = [ + ctx.stateArg(), posField, `(const char*) ${ctx.endPosArg()}`, + ]; + + out.push(`if (${posField} != NULL) {`); + out.push(' int error;'); + out.push(''); + out.push(` error = ${callback}(${args.join(', ')});`); + + // TODO(indutny): de-duplicate this here and in SpanEnd + out.push(' if (error != 0) {'); + out.push(` ${ctx.errorField()} = error;`); + out.push(` ${ctx.errorPosField()} = ${ctx.endPosArg()};`); + out.push(' return error;'); + out.push(' }'); + out.push('}'); + } + out.push(''); + } +} diff --git a/llparse/src/implementation/c/node/base.ts b/llparse/src/implementation/c/node/base.ts new file mode 100644 index 0000000..51f90bb --- /dev/null +++ b/llparse/src/implementation/c/node/base.ts @@ -0,0 +1,77 @@ +import * as assert from 'assert'; +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { + STATE_PREFIX, LABEL_PREFIX, +} from '../constants'; + +export interface INodeEdge { + readonly node: frontend.IWrap; + readonly noAdvance: boolean; + readonly value?: number; +} + +export abstract class Node { + protected cachedDecl: string | undefined; + protected privCompilation: Compilation | undefined; + + constructor(public readonly ref: T) { + } + + public build(compilation: Compilation): string { + if (this.cachedDecl !== undefined) { + return this.cachedDecl; + } + + const res = STATE_PREFIX + this.ref.id.name; + this.cachedDecl = res; + + this.privCompilation = compilation; + + const out: string[] = []; + compilation.debug(out, + `Entering node "${this.ref.id.originalName}" ("${this.ref.id.name}")`); + this.doBuild(out); + + compilation.addState(res, out); + + return res; + } + + protected get compilation(): Compilation { + assert(this.privCompilation !== undefined); + return this.privCompilation!; + } + + protected prologue(out: string[]): void { + const ctx = this.compilation; + + out.push(`if (${ctx.posArg()} == ${ctx.endPosArg()}) {`); + + const tmp: string[] = []; + this.pause(tmp); + this.compilation.indent(out, tmp, ' '); + + out.push('}'); + } + + protected pause(out: string[]): void { + out.push(`return ${this.cachedDecl};`); + } + + protected tailTo(out: string[], edge: INodeEdge): void { + const ctx = this.compilation; + const target = ctx.unwrapNode(edge.node).build(ctx); + + if (!edge.noAdvance) { + out.push(`${ctx.posArg()}++;`); + } + if (edge.value !== undefined) { + out.push(`${ctx.matchVar()} = ${edge.value};`); + } + out.push(`goto ${LABEL_PREFIX}${target};`); + } + + protected abstract doBuild(out: string[]): void; +} diff --git a/llparse/src/implementation/c/node/consume.ts b/llparse/src/implementation/c/node/consume.ts new file mode 100644 index 0000000..658a00e --- /dev/null +++ b/llparse/src/implementation/c/node/consume.ts @@ -0,0 +1,48 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Node } from './base'; + +export class Consume extends Node { + public doBuild(out: string[]): void { + const ctx = 
this.compilation; + + const index = ctx.stateField(this.ref.field); + const ty = ctx.getFieldType(this.ref.field); + + let fieldTy: string; + if (ty === 'i64') { + fieldTy = 'uint64_t'; + } else if (ty === 'i32') { + fieldTy = 'uint32_t'; + } else if (ty === 'i16') { + fieldTy = 'uint16_t'; + } else if (ty === 'i8') { + fieldTy = 'uint8_t'; + } else { + throw new Error( + `Unsupported type ${ty} of field ${this.ref.field} for consume node`); + } + + out.push('size_t avail;'); + out.push(`${fieldTy} need;`); + + out.push(''); + out.push(`avail = ${ctx.endPosArg()} - ${ctx.posArg()};`); + out.push(`need = ${index};`); + + // Note: `avail` or `need` are going to coerced to the largest + // datatype needed to hold either of the values. + out.push('if (avail >= need) {'); + out.push(` p += need;`); + out.push(` ${index} = 0;`); + const tmp: string[] = []; + this.tailTo(tmp, this.ref.otherwise!); + ctx.indent(out, tmp, ' '); + out.push('}'); + out.push(''); + + out.push(`${index} -= avail;`); + this.pause(out); + } +} diff --git a/llparse/src/implementation/c/node/empty.ts b/llparse/src/implementation/c/node/empty.ts new file mode 100644 index 0000000..e28ecb5 --- /dev/null +++ b/llparse/src/implementation/c/node/empty.ts @@ -0,0 +1,16 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Node } from './base'; + +export class Empty extends Node { + public doBuild(out: string[]): void { + const otherwise = this.ref.otherwise!; + + if (!otherwise.noAdvance) { + this.prologue(out); + } + + this.tailTo(out, otherwise); + } +} diff --git a/llparse/src/implementation/c/node/error.ts b/llparse/src/implementation/c/node/error.ts new file mode 100644 index 0000000..29dce63 --- /dev/null +++ b/llparse/src/implementation/c/node/error.ts @@ -0,0 +1,33 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { STATE_ERROR } from '../constants'; +import { Node } from './base'; + +class ErrorNode extends Node { + protected storeError(out: string[]): void { + const ctx = this.compilation; + + let hexCode: string; + if (this.ref.code < 0) { + hexCode = `-0x` + this.ref.code.toString(16); + } else { + hexCode = '0x' + this.ref.code.toString(16); + } + + out.push(`${ctx.errorField()} = ${hexCode};`); + out.push(`${ctx.reasonField()} = ${ctx.cstring(this.ref.reason)};`); + out.push(`${ctx.errorPosField()} = (const char*) ${ctx.posArg()};`); + } + + public doBuild(out: string[]): void { + this.storeError(out); + + // Non-recoverable state + out.push(`${this.compilation.currentField()} = ` + + `(void*) (intptr_t) ${STATE_ERROR};`); + out.push(`return ${STATE_ERROR};`); + } +} + +export { ErrorNode as Error }; diff --git a/llparse/src/implementation/c/node/index.ts b/llparse/src/implementation/c/node/index.ts new file mode 100644 index 0000000..ba751d9 --- /dev/null +++ b/llparse/src/implementation/c/node/index.ts @@ -0,0 +1,27 @@ +import * as frontend from 'llparse-frontend'; + +import { Consume } from './consume'; +import { Empty } from './empty'; +import { Error as ErrorNode } from './error'; +import { Invoke } from './invoke'; +import { Pause } from './pause'; +import { Sequence } from './sequence'; +import { Single } from './single'; +import { SpanEnd } from './span-end'; +import { SpanStart } from './span-start'; +import { TableLookup } from './table-lookup'; + +export { Node } from './base'; + +export default { + Consume, + Empty, + Error: class Error extends ErrorNode {}, + Invoke, + Pause, + Sequence, + 
Single, + SpanEnd, + SpanStart, + TableLookup, +}; diff --git a/llparse/src/implementation/c/node/invoke.ts b/llparse/src/implementation/c/node/invoke.ts new file mode 100644 index 0000000..ee917e9 --- /dev/null +++ b/llparse/src/implementation/c/node/invoke.ts @@ -0,0 +1,44 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Node } from './base'; + +export class Invoke extends Node { + public doBuild(out: string[]): void { + const ctx = this.compilation; + + const code = ctx.unwrapCode(this.ref.code); + const codeDecl = ctx.buildCode(code); + + const args: string[] = [ + ctx.stateArg(), + ctx.posArg(), + ctx.endPosArg(), + ]; + + const signature = code.ref.signature; + if (signature === 'value') { + args.push(ctx.matchVar()); + } + + out.push(`switch (${codeDecl}(${args.join(', ')})) {`); + let tmp: string[]; + + for (const edge of this.ref.edges) { + out.push(` case ${edge.code}:`); + tmp = []; + this.tailTo(tmp, { + noAdvance: true, + node: edge.node, + value: undefined, + }); + ctx.indent(out, tmp, ' '); + } + + out.push(' default:'); + tmp = []; + this.tailTo(tmp, this.ref.otherwise!); + ctx.indent(out, tmp, ' '); + out.push('}'); + } +} diff --git a/llparse/src/implementation/c/node/pause.ts b/llparse/src/implementation/c/node/pause.ts new file mode 100644 index 0000000..c239b46 --- /dev/null +++ b/llparse/src/implementation/c/node/pause.ts @@ -0,0 +1,19 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { STATE_ERROR } from '../constants'; +import { Error as ErrorNode } from './error'; + +export class Pause extends ErrorNode { + public doBuild(out: string[]): void { + const ctx = this.compilation; + + this.storeError(out); + + // Recoverable state + const otherwise = ctx.unwrapNode(this.ref.otherwise!.node).build(ctx); + out.push(`${ctx.currentField()} = ` + + `(void*) (intptr_t) ${otherwise};`); + out.push(`return ${STATE_ERROR};`); + } +} diff --git a/llparse/src/implementation/c/node/sequence.ts b/llparse/src/implementation/c/node/sequence.ts new file mode 100644 index 0000000..73d8816 --- /dev/null +++ b/llparse/src/implementation/c/node/sequence.ts @@ -0,0 +1,55 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { + SEQUENCE_COMPLETE, SEQUENCE_MISMATCH, SEQUENCE_PAUSE, +} from '../constants'; +import { Node } from './base'; + +export class Sequence extends Node { + public doBuild(out: string[]): void { + const ctx = this.compilation; + + out.push('llparse_match_t match_seq;'); + out.push(''); + + this.prologue(out); + + const matchSequence = ctx.getMatchSequence(this.ref.transform!, + this.ref.select); + + out.push(`match_seq = ${matchSequence}(${ctx.stateArg()}, ` + + `${ctx.posArg()}, ` + + `${ctx.endPosArg()}, ${ctx.blob(this.ref.select)}, ` + + `${this.ref.select.length});`); + out.push('p = match_seq.current;'); + + let tmp: string[]; + + out.push('switch (match_seq.status) {'); + + out.push(` case ${SEQUENCE_COMPLETE}: {`); + tmp = []; + this.tailTo(tmp, { + noAdvance: false, + node: this.ref.edge!.node, + value: this.ref.edge!.value, + }); + ctx.indent(out, tmp, ' '); + out.push(' }'); + + out.push(` case ${SEQUENCE_PAUSE}: {`); + tmp = []; + this.pause(tmp); + ctx.indent(out, tmp, ' '); + out.push(' }'); + + out.push(` case ${SEQUENCE_MISMATCH}: {`); + tmp = []; + this.tailTo(tmp, this.ref.otherwise!); + ctx.indent(out, tmp, ' '); + out.push(' }'); + + out.push('}'); + } +} diff --git 
a/llparse/src/implementation/c/node/single.ts b/llparse/src/implementation/c/node/single.ts new file mode 100644 index 0000000..b9c8811 --- /dev/null +++ b/llparse/src/implementation/c/node/single.ts @@ -0,0 +1,47 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Node } from './base'; + +export class Single extends Node { + public doBuild(out: string[]): void { + const ctx = this.compilation; + + const otherwise = this.ref.otherwise!; + + this.prologue(out); + + const transform = ctx.unwrapTransform(this.ref.transform!); + const current = transform.build(ctx, `*${ctx.posArg()}`); + + out.push(`switch (${current}) {`); + this.ref.edges.forEach((edge) => { + let ch: string; + + // Non-printable ASCII, or single-quote, or backslash + if (edge.key < 0x20 || edge.key > 0x7e || edge.key === 0x27 || + edge.key === 0x5c) { + ch = edge.key.toString(); + } else { + ch = `'${String.fromCharCode(edge.key)}'`; + } + out.push(` case ${ch}: {`); + + const tmp: string[] = []; + this.tailTo(tmp, edge); + ctx.indent(out, tmp, ' '); + + out.push(' }'); + }); + + out.push(` default: {`); + + const tmp: string[] = []; + this.tailTo(tmp, otherwise); + ctx.indent(out, tmp, ' '); + + out.push(' }'); + + out.push(`}`); + } +} diff --git a/llparse/src/implementation/c/node/span-end.ts b/llparse/src/implementation/c/node/span-end.ts new file mode 100644 index 0000000..09f97e5 --- /dev/null +++ b/llparse/src/implementation/c/node/span-end.ts @@ -0,0 +1,56 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { STATE_ERROR } from '../constants'; +import { Node } from './base'; + +export class SpanEnd extends Node { + public doBuild(out: string[]): void { + out.push('const unsigned char* start;'); + out.push('int err;'); + out.push(''); + + const ctx = this.compilation; + const field = this.ref.field; + const posField = ctx.spanPosField(field.index); + + // Load start position + out.push(`start = ${posField};`); + + // ...and reset + out.push(`${posField} = NULL;`); + + // Invoke callback + const callback = ctx.buildCode(ctx.unwrapCode(this.ref.callback)); + out.push(`err = ${callback}(${ctx.stateArg()}, start, ${ctx.posArg()});`); + + out.push('if (err != 0) {'); + const tmp: string[] = []; + this.buildError(tmp, 'err'); + ctx.indent(out, tmp, ' '); + out.push('}'); + + const otherwise = this.ref.otherwise!; + this.tailTo(out, otherwise); + } + + private buildError(out: string[], code: string) { + const ctx = this.compilation; + + out.push(`${ctx.errorField()} = ${code};`); + + const otherwise = this.ref.otherwise!; + let resumePos = ctx.posArg(); + if (!otherwise.noAdvance) { + resumePos = `(${resumePos} + 1)`; + } + + out.push(`${ctx.errorPosField()} = (const char*) ${resumePos};`); + + const resumptionTarget = ctx.unwrapNode(otherwise.node).build(ctx); + out.push(`${ctx.currentField()} = ` + + `(void*) (intptr_t) ${resumptionTarget};`); + + out.push(`return ${STATE_ERROR};`); + } +} diff --git a/llparse/src/implementation/c/node/span-start.ts b/llparse/src/implementation/c/node/span-start.ts new file mode 100644 index 0000000..445da67 --- /dev/null +++ b/llparse/src/implementation/c/node/span-start.ts @@ -0,0 +1,26 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Node } from './base'; + +export class SpanStart extends Node { + public doBuild(out: string[]): void { + // Prevent spurious empty spans + this.prologue(out); + + const ctx = 
this.compilation; + const field = this.ref.field; + + const posField = ctx.spanPosField(field.index); + out.push(`${posField} = (void*) ${ctx.posArg()};`); + + if (field.callbacks.length > 1) { + const cbField = ctx.spanCbField(field.index); + const callback = ctx.unwrapCode(this.ref.callback); + out.push(`${cbField} = ${ctx.buildCode(callback)};`); + } + + const otherwise = this.ref.otherwise!; + this.tailTo(out, otherwise); + } +} diff --git a/llparse/src/implementation/c/node/table-lookup.ts b/llparse/src/implementation/c/node/table-lookup.ts new file mode 100644 index 0000000..6a400a3 --- /dev/null +++ b/llparse/src/implementation/c/node/table-lookup.ts @@ -0,0 +1,196 @@ +import * as assert from 'assert'; +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Node } from './base'; + +const MAX_CHAR = 0xff; +const TABLE_GROUP = 16; + +// _mm_cmpestri takes 8 ranges +const SSE_RANGES_LEN = 16; +// _mm_cmpestri takes 128bit input +const SSE_RANGES_PAD = 16; +const MAX_SSE_CALLS = 2; +const SSE_ALIGNMENT = 16; + +interface ITable { + readonly name: string; + readonly declaration: ReadonlyArray; +} + +export class TableLookup extends Node { + public doBuild(out: string[]): void { + const ctx = this.compilation; + + const table = this.buildTable(); + for (const line of table.declaration) { + out.push(line); + } + + this.prologue(out); + + const transform = ctx.unwrapTransform(this.ref.transform!); + + // Try to vectorize nodes matching characters and looping to themselves + // NOTE: `switch` below triggers when there is not enough characters in the + // stream for vectorized processing. + this.buildSSE(out); + + const current = transform.build(ctx, `*${ctx.posArg()}`); + out.push(`switch (${table.name}[(uint8_t) ${current}]) {`); + + for (const [ index, edge ] of this.ref.edges.entries()) { + out.push(` case ${index + 1}: {`); + + const tmp: string[] = []; + const edge = this.ref.edges[index]; + this.tailTo(tmp, { + noAdvance: edge.noAdvance, + node: edge.node, + value: undefined, + }); + ctx.indent(out, tmp, ' '); + + out.push(' }'); + } + + out.push(` default: {`); + + const tmp: string[] = []; + this.tailTo(tmp, this.ref.otherwise!); + ctx.indent(out, tmp, ' '); + + out.push(' }'); + out.push('}'); + } + + private buildSSE(out: string[]): boolean { + const ctx = this.compilation; + + // Transformation is not supported atm + if (this.ref.transform && this.ref.transform.ref.name !== 'id') { + return false; + } + + if (this.ref.edges.length !== 1) { + return false; + } + + const edge = this.ref.edges[0]; + if (edge.node.ref !== this.ref) { + return false; + } + + // NOTE: keys are sorted + let ranges: number[] = []; + let first: number | undefined; + let last: number | undefined; + for (const key of edge.keys) { + if (first === undefined) { + first = key; + } + if (last === undefined) { + last = key; + } + + if (key - last > 1) { + ranges.push(first, last); + first = key; + } + last = key; + } + if (first !== undefined && last !== undefined) { + ranges.push(first, last); + } + + if (ranges.length === 0) { + return false; + } + + // Way too many calls would be required + if (ranges.length > MAX_SSE_CALLS * SSE_RANGES_LEN) { + return false; + } + + out.push('#ifdef __SSE4_2__'); + out.push(`if (${ctx.endPosArg()} - ${ctx.posArg()} >= 16) {`); + out.push(' __m128i ranges;'); + out.push(' __m128i input;'); + out.push(' int avail;'); + out.push(' int match_len;'); + out.push(''); + out.push(' /* Load input */'); + out.push(` input = 
_mm_loadu_si128((__m128i const*) ${ctx.posArg()});`); + for (let off = 0; off < ranges.length; off += SSE_RANGES_LEN) { + const subRanges = ranges.slice(off, off + SSE_RANGES_LEN); + + let paddedRanges = subRanges.slice(); + while (paddedRanges.length < SSE_RANGES_PAD) { + paddedRanges.push(0); + } + + const blob = ctx.blob(Buffer.from(paddedRanges), SSE_ALIGNMENT); + out.push(` ranges = _mm_loadu_si128((__m128i const*) ${blob});`); + out.push(''); + + out.push(' /* Find first character that does not match `ranges` */'); + out.push(` match_len = _mm_cmpestri(ranges, ${subRanges.length},`); + out.push(' input, 16,'); + out.push(' _SIDD_UBYTE_OPS | _SIDD_CMP_RANGES |'); + out.push(' _SIDD_NEGATIVE_POLARITY);'); + out.push(''); + out.push(' if (match_len != 0) {'); + out.push(` ${ctx.posArg()} += match_len;`); + + const tmp: string[] = []; + assert.strictEqual(edge.noAdvance, false); + this.tailTo(tmp, { + noAdvance: true, + node: edge.node, + }); + ctx.indent(out, tmp, ' '); + + out.push(' }'); + } + + { + const tmp: string[] = []; + this.tailTo(tmp, this.ref.otherwise!); + ctx.indent(out, tmp, ' '); + } + out.push('}'); + + out.push('#endif /* __SSE4_2__ */'); + + return true; + } + + private buildTable(): ITable { + const table: number[] = new Array(MAX_CHAR + 1).fill(0); + + for (const [ index, edge ] of this.ref.edges.entries()) { + edge.keys.forEach((key) => { + assert.strictEqual(table[key], 0); + table[key] = index + 1; + }); + } + + const lines = [ + 'static uint8_t lookup_table[] = {', + ]; + for (let i = 0; i < table.length; i += TABLE_GROUP) { + let line = ` ${table.slice(i, i + TABLE_GROUP).join(', ')}`; + if (i + TABLE_GROUP < table.length) { + line += ','; + } + lines.push(line); + } + lines.push('};'); + + return { + name: 'lookup_table', + declaration: lines, + }; + } +} diff --git a/llparse/src/implementation/c/transform/base.ts b/llparse/src/implementation/c/transform/base.ts new file mode 100644 index 0000000..82028d5 --- /dev/null +++ b/llparse/src/implementation/c/transform/base.ts @@ -0,0 +1,10 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; + +export abstract class Transform { + constructor(public readonly ref: T) { + } + + public abstract build(ctx: Compilation, value: string): string; +} diff --git a/llparse/src/implementation/c/transform/id.ts b/llparse/src/implementation/c/transform/id.ts new file mode 100644 index 0000000..6c6105f --- /dev/null +++ b/llparse/src/implementation/c/transform/id.ts @@ -0,0 +1,11 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Transform } from './base'; + +export class ID extends Transform { + public build(ctx: Compilation, value: string): string { + // Identity transformation + return value; + } +} diff --git a/llparse/src/implementation/c/transform/index.ts b/llparse/src/implementation/c/transform/index.ts new file mode 100644 index 0000000..c13ba50 --- /dev/null +++ b/llparse/src/implementation/c/transform/index.ts @@ -0,0 +1,11 @@ +import { ID } from './id'; +import { ToLower } from './to-lower'; +import { ToLowerUnsafe } from './to-lower-unsafe'; + +export { Transform } from './base'; + +export default { + ID, + ToLower, + ToLowerUnsafe, +}; diff --git a/llparse/src/implementation/c/transform/to-lower-unsafe.ts b/llparse/src/implementation/c/transform/to-lower-unsafe.ts new file mode 100644 index 0000000..27f608c --- /dev/null +++ b/llparse/src/implementation/c/transform/to-lower-unsafe.ts @@ -0,0 +1,10 @@ +import * as 
frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Transform } from './base'; + +export class ToLowerUnsafe extends Transform { + public build(ctx: Compilation, value: string): string { + return `((${value}) | 0x20)`; + } +} diff --git a/llparse/src/implementation/c/transform/to-lower.ts b/llparse/src/implementation/c/transform/to-lower.ts new file mode 100644 index 0000000..f639ef1 --- /dev/null +++ b/llparse/src/implementation/c/transform/to-lower.ts @@ -0,0 +1,11 @@ +import * as frontend from 'llparse-frontend'; + +import { Compilation } from '../compilation'; +import { Transform } from './base'; + +export class ToLower extends Transform { + public build(ctx: Compilation, value: string): string { + return `((${value}) >= 'A' && (${value}) <= 'Z' ? ` + + `(${value} | 0x20) : (${value}))`; + } +} diff --git a/llparse/test/code-test.ts b/llparse/test/code-test.ts new file mode 100644 index 0000000..54c3f85 --- /dev/null +++ b/llparse/test/code-test.ts @@ -0,0 +1,168 @@ +import * as assert from 'assert'; + +import { LLParse } from '../src/api'; + +import { build, NUM_SELECT, printMatch, printOff } from './fixtures'; + +describe('llparse/code', () => { + let p: LLParse; + + beforeEach(() => { + p = new LLParse(); + }); + + describe('`.mulAdd()`', () => { + it('should operate normally', async () => { + const start = p.node('start'); + const dot = p.node('dot'); + + p.property('i64', 'counter'); + + const is1337 = p.invoke(p.code.load('counter'), { + 1337: printOff(p, p.invoke(p.code.update('counter', 0), start)), + }, p.error(1, 'Invalid result')); + + const count = p.invoke(p.code.mulAdd('counter', { base: 10 }), start); + + start + .select(NUM_SELECT, count) + .otherwise(dot); + + dot + .match('.', is1337) + .otherwise(p.error(1, 'Unexpected')); + + const binary = await build(p, start, 'mul-add'); + await binary.check('1337.', 'off=5\n'); + }); + + it('should operate fail on overflow', async () => { + const start = p.node('start'); + + p.property('i8', 'counter'); + + const count = p.invoke(p.code.mulAdd('counter', { base: 10 }), { + 1: printOff(p, start), + }, start); + + start + .select(NUM_SELECT, count) + .otherwise(p.error(1, 'Unexpected')); + + const binary = await build(p, start, 'mul-add-overflow'); + await binary.check('1111', 'off=4\n'); + }); + + it('should operate fail on greater than max', async () => { + const start = p.node('start'); + + p.property('i64', 'counter'); + + const count = p.invoke(p.code.mulAdd('counter', { + base: 10, + max: 1000, + }), { + 1: printOff(p, start), + }, start); + + start + .select(NUM_SELECT, count) + .otherwise(p.error(1, 'Unexpected')); + + const binary = await build(p, start, 'mul-add-max-overflow'); + await binary.check('1111', 'off=4\n'); + }); + }); + + describe('`.update()`', () => { + it('should operate normally', async () => { + const start = p.node('start'); + + p.property('i64', 'counter'); + + const update = p.invoke(p.code.update('counter', 42)); + + start + .skipTo(update); + + update + .otherwise(p.invoke(p.code.load('counter'), { + 42: printOff(p, start), + }, p.error(1, 'Unexpected'))); + + const binary = await build(p, start, 'update'); + await binary.check('.', 'off=1\n'); + }); + }); + + describe('`.isEqual()`', () => { + it('should operate normally', async () => { + const start = p.node('start'); + + p.property('i64', 'counter'); + + const check = p.invoke(p.code.isEqual('counter', 1), { + 0: printOff(p, start), + 1: start, + }, p.error(1, 'Unexpected')); + + start + 
.select(NUM_SELECT, p.invoke(p.code.store('counter'), check)) + .otherwise(p.error(1, 'Unexpected')); + + const binary = await build(p, start, 'is-equal'); + await binary.check('010', 'off=1\noff=3\n'); + }); + }); + + describe('`.or()`/`.and()`/`.test()`', () => { + it('should set and retrieve bits', async () => { + const start = p.node('start'); + const test = p.node('test'); + + p.property('i64', 'flag'); + + start + .match('1', p.invoke(p.code.or('flag', 1), start)) + .match('2', p.invoke(p.code.or('flag', 2), start)) + .match('4', p.invoke(p.code.or('flag', 4), start)) + // Reset + .match('r', p.invoke(p.code.update('flag', 0), start)) + // Partial Reset + .match('p', p.invoke(p.code.and('flag', ~1), start)) + // Test + .match('-', test) + .otherwise(p.error(1, 'start')); + + test + .match('1', p.invoke(p.code.test('flag', 1), { + 0: test, + 1: printOff(p, test), + }, p.error(2, 'test-1'))) + .match('2', p.invoke(p.code.test('flag', 2), { + 0: test, + 1: printOff(p, test), + }, p.error(3, 'test-2'))) + .match('4', p.invoke(p.code.test('flag', 4), { + 0: test, + 1: printOff(p, test), + }, p.error(4, 'test-3'))) + .match('7', p.invoke(p.code.test('flag', 7), { + 0: test, + 1: printOff(p, test), + }, p.error(5, 'test-7'))) + // Restart + .match('.', start) + .otherwise(p.error(6, 'test')); + + const binary = await build(p, start, 'or-test'); + await binary.check('1-124.2-1247.4-1247.r4-124.r12p-12', [ + 'off=3', + 'off=9', 'off=10', + 'off=16', 'off=17', 'off=18', 'off=19', + 'off=26', + 'off=34', + ]); + }); + }); +}); diff --git a/llparse/test/compiler-test.ts b/llparse/test/compiler-test.ts new file mode 100644 index 0000000..39bb69f --- /dev/null +++ b/llparse/test/compiler-test.ts @@ -0,0 +1,289 @@ +import { LLParse } from '../src/api'; + +import { + ALPHA, build, NUM, NUM_SELECT, printMatch, printOff, +} from './fixtures'; + +describe('llparse/Compiler', () => { + let p: LLParse; + + beforeEach(() => { + p = new LLParse(); + }); + + it('should compile simple parser', async () => { + const start = p.node('start'); + + start.match(' ', start); + + start.match('HTTP', printOff(p, start)); + + start.select({ + CONNECT: 6, + DELETE: 4, + GET: 1, + HEAD: 0, + OPTIONS: 5, + PATCH: 8, + POST: 2, + PUT: 3, + TRACE: 7, + }, printMatch(p, start)); + + start.otherwise(p.error(3, 'Invalid word')); + + const binary = await build(p, start, 'simple'); + await binary.check('GET', 'off=3 match=1\n'); + }); + + it('should optimize shallow select', async () => { + const start = p.node('start'); + + start.select(NUM_SELECT, printMatch(p, start)); + + start.otherwise(p.error(3, 'Invalid word')); + + const binary = await build(p, start, 'shallow'); + await binary.check('012', 'off=1 match=0\noff=2 match=1\noff=3 match=2\n'); + }); + + it('should support key-value select', async () => { + const start = p.node('start'); + + start.select('0', 0, printMatch(p, start)); + start.select('1', 1, printMatch(p, start)); + start.select('2', 2, printMatch(p, start)); + + start.otherwise(p.error(3, 'Invalid word')); + + const binary = await build(p, start, 'kv-select'); + await binary.check('012', 'off=1 match=0\noff=2 match=1\noff=3 match=2\n'); + }); + + it('should support multi-match', async () => { + const start = p.node('start'); + + start.match([ ' ', '\t', '\r', '\n' ], start); + + start.select({ + A: 0, + B: 1, + }, printMatch(p, start)); + + start.otherwise(p.error(3, 'Invalid word')); + + const binary = await build(p, start, 'multi-match'); + await binary.check( + 'A B\t\tA\r\nA', + 'off=1 match=0\noff=3 
match=1\noff=6 match=0\noff=9 match=0\n'); + }); + + it('should support numeric-match', async () => { + const start = p.node('start'); + + start.match(32, start); + + start.select({ + A: 0, + B: 1, + }, printMatch(p, start)); + + start.otherwise(p.error(3, 'Invalid word')); + + const binary = await build(p, start, 'multi-match'); + await binary.check( + 'A B A A', + 'off=1 match=0\noff=3 match=1\noff=6 match=0\noff=9 match=0\n'); + }); + + it('should support custom state properties', async () => { + const start = p.node('start'); + const error = p.error(3, 'Invalid word'); + + p.property('i8', 'custom'); + + const second = p.invoke(p.code.load('custom'), { + 0: p.invoke(p.code.match('llparse__print_zero'), { 0: start }, error), + 1: p.invoke(p.code.match('llparse__print_one'), { 0: start }, error), + }, error); + + start + .select({ + 0: 0, + 1: 1, + }, p.invoke(p.code.store('custom'), second)) + .otherwise(error); + + const binary = await build(p, start, 'custom-prop'); + await binary.check('0110', 'off=1 0\noff=2 1\noff=3 1\noff=4 0\n'); + }); + + it('should return error code/reason', async () => { + const start = p.node('start'); + + start.match('a', start); + start.otherwise(p.error(42, 'some reason')); + + const binary = await build(p, start, 'error'); + await binary.check('aab', 'off=2 error code=42 reason="some reason"\n'); + }); + + it('should not merge `.match()` with `.peek()`', async () => { + const maybeCr = p.node('maybeCr'); + const lf = p.node('lf'); + + maybeCr.peek('\n', lf); + maybeCr.match('\r', lf); + maybeCr.otherwise(p.error(1, 'error')); + + lf.match('\n', printOff(p, maybeCr)); + lf.otherwise(p.error(2, 'error')); + + const binary = await build(p, maybeCr, 'no-merge'); + await binary.check('\r\n\n', 'off=2\noff=3\n'); + }); + + describe('`.match()`', () => { + it('should compile to a single-bit table-lookup node', async () => { + const start = p.node('start'); + + start + .match(ALPHA, start) + .skipTo(printOff(p, start)); + + // TODO(indutny): validate compilation result? + const binary = await build(p, start, 'match-bit-check'); + await binary.check('pecan.is.dead.', 'off=6\noff=9\noff=14\n'); + }); + + it('should compile to a multi-bit table-lookup node', async () => { + const start = p.node('start'); + const another = p.node('another'); + + start + .match(ALPHA, start) + .peek(NUM, another) + .skipTo(printOff(p, start)); + + another + .match(NUM, another) + .otherwise(start); + + // TODO(indutny): validate compilation result? + const binary = await build(p, start, 'match-multi-bit-check'); + await binary.check('pecan.135.is.dead.', + 'off=6\noff=10\noff=13\noff=18\n'); + }); + + it('should not overflow on signed char in table-lookup node', async () => { + const start = p.node('start'); + + start + .match(ALPHA, start) + .match([ 0xc3, 0xbc ], start) + .skipTo(printOff(p, start)); + + // TODO(indutny): validate compilation result? + const binary = await build(p, start, 'match-bit-check'); + await binary.check('Düsseldorf.', 'off=12\n'); + }); + + it('should match single quotes and forward slashes', async () => { + const start = p.node('start'); + + start + .match('\'', printOff(p, start)) + .match('\\', printOff(p, start)) + .otherwise(p.error(3, 'Invalid char')); + + // TODO(indutny): validate compilation result? 
+ const binary = await build(p, start, 'escape-char'); + await binary.check('\\\'', 'off=1\noff=2\n'); + }); + + it('should hit SSE4.2 optimization for table-lookup', async () => { + const start = p.node('start'); + + start + .match(ALPHA, start) + .skipTo(printOff(p, start)); + + // TODO(indutny): validate compilation result? + const binary = await build(p, start, 'match-bit-check-sse'); + await binary.check('abcdabcdabcdabcdabcdabcdabcd.abcd.', + 'off=29\noff=34\n'); + }); + + it('should compile overlapping matches', async () => { + const start = p.node('start'); + + start.select({ + aa: 1, + aab: 2, + }, printMatch(p, start)); + + start.otherwise(p.error(3, 'Invalid word')); + + const binary = await build(p, start, 'overlapping-matches'); + await binary.check('aaaabaa', 'off=2 match=1\noff=5 match=2\n'); + }); + }); + + describe('`.peek()`', () => { + it('should not advance position', async () => { + const start = p.node('start'); + const ab = p.node('ab'); + const error = p.error(3, 'Invalid word'); + + start + .peek([ 'a', 'b' ], ab) + .otherwise(error); + + ab + .match([ 'a', 'b' ], printOff(p, start)) + .otherwise(error); + + const binary = await build(p, start, 'peek'); + await binary.check('ab', 'off=1\noff=2\n'); + }); + }); + + describe('`.otherwise()`', () => { + it('should not advance position by default', async () => { + const a = p.node('a'); + const b = p.node('b'); + + a + .match('A', a) + .otherwise(b); + + b + .match('B', printOff(p, b)) + .skipTo(a); + + const binary = await build(p, a, 'otherwise-noadvance'); + await binary.check('AABAB', 'off=3\noff=5\n'); + }); + + it('should advance when it is `.skipTo()`', async () => { + const start = p.node('start'); + + start + .match(' ', printOff(p, start)) + .skipTo(start); + + const binary = await build(p, start, 'otherwise-skip'); + await binary.check('HELLO WORLD', 'off=6\n'); + }); + + it('should skip everything with `.skipTo()`', async () => { + const start = p.node('start'); + + start + .skipTo(start); + + const binary = await build(p, start, 'all-skip'); + await binary.check('HELLO WORLD', '\n'); + }); + }); +}); diff --git a/llparse/test/consume-test.ts b/llparse/test/consume-test.ts new file mode 100644 index 0000000..f9fb383 --- /dev/null +++ b/llparse/test/consume-test.ts @@ -0,0 +1,69 @@ +import * as assert from 'assert'; + +import { LLParse } from '../src/api'; + +import { build, NUM_SELECT, printMatch, printOff } from './fixtures'; + +describe('llparse/consume', () => { + let p: LLParse; + + beforeEach(() => { + p = new LLParse(); + }); + + it('should consume bytes with i8 field', async () => { + p.property('i8', 'to_consume'); + + const start = p.node('start'); + const consume = p.consume('to_consume'); + + start.select(NUM_SELECT, p.invoke(p.code.store('to_consume'), consume)); + + start + .otherwise(p.error(1, 'unexpected')); + + consume + .otherwise(printOff(p, start)); + + const binary = await build(p, start, 'consume'); + await binary.check('3aaa2bb1a01b', 'off=4\noff=7\noff=9\noff=10\noff=12\n'); + }); + + it('should consume bytes with i64 field', async () => { + p.property('i64', 'to_consume'); + + const start = p.node('start'); + const consume = p.consume('to_consume'); + + start.select(NUM_SELECT, p.invoke(p.code.store('to_consume'), consume)); + + start + .otherwise(p.error(1, 'unexpected')); + + consume + .otherwise(printOff(p, start)); + + const binary = await build(p, start, 'consume-i64'); + await binary.check('3aaa2bb1a01b', 'off=4\noff=7\noff=9\noff=10\noff=12\n'); + }); + + it('should consume 
bytes with untruncated i64 field', async () => { + p.property('i64', 'to_consume'); + + const start = p.node('start'); + const consume = p.consume('to_consume'); + + start + .select( + NUM_SELECT, + p.invoke(p.code.mulAdd('to_consume', { base: 10 }), start) + ) + .skipTo(consume); + + consume + .otherwise(printOff(p, start)); + + const binary = await build(p, start, 'consume-untruncated-i64'); + await binary.check('4294967297.xxxxxxxx', '\n'); + }); +}); diff --git a/llparse/test/fixtures/extra.c b/llparse/test/fixtures/extra.c new file mode 100644 index 0000000..79cdff9 --- /dev/null +++ b/llparse/test/fixtures/extra.c @@ -0,0 +1,84 @@ +#include "fixture.h" + +int llparse__print_zero(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + llparse__print(p, endp, "0"); + return 0; +} + + +int llparse__print_one(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + llparse__print(p, endp, "1"); + return 0; +} + + +int llparse__print_off(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + llparse__print(p, endp, ""); + return 0; +} + + +int llparse__print_match(llparse_t* s, const char* p, const char* endp, + int value) { + if (llparse__in_bench) + return 0; + llparse__print(p, endp, "match=%d", value); + return 0; +} + + +int llparse__on_dot(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + return llparse__print_span("dot", p, endp); +} + + +int llparse__on_dash(llparse_t* s, const char* p, const char* endp) { + if (llparse__in_bench) + return 0; + return llparse__print_span("dash", p, endp); +} + + +int llparse__on_underscore(llparse_t* s, const char* p, + const char* endp) { + if (llparse__in_bench) + return 0; + return llparse__print_span("underscore", p, endp); +} + + +/* A span callback, really */ +int llparse__please_fail(llparse_t* s, const char* p, const char* endp) { + s->reason = "please fail"; + if (llparse__in_bench) + return 1; + return 1; +} + + +/* A span callback, really */ +static int llparse__pause_once_counter; + +int llparse__pause_once(llparse_t* s, const char* p, const char* endp) { + if (!llparse__in_bench) + llparse__print_span("pause", p, endp); + + if (llparse__pause_once_counter != 0) + return 0; + llparse__pause_once_counter = 1; + + return LLPARSE__ERROR_PAUSE; +} + + +int llparse__test_init() { + llparse__pause_once_counter = 0; +} diff --git a/llparse/test/fixtures/index.ts b/llparse/test/fixtures/index.ts new file mode 100644 index 0000000..d8a7336 --- /dev/null +++ b/llparse/test/fixtures/index.ts @@ -0,0 +1,52 @@ +import { source } from 'llparse-frontend'; +import { Fixture, FixtureResult } from 'llparse-test-fixture'; +import * as path from 'path'; + +import { LLParse } from '../../src/api'; + +export { ERROR_PAUSE } from 'llparse-test-fixture'; + +const fixtures = new Fixture({ + buildDir: path.join(__dirname, '..', 'tmp'), + extra: [ + '-msse4.2', + '-DLLPARSE__TEST_INIT=llparse__test_init', + path.join(__dirname, 'extra.c'), + ], +}); + +export function build(llparse: LLParse, node: source.node.Node, outFile: string) + : Promise { + return fixtures.build(llparse.build(node, { + c: { + header: outFile, + }, + }), outFile); +} + +export function printMatch(p: LLParse, next: source.node.Node) + : source.node.Node { + const code = p.code.value('llparse__print_match'); + const res = p.invoke(code, next); + return res; +} + +export function printOff(p: LLParse, next: source.node.Node): source.node.Node { + const code = 
p.code.match('llparse__print_off'); + return p.invoke(code, next); +} + +export const NUM_SELECT: { readonly [key: string]: number } = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, +}; + +export const NUM: ReadonlyArray = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', +]; + +export const ALPHA: ReadonlyArray = [ + 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', + 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', + 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', + 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', +]; diff --git a/llparse/test/resumption-test.ts b/llparse/test/resumption-test.ts new file mode 100644 index 0000000..438b7fd --- /dev/null +++ b/llparse/test/resumption-test.ts @@ -0,0 +1,55 @@ +import * as assert from 'assert'; + +import { LLParse } from '../src/api'; + +import { build, ERROR_PAUSE, printMatch, printOff } from './fixtures'; + +describe('llparse/resumption', () => { + let p: LLParse; + + beforeEach(() => { + p = new LLParse(); + }); + + it('should resume after span end pause', async () => { + const start = p.node('start'); + const a = p.node('a'); + const span = p.span(p.code.span('llparse__pause_once')); + + start + .peek('a', span.start(a)) + .skipTo(start); + + a + .match('a', a) + .otherwise(span.end(start)); + + const binary = await build(p, start, 'resume-span'); + + await binary.check('baaab', + new RegExp( + '^(' + + 'off=\\d+ pause\\noff=1 len=3 span\\[pause\\]="aaa"' + + '|' + + 'off=1 len=3 span\\[pause\\]="aaa"\noff=4 pause' + + ')\\n$' + , 'g')); + }); + + it('should resume after `pause` node', async () => { + const start = p.node('start'); + const pause = p.pause(ERROR_PAUSE, 'paused'); + + start + .match('p', pause) + .skipTo(start); + + pause + .otherwise(printOff(p, start)); + + const binary = await build(p, start, 'resume-pause'); + + await binary.check('..p....p..', + 'off=3 pause\noff=3\noff=8 pause\noff=8\n'); + }); +}); diff --git a/llparse/test/span-test.ts b/llparse/test/span-test.ts new file mode 100644 index 0000000..b01ad51 --- /dev/null +++ b/llparse/test/span-test.ts @@ -0,0 +1,107 @@ +import * as assert from 'assert'; + +import { LLParse } from '../src/api'; + +import { build, printMatch, printOff } from './fixtures'; + +describe('llparse/spans', () => { + let p: LLParse; + + beforeEach(() => { + p = new LLParse(); + }); + + it('should invoke span callback', async () => { + const start = p.node('start'); + const dot = p.node('dot'); + const dash = p.node('dash'); + const underscore = p.node('underscore'); + + const span = { + dash: p.span(p.code.span('llparse__on_dash')), + dot: p.span(p.code.span('llparse__on_dot')), + underscore: p.span(p.code.span('llparse__on_underscore')), + }; + + start.otherwise(span.dot.start(dot)); + + dot + .match('.', dot) + .peek('-', span.dash.start(dash)) + .peek('_', span.underscore.start(underscore)) + .skipTo(span.dot.end(start)); + + dash + .match('-', dash) + .otherwise(span.dash.end(dot)); + + underscore + .match('_', underscore) + .otherwise(span.underscore.end(dot)); + + const binary = await build(p, start, 'span'); + await binary.check('..--..__..', + 'off=2 len=2 span[dash]="--"\n' + + 'off=6 len=2 span[underscore]="__"\n' + + 'off=0 len=10 span[dot]="..--..__.."\n'); + }); + + it('should return error', async () => { + const start = p.node('start'); + const dot = p.node('dot'); + + const span = { + pleaseFail: p.span(p.code.span('llparse__please_fail')), + }; + + 
start.otherwise(span.pleaseFail.start(dot)); + + dot + .match('.', dot) + .skipTo(span.pleaseFail.end(start)); + + const binary = await build(p, start, 'span-error'); + + await binary.check( + '....a', + /off=\d+ error code=1 reason="please fail"\n/); + }); + + it('should return error at `executeSpans()`', async () => { + const start = p.node('start'); + const dot = p.node('dot'); + + const span = { + pleaseFail: p.span(p.code.span('llparse__please_fail')), + }; + + start.otherwise(span.pleaseFail.start(dot)); + + dot + .match('.', dot) + .skipTo(span.pleaseFail.end(start)); + + const binary = await build(p, start, 'span-error-execute'); + + await binary.check( + '.........', + /off=9 error code=1 reason="please fail"\n/, { scan: 100 }); + }); + + it('should not invoke spurious span callback', async () => { + const start = p.node('start'); + const dot = p.node('dot'); + const span = p.span(p.code.span('llparse__on_dot')); + + start + .match('hello', span.start(dot)) + .skipTo(start); + + dot + .match('.', dot) + .skipTo(span.end(start)); + + const binary = await build(p, start, 'span-spurious'); + await binary.check('hello', [ '' ]); + }); +}); diff --git a/llparse/test/transform-test.ts b/llparse/test/transform-test.ts new file mode 100644 index 0000000..d30381e --- /dev/null +++ b/llparse/test/transform-test.ts @@ -0,0 +1,41 @@ +import * as assert from 'assert'; + +import { LLParse } from '../src/api'; + +import { build, printMatch, printOff } from './fixtures'; + +describe('llparse/transform', () => { + let p: LLParse; + + beforeEach(() => { + p = new LLParse(); + }); + + it('should apply transformation before the match', async () => { + const start = p.node('start'); + + start + .transform(p.transform.toLowerUnsafe()) + .match('connect', printOff(p, start)) + .match('close', printOff(p, start)) + .otherwise(p.error(1, 'error')); + + const binary = await build(p, start, 'transform-lower'); + await binary.check('connectCLOSEcOnNeCt', 'off=7\noff=12\noff=19\n'); + }); + + it('should apply safe `toLower()` transformation', async () => { + const start = p.node('start'); + + start + .transform(p.transform.toLower()) + .select({ + 'a-b': 1, + 'a\rb': 2, + }, printMatch(p, start)) + .otherwise(p.error(1, 'error')); + + const binary = await build(p, start, 'transform-safe-lower'); + await binary.check('A-ba\rB', 'off=3 match=1\noff=6 match=2\n'); + }); +}); diff --git a/llparse/tsconfig.json b/llparse/tsconfig.json new file mode 100644 index 0000000..01ec7c2 --- /dev/null +++ b/llparse/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "strict": true, + "target": "es2017", + "module": "commonjs", + "moduleResolution": "node", + "outDir": "./lib", + "declaration": true, + "pretty": true, + "sourceMap": true + }, + "include": [ + "src/**/*.ts" + ] +} diff --git a/llparse/tslint.json b/llparse/tslint.json new file mode 100644 index 0000000..24fec09 --- /dev/null +++ b/llparse/tslint.json @@ -0,0 +1,16 @@ +{ + "defaultSeverity": "error", + "extends": [ + "tslint:recommended" + ], + "jsRules": {}, + "rules": { + "no-bitwise": null, + "max-line-length": [true, 80], + "max-classes-per-file": [true, 1, "exclude-class-expressions"], + "quotemark": [ + true, "single", "avoid-escape", "avoid-template" + ] + }, + "rulesDirectory": [] +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..2b64daf --- /dev/null +++ b/package.json @@ -0,0 +1,167 @@ +{ + "name": "undici", + "version": "5.28.2", + "description": "An HTTP/1.1 client, written from scratch for Node.js", + 
"homepage": "https://undici.nodejs.org", + "bugs": { + "url": "https://github.com/nodejs/undici/issues" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/nodejs/undici.git" + }, + "license": "MIT", + "contributors": [ + { + "name": "Daniele Belardi", + "url": "https://github.com/dnlup", + "author": true + }, + { + "name": "Ethan Arrowood", + "url": "https://github.com/ethan-arrowood", + "author": true + }, + { + "name": "Matteo Collina", + "url": "https://github.com/mcollina", + "author": true + }, + { + "name": "Matthew Aitken", + "url": "https://github.com/KhafraDev", + "author": true + }, + { + "name": "Robert Nagy", + "url": "https://github.com/ronag", + "author": true + }, + { + "name": "Szymon Marczak", + "url": "https://github.com/szmarczak", + "author": true + }, + { + "name": "Tomas Della Vedova", + "url": "https://github.com/delvedor", + "author": true + } + ], + "keywords": [ + "fetch", + "http", + "https", + "promise", + "request", + "curl", + "wget", + "xhr", + "whatwg" + ], + "main": "index.js", + "types": "index.d.ts", + "files": [ + "*.d.ts", + "index.js", + "index-fetch.js", + "lib", + "types", + "docs" + ], + "scripts": { + "build:node": "npx esbuild@0.19.4 index-fetch.js --bundle --platform=node --outfile=undici-fetch.js --define:esbuildDetection=1 --keep-names", + "prebuild:wasm": "node build/wasm.js --prebuild", + "build:wasm": "node build/wasm.js --docker", + "lint": "standard | snazzy", + "lint:fix": "standard --fix | snazzy", + "test": "node scripts/generate-pem && npm run test:tap && npm run test:node-fetch && npm run test:fetch && npm run test:cookies && npm run test:wpt && npm run test:websocket && npm run test:jest && npm run test:typescript", + "test:cookies": "node scripts/verifyVersion 16 || tap test/cookie/*.js", + "test:node-fetch": "node scripts/verifyVersion.js 16 || mocha --exit test/node-fetch", + "test:fetch": "node scripts/verifyVersion.js 16 || (npm run build:node && tap --expose-gc test/fetch/*.js && tap test/webidl/*.js)", + "test:jest": "node scripts/verifyVersion.js 14 || jest", + "test:tap": "tap test/*.js test/diagnostics-channel/*.js", + "test:tdd": "tap test/*.js test/diagnostics-channel/*.js -w", + "test:typescript": "node scripts/verifyVersion.js 14 || tsd && tsc --skipLibCheck test/imports/undici-import.ts", + "test:websocket": "node scripts/verifyVersion.js 18 || tap test/websocket/*.js", + "test:wpt": "node scripts/verifyVersion 18 || (node test/wpt/start-fetch.mjs && node test/wpt/start-FileAPI.mjs && node test/wpt/start-mimesniff.mjs && node test/wpt/start-xhr.mjs && node test/wpt/start-websockets.mjs)", + "coverage": "nyc --reporter=text --reporter=html npm run test", + "coverage:ci": "nyc --reporter=lcov npm run test", + "bench": "PORT=3042 concurrently -k -s first npm:bench:server npm:bench:run", + "bench:server": "node benchmarks/server.js", + "prebench:run": "node benchmarks/wait.js", + "bench:run": "CONNECTIONS=1 node benchmarks/benchmark.js; CONNECTIONS=50 node benchmarks/benchmark.js", + "serve:website": "docsify serve .", + "prepare": "husky install", + "fuzz": "jsfuzz test/fuzzing/fuzz.js corpus" + }, + "devDependencies": { + "@sinonjs/fake-timers": "^11.1.0", + "@types/node": "^18.0.3", + "abort-controller": "^3.0.0", + "atomic-sleep": "^1.0.0", + "chai": "^4.3.4", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^3.0.2", + "chai-string": "^1.5.0", + "concurrently": "^8.0.1", + "cronometro": "^1.0.5", + "delay": "^5.0.0", + "dns-packet": "^5.4.0", + "docsify-cli": "^4.4.3", + "form-data": "^4.0.0", 
+ "formdata-node": "^6.0.3", + "https-pem": "^3.0.0", + "husky": "^8.0.1", + "import-fresh": "^3.3.0", + "jest": "^29.0.2", + "jsdom": "^23.0.0", + "jsfuzz": "^1.0.15", + "mocha": "^10.0.0", + "mockttp": "^3.9.2", + "p-timeout": "^3.2.0", + "pre-commit": "^1.2.2", + "proxy": "^1.0.2", + "proxyquire": "^2.1.3", + "semver": "^7.5.4", + "sinon": "^17.0.1", + "snazzy": "^9.0.0", + "standard": "^17.0.0", + "table": "^6.8.0", + "tap": "^16.1.0", + "tsd": "^0.29.0", + "typescript": "^5.0.2", + "wait-on": "^7.0.1", + "ws": "^8.11.0" + }, + "engines": { + "node": ">=14.0" + }, + "standard": { + "env": [ + "mocha" + ], + "ignore": [ + "lib/llhttp/constants.js", + "lib/llhttp/utils.js", + "test/wpt/tests" + ] + }, + "tsd": { + "directory": "test/types", + "compilerOptions": { + "esModuleInterop": true, + "lib": [ + "esnext" + ] + } + }, + "jest": { + "testMatch": [ + "/test/jest/**" + ] + }, + "dependencies": { + "@fastify/busboy": "^2.0.0" + } +} diff --git a/scripts/generate-pem.js b/scripts/generate-pem.js new file mode 100644 index 0000000..0d7e628 --- /dev/null +++ b/scripts/generate-pem.js @@ -0,0 +1,3 @@ +/* istanbul ignore file */ + +require('https-pem/install') diff --git a/scripts/generate-undici-types-package-json.js b/scripts/generate-undici-types-package-json.js new file mode 100644 index 0000000..78095ae --- /dev/null +++ b/scripts/generate-undici-types-package-json.js @@ -0,0 +1,28 @@ +const fs = require('node:fs') +const path = require('node:path') + +const packageJSONPath = path.join(__dirname, '..', 'package.json') +const packageJSONRaw = fs.readFileSync(packageJSONPath, 'utf-8') +const packageJSON = JSON.parse(packageJSONRaw) + +const licensePath = path.join(__dirname, '..', 'LICENSE') +const licenseRaw = fs.readFileSync(licensePath, 'utf-8') + +const packageTypesJSON = { + name: 'undici-types', + version: packageJSON.version, + description: 'A stand-alone types package for Undici', + homepage: packageJSON.homepage, + bugs: packageJSON.bugs, + repository: packageJSON.repository, + license: packageJSON.license, + types: 'index.d.ts', + files: ['*.d.ts'], + contributors: packageJSON.contributors +} + +const packageTypesPath = path.join(__dirname, '..', 'types', 'package.json') +const licenseTypesPath = path.join(__dirname, '..', 'types', 'LICENSE') + +fs.writeFileSync(packageTypesPath, JSON.stringify(packageTypesJSON, null, 2)) +fs.writeFileSync(licenseTypesPath, licenseRaw) diff --git a/scripts/verifyVersion.js b/scripts/verifyVersion.js new file mode 100644 index 0000000..8ad2d19 --- /dev/null +++ b/scripts/verifyVersion.js @@ -0,0 +1,15 @@ +/* istanbul ignore file */ + +const [major, minor, patch] = process.versions.node.split('.').map(v => Number(v)) +const required = process.argv.pop().split('.').map(v => Number(v)) + +const badMajor = major < required[0] +const badMinor = major === required[0] && minor < required[1] +const badPatch = major === required[0] && minor === required[1] && patch < required[2] + +if (badMajor || badMinor || badPatch) { + console.log(`Required Node.js >=${required.join('.')}, got ${process.versions.node}`) + console.log('Skipping') +} else { + process.exit(1) +} diff --git a/test/abort-controller.js b/test/abort-controller.js new file mode 100644 index 0000000..4658686 --- /dev/null +++ b/test/abort-controller.js @@ -0,0 +1,238 @@ +'use strict' + +const { test } = require('tap') +const { AbortController: NPMAbortController } = require('abort-controller') +const { Client, errors } = require('..') +const { createServer } = require('http') +const { 
createReadStream } = require('fs') +const { wrapWithAsyncIterable } = require('./utils/async-iterators') + +const controllers = [{ + AbortControllerImpl: NPMAbortController, + controllerName: 'npm-abortcontroller-shim' +}] + +if (global.AbortController) { + controllers.push({ + AbortControllerImpl: global.AbortController, + controllerName: 'native-abortcontroller' + }) +} +for (const { AbortControllerImpl, controllerName } of controllers) { + test(`Abort ${controllerName} before creating request`, (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.fail() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const abortController = new AbortControllerImpl() + t.teardown(client.destroy.bind(client)) + + abortController.abort() + + client.request({ path: '/', method: 'GET', signal: abortController.signal }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + + test(`Abort ${controllerName} before sending request (no body)`, (t) => { + t.plan(3) + + let count = 0 + const server = createServer((req, res) => { + if (count === 1) { + t.fail('The second request should never be executed') + } + count += 1 + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const abortController = new AbortControllerImpl() + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + + client.request({ path: '/', method: 'GET', signal: abortController.signal }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + + abortController.abort() + }) + }) + + test(`Abort ${controllerName} while waiting response (no body)`, (t) => { + t.plan(1) + + const abortController = new AbortControllerImpl() + const server = createServer((req, res) => { + abortController.abort() + res.setHeader('content-type', 'text/plain') + res.end('hello world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: abortController.signal }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + + test(`Abort ${controllerName} while waiting response (write headers started) (no body)`, (t) => { + t.plan(1) + + const abortController = new AbortControllerImpl() + const server = createServer((req, res) => { + res.writeHead(200, { 'content-type': 'text/plain' }) + res.flushHeaders() + abortController.abort() + res.end('hello world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: abortController.signal }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + + test(`Abort ${controllerName} while waiting response (write headers and write body started) (no body)`, (t) => { + t.plan(2) + + const abortController = new AbortControllerImpl() + const server = createServer((req, res) => { + res.writeHead(200, { 
'content-type': 'text/plain' }) + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: abortController.signal }, (err, response) => { + t.error(err) + response.body.on('data', () => { + abortController.abort() + }) + response.body.on('error', err => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + }) + + function waitingWithBody (body, type) { // eslint-disable-line + test(`Abort ${controllerName} while waiting response (with body ${type})`, (t) => { + t.plan(1) + + const abortController = new AbortControllerImpl() + const server = createServer((req, res) => { + abortController.abort() + res.setHeader('content-type', 'text/plain') + res.end('hello world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'POST', body, signal: abortController.signal }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + } + + waitingWithBody('hello', 'string') + waitingWithBody(createReadStream(__filename), 'stream') + waitingWithBody(new Uint8Array([42]), 'Uint8Array') + waitingWithBody(wrapWithAsyncIterable(createReadStream(__filename)), 'async-iterator') + + function writeHeadersStartedWithBody (body, type) { // eslint-disable-line + test(`Abort ${controllerName} while waiting response (write headers started) (with body ${type})`, (t) => { + t.plan(1) + + const abortController = new AbortControllerImpl() + const server = createServer((req, res) => { + res.writeHead(200, { 'content-type': 'text/plain' }) + res.flushHeaders() + abortController.abort() + res.end('hello world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'POST', body, signal: abortController.signal }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + } + + writeHeadersStartedWithBody('hello', 'string') + writeHeadersStartedWithBody(createReadStream(__filename), 'stream') + writeHeadersStartedWithBody(new Uint8Array([42]), 'Uint8Array') + writeHeadersStartedWithBody(wrapWithAsyncIterable(createReadStream(__filename)), 'async-iterator') + + function writeBodyStartedWithBody (body, type) { // eslint-disable-line + test(`Abort ${controllerName} while waiting response (write headers and write body started) (with body ${type})`, (t) => { + t.plan(2) + + const abortController = new AbortControllerImpl() + const server = createServer((req, res) => { + res.writeHead(200, { 'content-type': 'text/plain' }) + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'POST', body, signal: abortController.signal }, (err, response) => { + t.error(err) + response.body.on('data', () => { + abortController.abort() + }) + response.body.on('error', err => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + }) + } + + writeBodyStartedWithBody('hello', 'string') + writeBodyStartedWithBody(createReadStream(__filename), 'stream') + 
writeBodyStartedWithBody(new Uint8Array([42]), 'Uint8Array') + writeBodyStartedWithBody(wrapWithAsyncIterable(createReadStream(__filename), 'async-iterator')) +} diff --git a/test/abort-event-emitter.js b/test/abort-event-emitter.js new file mode 100644 index 0000000..a5397e4 --- /dev/null +++ b/test/abort-event-emitter.js @@ -0,0 +1,259 @@ +'use strict' + +const { test } = require('tap') +const EventEmitter = require('events') +const { Client, errors } = require('..') +const { createServer } = require('http') +const { createReadStream } = require('fs') +const { Readable } = require('stream') +const { wrapWithAsyncIterable } = require('./utils/async-iterators') + +test('Abort before sending request (no body)', (t) => { + t.plan(4) + + let count = 0 + const server = createServer((req, res) => { + if (count === 1) { + t.fail('The second request should never be executed') + } + count += 1 + res.end('hello') + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const ee = new EventEmitter() + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + + const body = new Readable({ read () { } }) + body.on('error', (err) => { + t.type(err, errors.RequestAbortedError) + }) + client.request({ + path: '/', + method: 'GET', + signal: ee, + body + }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + + ee.emit('abort') + }) +}) + +test('Abort before sending request (no body) async iterator', (t) => { + t.plan(3) + + let count = 0 + const server = createServer((req, res) => { + if (count === 1) { + t.fail('The second request should never be executed') + } + count += 1 + res.end('hello') + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const ee = new EventEmitter() + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + + const body = wrapWithAsyncIterable(new Readable({ read () { } })) + client.request({ + path: '/', + method: 'GET', + signal: ee, + body + }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + + ee.emit('abort') + }) +}) + +test('Abort while waiting response (no body)', (t) => { + t.plan(1) + + const ee = new EventEmitter() + const server = createServer((req, res) => { + ee.emit('abort') + res.setHeader('content-type', 'text/plain') + res.end('hello world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: ee }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) +}) + +test('Abort while waiting response (write headers started) (no body)', (t) => { + t.plan(1) + + const ee = new EventEmitter() + const server = createServer((req, res) => { + res.writeHead(200, { 'content-type': 'text/plain' }) + res.flushHeaders() + ee.emit('abort') + res.end('hello 
world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: ee }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) +}) + +test('Abort while waiting response (write headers and write body started) (no body)', (t) => { + t.plan(2) + + const ee = new EventEmitter() + const server = createServer((req, res) => { + res.writeHead(200, { 'content-type': 'text/plain' }) + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: ee }, (err, response) => { + t.error(err) + response.body.on('data', () => { + ee.emit('abort') + }) + response.body.on('error', err => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) +}) + +function waitingWithBody (body, type) { + test(`Abort while waiting response (with body ${type})`, (t) => { + t.plan(1) + + const ee = new EventEmitter() + const server = createServer((req, res) => { + ee.emit('abort') + res.setHeader('content-type', 'text/plain') + res.end('hello world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'POST', body, signal: ee }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) +} + +waitingWithBody('hello', 'string') +waitingWithBody(createReadStream(__filename), 'stream') +waitingWithBody(new Uint8Array([42]), 'Uint8Array') +waitingWithBody(wrapWithAsyncIterable(createReadStream(__filename)), 'async-iterator') + +function writeHeadersStartedWithBody (body, type) { + test(`Abort while waiting response (write headers started) (with body ${type})`, (t) => { + t.plan(1) + + const ee = new EventEmitter() + const server = createServer((req, res) => { + res.writeHead(200, { 'content-type': 'text/plain' }) + res.flushHeaders() + ee.emit('abort') + res.end('hello world') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'POST', body, signal: ee }, (err, response) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) +} + +writeHeadersStartedWithBody('hello', 'string') +writeHeadersStartedWithBody(createReadStream(__filename), 'stream') +writeHeadersStartedWithBody(new Uint8Array([42]), 'Uint8Array') +writeHeadersStartedWithBody(wrapWithAsyncIterable(createReadStream(__filename)), 'async-iterator') + +function writeBodyStartedWithBody (body, type) { + test(`Abort while waiting response (write headers and write body started) (with body ${type})`, (t) => { + t.plan(2) + + const ee = new EventEmitter() + const server = createServer((req, res) => { + res.writeHead(200, { 'content-type': 'text/plain' }) + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'POST', body, signal: ee }, (err, response) => { + t.error(err) + response.body.on('data', () => { + 
ee.emit('abort') + }) + response.body.on('error', err => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) + }) +} + +writeBodyStartedWithBody('hello', 'string') +writeBodyStartedWithBody(createReadStream(__filename), 'stream') +writeBodyStartedWithBody(new Uint8Array([42]), 'Uint8Array') +writeBodyStartedWithBody(wrapWithAsyncIterable(createReadStream(__filename)), 'async-iterator') diff --git a/test/agent.js b/test/agent.js new file mode 100644 index 0000000..65afd8b --- /dev/null +++ b/test/agent.js @@ -0,0 +1,782 @@ +'use strict' + +const { test, teardown } = require('tap') +const http = require('http') +const { PassThrough } = require('stream') +const { kRunning } = require('../lib/core/symbols') +const { + Agent, + errors, + request, + stream, + pipeline, + Pool, + setGlobalDispatcher, + getGlobalDispatcher +} = require('../') +const importFresh = require('import-fresh') + +test('setGlobalDispatcher', t => { + t.plan(2) + + t.test('fails if agent does not implement `get` method', t => { + t.plan(1) + t.throws(() => setGlobalDispatcher({ dispatch: 'not a function' }), errors.InvalidArgumentError) + }) + + t.test('sets global agent', t => { + t.plan(2) + t.doesNotThrow(() => setGlobalDispatcher(new Agent())) + t.doesNotThrow(() => setGlobalDispatcher({ dispatch: () => {} })) + }) + + t.teardown(() => { + // reset globalAgent to a fresh Agent instance for later tests + setGlobalDispatcher(new Agent()) + }) +}) + +test('Agent', t => { + t.plan(1) + + t.doesNotThrow(() => new Agent()) +}) + +test('agent should call callback after closing internal pools', t => { + t.plan(2) + + const wanted = 'payload' + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const dispatcher = new Agent() + + const origin = `http://localhost:${server.address().port}` + + request(origin, { dispatcher }) + .then(() => { + t.pass('first request should resolve') + }) + .catch(err => { + t.fail(err) + }) + + dispatcher.once('connect', () => { + dispatcher.close(() => { + request(origin, { dispatcher }) + .then(() => { + t.fail('second request should not resolve') + }) + .catch(err => { + t.type(err, errors.ClientDestroyedError) + }) + }) + }) + }) +}) + +test('agent close throws when callback is not a function', t => { + t.plan(1) + const dispatcher = new Agent() + try { + dispatcher.close({}) + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } +}) + +test('agent should close internal pools', t => { + t.plan(2) + + const wanted = 'payload' + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const dispatcher = new Agent() + + const origin = `http://localhost:${server.address().port}` + + request(origin, { dispatcher }) + .then(() => { + t.pass('first request should resolve') + }) + .catch(err => { + t.fail(err) + }) + + dispatcher.once('connect', () => { + dispatcher.close() + .then(() => request(origin, { dispatcher })) + .then(() => { + t.fail('second request should not resolve') + }) + .catch(err => { + t.type(err, errors.ClientDestroyedError) + }) + }) + }) +}) + +test('agent should destroy internal pools and call callback', t => { + t.plan(2) + + const wanted = 'payload' + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + 
t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const dispatcher = new Agent() + + const origin = `http://localhost:${server.address().port}` + + request(origin, { dispatcher }) + .then(() => { + t.fail() + }) + .catch(err => { + t.type(err, errors.ClientDestroyedError) + }) + + dispatcher.once('connect', () => { + dispatcher.destroy(() => { + request(origin, { dispatcher }) + .then(() => { + t.fail() + }) + .catch(err => { + t.type(err, errors.ClientDestroyedError) + }) + }) + }) + }) +}) + +test('agent destroy throws when callback is not a function', t => { + t.plan(1) + const dispatcher = new Agent() + try { + dispatcher.destroy(new Error('mock error'), {}) + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } +}) + +test('agent close/destroy callback with error', t => { + t.plan(4) + const dispatcher = new Agent() + t.equal(dispatcher.closed, false) + dispatcher.close() + t.equal(dispatcher.closed, true) + t.equal(dispatcher.destroyed, false) + dispatcher.destroy(new Error('mock error')) + t.equal(dispatcher.destroyed, true) +}) + +test('agent should destroy internal pools', t => { + t.plan(2) + + const wanted = 'payload' + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const dispatcher = new Agent() + + const origin = `http://localhost:${server.address().port}` + + request(origin, { dispatcher }) + .then(() => { + t.fail() + }) + .catch(err => { + t.type(err, errors.ClientDestroyedError) + }) + + dispatcher.once('connect', () => { + dispatcher.destroy() + .then(() => request(origin, { dispatcher })) + .then(() => { + t.fail() + }) + .catch(err => { + t.type(err, errors.ClientDestroyedError) + }) + }) + }) +}) + +test('multiple connections', t => { + const connections = 3 + t.plan(6 * connections) + + const server = http.createServer((req, res) => { + res.writeHead(200, { + Connection: 'keep-alive', + 'Keep-Alive': 'timeout=1s' + }) + res.end('ok') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const origin = `http://localhost:${server.address().port}` + const dispatcher = new Agent({ connections }) + + t.teardown(dispatcher.close.bind(dispatcher)) + + dispatcher.on('connect', (origin, [dispatcher]) => { + t.ok(dispatcher) + }) + dispatcher.on('disconnect', (origin, [dispatcher], error) => { + t.ok(dispatcher) + t.type(error, errors.InformationalError) + t.equal(error.code, 'UND_ERR_INFO') + t.equal(error.message, 'reset') + }) + + for (let i = 0; i < connections; i++) { + await request(origin, { dispatcher }) + .then(() => { + t.pass('should pass') + }) + .catch(err => { + t.fail(err) + }) + } + }) +}) + +test('agent factory supports URL parameter', (t) => { + t.plan(2) + + const noopHandler = { + onConnect () {}, + onHeaders () {}, + onData () {}, + onComplete () { + server.close() + }, + onError (err) { + throw err + } + } + + const dispatcher = new Agent({ + factory: (origin, opts) => { + t.ok(origin instanceof URL) + return new Pool(origin, opts) + } + }) + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('asd') + }) + + server.listen(0, () => { + t.doesNotThrow(() => dispatcher.dispatch({ + origin: new URL(`http://localhost:${server.address().port}`), + path: '/', + method: 'GET' + }, noopHandler)) + }) +}) + +test('agent factory supports string parameter', (t) => { + t.plan(2) + + const noopHandler = { + 
onConnect () {}, + onHeaders () {}, + onData () {}, + onComplete () { + server.close() + }, + onError (err) { + throw err + } + } + + const dispatcher = new Agent({ + factory: (origin, opts) => { + t.ok(typeof origin === 'string') + return new Pool(origin, opts) + } + }) + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('asd') + }) + + server.listen(0, () => { + t.doesNotThrow(() => dispatcher.dispatch({ + origin: `http://localhost:${server.address().port}`, + path: '/', + method: 'GET' + }, noopHandler)) + }) +}) + +test('with globalAgent', t => { + t.plan(6) + const wanted = 'payload' + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + request(`http://localhost:${server.address().port}`) + .then(({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal(wanted, Buffer.concat(bufs).toString('utf8')) + }) + }) + .catch(err => { + t.fail(err) + }) + }) +}) + +test('with local agent', t => { + t.plan(6) + const wanted = 'payload' + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + const dispatcher = new Agent({ + connect: { + servername: 'agent1' + } + }) + + server.listen(0, () => { + request(`http://localhost:${server.address().port}`, { dispatcher }) + .then(({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal(wanted, Buffer.concat(bufs).toString('utf8')) + }) + }) + .catch(err => { + t.fail(err) + }) + }) +}) + +test('fails with invalid args', t => { + t.throws(() => request(), errors.InvalidArgumentError, 'throws on missing url argument') + t.throws(() => request(''), errors.InvalidArgumentError, 'throws on invalid url') + t.throws(() => request({}), errors.InvalidArgumentError, 'throws on missing url.origin argument') + t.throws(() => request({ origin: '' }), errors.InvalidArgumentError, 'throws on invalid url.origin argument') + t.throws(() => request('https://example.com', { path: 0 }), errors.InvalidArgumentError, 'throws on opts.path argument') + t.throws(() => request('https://example.com', { agent: new Agent() }), errors.InvalidArgumentError, 'throws on opts.path argument') + t.throws(() => request('https://example.com', 'asd'), errors.InvalidArgumentError, 'throws on non object opts argument') + t.end() +}) + +test('with globalAgent', t => { + t.plan(6) + const wanted = 'payload' + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + stream( + `http://localhost:${server.address().port}`, + { + opaque: new PassThrough() + }, + ({ statusCode, headers, opaque: pt }) 
=> { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + pt.on('data', (buf) => { + bufs.push(buf) + }) + pt.on('end', () => { + t.equal(wanted, Buffer.concat(bufs).toString('utf8')) + }) + pt.on('error', () => { + t.fail() + }) + return pt + } + ) + }) +}) + +test('with a local agent', t => { + t.plan(9) + const wanted = 'payload' + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + const dispatcher = new Agent() + + dispatcher.on('connect', (origin, [dispatcher]) => { + t.ok(dispatcher) + t.equal(dispatcher[kRunning], 0) + process.nextTick(() => { + t.equal(dispatcher[kRunning], 1) + }) + }) + + server.listen(0, () => { + stream( + `http://localhost:${server.address().port}`, + { + dispatcher, + opaque: new PassThrough() + }, + ({ statusCode, headers, opaque: pt }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + pt.on('data', (buf) => { + bufs.push(buf) + }) + pt.on('end', () => { + t.equal(wanted, Buffer.concat(bufs).toString('utf8')) + }) + pt.on('error', () => { + t.fail() + }) + return pt + } + ) + }) +}) + +test('stream: fails with invalid URL', t => { + t.plan(4) + t.throws(() => stream(), errors.InvalidArgumentError, 'throws on missing url argument') + t.throws(() => stream(''), errors.InvalidArgumentError, 'throws on invalid url') + t.throws(() => stream({}), errors.InvalidArgumentError, 'throws on missing url.origin argument') + t.throws(() => stream({ origin: '' }), errors.InvalidArgumentError, 'throws on invalid url.origin argument') +}) + +test('with globalAgent', t => { + t.plan(6) + const wanted = 'payload' + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const bufs = [] + + pipeline( + `http://localhost:${server.address().port}`, + {}, + ({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + return body + } + ) + .end() + .on('data', buf => { + bufs.push(buf) + }) + .on('end', () => { + t.equal(wanted, Buffer.concat(bufs).toString('utf8')) + }) + .on('error', () => { + t.fail() + }) + }) +}) + +test('with a local agent', t => { + t.plan(6) + const wanted = 'payload' + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end(wanted) + }) + + t.teardown(server.close.bind(server)) + + const dispatcher = new Agent() + + server.listen(0, () => { + const bufs = [] + + pipeline( + `http://localhost:${server.address().port}`, + { dispatcher }, + ({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + return body + } + ) + .end() + .on('data', buf => { + bufs.push(buf) + }) + .on('end', () => { + t.equal(wanted, Buffer.concat(bufs).toString('utf8')) + }) + .on('error', () => { + t.fail() + }) + }) +}) + +test('pipeline: fails with invalid URL', t => { + t.plan(4) + t.throws(() => pipeline(), 
errors.InvalidArgumentError, 'throws on missing url argument') + t.throws(() => pipeline(''), errors.InvalidArgumentError, 'throws on invalid url') + t.throws(() => pipeline({}), errors.InvalidArgumentError, 'throws on missing url.origin argument') + t.throws(() => pipeline({ origin: '' }), errors.InvalidArgumentError, 'throws on invalid url.origin argument') +}) + +test('pipeline: fails with invalid onInfo', (t) => { + t.plan(2) + pipeline({ origin: 'http://localhost', path: '/', onInfo: 'foo' }, () => {}).on('error', (err) => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid onInfo callback') + }) +}) + +test('request: fails with invalid onInfo', async (t) => { + try { + await request({ origin: 'http://localhost', path: '/', onInfo: 'foo' }) + t.fail('should throw') + } catch (e) { + t.ok(e) + t.equal(e.message, 'invalid onInfo callback') + } + t.end() +}) + +test('stream: fails with invalid onInfo', async (t) => { + try { + await stream({ origin: 'http://localhost', path: '/', onInfo: 'foo' }, () => new PassThrough()) + t.fail('should throw') + } catch (e) { + t.ok(e) + t.equal(e.message, 'invalid onInfo callback') + } + t.end() +}) + +test('constructor validations', t => { + t.plan(4) + t.throws(() => new Agent({ factory: 'ASD' }), errors.InvalidArgumentError, 'throws on invalid opts argument') + t.throws(() => new Agent({ maxRedirections: 'ASD' }), errors.InvalidArgumentError, 'throws on invalid opts argument') + t.throws(() => new Agent({ maxRedirections: -1 }), errors.InvalidArgumentError, 'throws on invalid opts argument') + t.throws(() => new Agent({ maxRedirections: null }), errors.InvalidArgumentError, 'throws on invalid opts argument') +}) + +test('dispatch validations', t => { + const dispatcher = new Agent() + + const noopHandler = { + onConnect () {}, + onHeaders () {}, + onData () {}, + onComplete () { + server.close() + }, + onError (err) { + throw err + } + } + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('asd') + }) + + t.plan(6) + t.throws(() => dispatcher.dispatch('ASD'), errors.InvalidArgumentError, 'throws on missing handler') + t.throws(() => dispatcher.dispatch('ASD', noopHandler), errors.InvalidArgumentError, 'throws on invalid opts argument type') + t.throws(() => dispatcher.dispatch({}, noopHandler), errors.InvalidArgumentError, 'throws on invalid opts.origin argument') + t.throws(() => dispatcher.dispatch({ origin: '' }, noopHandler), errors.InvalidArgumentError, 'throws on invalid opts.origin argument') + t.throws(() => dispatcher.dispatch({}, {}), errors.InvalidArgumentError, 'throws on invalid handler.onError') + + server.listen(0, () => { + t.doesNotThrow(() => dispatcher.dispatch({ + origin: new URL(`http://localhost:${server.address().port}`), + path: '/', + method: 'GET' + }, noopHandler)) + }) +}) + +test('drain', t => { + t.plan(2) + + const dispatcher = new Agent({ + connections: 1, + pipelining: 1 + }) + + dispatcher.on('drain', () => { + t.pass() + }) + + class Handler { + onConnect () {} + onHeaders () {} + onData () {} + onComplete () {} + onError () { + t.fail() + } + } + + const server = http.createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('asd') + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + t.equal(dispatcher.dispatch({ + origin: `http://localhost:${server.address().port}`, + method: 'GET', + path: '/' + }, new Handler()), false) + }) +}) + +test('global api', t => { + t.plan(6 * 2) + + 
const server = http.createServer((req, res) => { + if (req.url === '/bar') { + t.equal(req.method, 'PUT') + t.equal(req.url, '/bar') + } else { + t.equal(req.method, 'GET') + t.equal(req.url, '/foo') + } + req.pipe(res) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const origin = `http://localhost:${server.address().port}` + await request(origin, { path: '/foo' }) + await request(`${origin}/foo`) + await request({ origin, path: '/foo' }) + await stream({ origin, path: '/foo' }, () => new PassThrough()) + await request({ protocol: 'http:', hostname: 'localhost', port: server.address().port, path: '/foo' }) + await request(`${origin}/bar`, { body: 'asd' }) + }) +}) + +test('global api throws', t => { + const origin = 'http://asd' + t.throws(() => request(`${origin}/foo`, { path: '/foo' }), errors.InvalidArgumentError) + t.throws(() => request({ origin, path: 0 }, { path: '/foo' }), errors.InvalidArgumentError) + t.throws(() => request({ origin, pathname: 0 }, { path: '/foo' }), errors.InvalidArgumentError) + t.throws(() => request({ origin: 0 }, { path: '/foo' }), errors.InvalidArgumentError) + t.throws(() => request(0), errors.InvalidArgumentError) + t.throws(() => request(1), errors.InvalidArgumentError) + t.end() +}) + +test('unreachable request rejects and can be caught', t => { + t.plan(1) + + request('https://thisis.not/avalid/url').catch(() => { + t.pass() + }) +}) + +test('connect is not valid', t => { + t.plan(1) + + t.throws(() => new Agent({ connect: false }), errors.InvalidArgumentError, 'connect must be a function or an object') +}) + +test('the dispatcher is truly global', t => { + const agent = getGlobalDispatcher() + const undiciFresh = importFresh('../index.js') + t.equal(agent, undiciFresh.getGlobalDispatcher()) + t.end() +}) + +teardown(() => process.exit()) diff --git a/test/async_hooks.js b/test/async_hooks.js new file mode 100644 index 0000000..2e8533d --- /dev/null +++ b/test/async_hooks.js @@ -0,0 +1,206 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { createHook, executionAsyncId } = require('async_hooks') +const { readFile } = require('fs') +const { PassThrough } = require('stream') + +const transactions = new Map() + +function getCurrentTransaction () { + const asyncId = executionAsyncId() + return transactions.has(asyncId) ? 
transactions.get(asyncId) : null +} + +function setCurrentTransaction (trans) { + const asyncId = executionAsyncId() + transactions.set(asyncId, trans) +} + +const hook = createHook({ + init (asyncId, type, triggerAsyncId, resource) { + if (type === 'TIMERWRAP') return + // process._rawDebug(type + ' ' + asyncId) + transactions.set(asyncId, getCurrentTransaction()) + }, + destroy (asyncId) { + transactions.delete(asyncId) + } +}) + +hook.enable() + +test('async hooks', (t) => { + t.plan(31) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + readFile(__filename, (err, buf) => { + t.error(err) + const buf1 = buf.slice(0, buf.length / 2) + const buf2 = buf.slice(buf.length / 2) + // we split the file so that it's received in 2 chunks + // and it should restore the state on the second + res.write(buf1) + setTimeout(() => { + res.end(buf2) + }, 10) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + body.resume() + t.strictSame(getCurrentTransaction(), null) + + setCurrentTransaction({ hello: 'world2' }) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.strictSame(getCurrentTransaction(), { hello: 'world2' }) + + body.once('data', () => { + t.pass() + body.resume() + }) + + body.on('end', () => { + t.pass() + }) + }) + }) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + body.resume() + t.strictSame(getCurrentTransaction(), null) + + setCurrentTransaction({ hello: 'world' }) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.strictSame(getCurrentTransaction(), { hello: 'world' }) + + body.once('data', () => { + t.pass() + body.resume() + }) + + body.on('end', () => { + t.pass() + }) + }) + }) + + client.request({ path: '/', method: 'HEAD' }, (err, { statusCode, headers, body }) => { + t.error(err) + body.resume() + t.strictSame(getCurrentTransaction(), null) + + setCurrentTransaction({ hello: 'world' }) + + client.request({ path: '/', method: 'HEAD' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.strictSame(getCurrentTransaction(), { hello: 'world' }) + + body.once('data', () => { + t.pass() + body.resume() + }) + + body.on('end', () => { + t.pass() + }) + }) + }) + + client.stream({ path: '/', method: 'GET' }, () => { + t.strictSame(getCurrentTransaction(), null) + return new PassThrough().resume() + }, (err) => { + t.error(err) + t.strictSame(getCurrentTransaction(), null) + + setCurrentTransaction({ hello: 'world' }) + + client.stream({ path: '/', method: 'GET' }, () => { + t.strictSame(getCurrentTransaction(), { hello: 'world' }) + return new PassThrough().resume() + }, (err) => { + t.error(err) + t.strictSame(getCurrentTransaction(), { hello: 'world' }) + }) + }) + }) +}) + +test('async hooks client is destroyed', (t) => { + t.plan(7) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + readFile(__filename, (err, buf) => { + t.error(err) + res.write('asd') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ 
path: '/', method: 'GET', throwOnError: true }, (err, { body }) => { + t.error(err) + body.resume() + body.on('error', (err) => { + t.ok(err) + }) + t.strictSame(getCurrentTransaction(), null) + + setCurrentTransaction({ hello: 'world2' }) + + client.request({ path: '/', method: 'GET' }, (err) => { + t.equal(err.message, 'The client is destroyed') + t.strictSame(getCurrentTransaction(), { hello: 'world2' }) + }) + client.destroy((err) => { + t.error(err) + }) + }) + }) +}) + +test('async hooks pipeline handler', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + setCurrentTransaction({ hello: 'world2' }) + + client + .pipeline({ path: '/', method: 'GET' }, ({ body }) => { + t.strictSame(getCurrentTransaction(), { hello: 'world2' }) + return body + }) + .on('close', () => { + t.pass() + }) + .resume() + .end() + }) +}) diff --git a/test/autoselectfamily.js b/test/autoselectfamily.js new file mode 100644 index 0000000..0b44a3e --- /dev/null +++ b/test/autoselectfamily.js @@ -0,0 +1,198 @@ +'use strict' + +const { test, skip } = require('tap') +const dgram = require('dgram') +const { Resolver } = require('dns') +const dnsPacket = require('dns-packet') +const { createServer } = require('http') +const { Client, Agent, request } = require('..') +const { nodeHasAutoSelectFamily } = require('../lib/core/util') + +/* + * IMPORTANT + * + * As only some versions of Node have autoSelectFamily enabled by default (>= 20), make sure the option is always + * explicitly passed in tests in this file to avoid compatibility problems across release lines. + * + */ + +if (!nodeHasAutoSelectFamily) { + skip('autoSelectFamily is not supported') + process.exit() +} + +function _lookup (resolver, hostname, options, cb) { + resolver.resolve(hostname, 'ANY', (err, replies) => { + if (err) { + return cb(err) + } + + const hosts = replies + .map((r) => ({ address: r.address, family: r.type === 'AAAA' ?
6 : 4 })) + .sort((a, b) => b.family - a.family) + + if (options.all === true) { + return cb(null, hosts) + } + + return cb(null, hosts[0].address, hosts[0].family) + }) +} + +function createDnsServer (ipv6Addr, ipv4Addr, cb) { + // Create a DNS server which replies with a AAAA and a A record for the same host + const socket = dgram.createSocket('udp4') + + socket.on('message', (msg, { address, port }) => { + const parsed = dnsPacket.decode(msg) + + const response = dnsPacket.encode({ + type: 'answer', + id: parsed.id, + questions: parsed.questions, + answers: [ + { type: 'AAAA', class: 'IN', name: 'example.org', data: '::1', ttl: 123 }, + { type: 'A', class: 'IN', name: 'example.org', data: '127.0.0.1', ttl: 123 } + ] + }) + + socket.send(response, port, address) + }) + + socket.bind(0, () => { + const resolver = new Resolver() + resolver.setServers([`127.0.0.1:${socket.address().port}`]) + + cb(null, { dnsServer: socket, lookup: _lookup.bind(null, resolver) }) + }) +} + +test('with autoSelectFamily enable the request succeeds when using request', (t) => { + t.plan(3) + + createDnsServer('::1', '127.0.0.1', function (_, { dnsServer, lookup }) { + const server = createServer((req, res) => { + res.end('hello') + }) + + t.teardown(() => { + server.close() + dnsServer.close() + }) + + server.listen(0, '127.0.0.1', () => { + const agent = new Agent({ connect: { lookup }, autoSelectFamily: true }) + + request( + `http://example.org:${server.address().port}/`, { + method: 'GET', + dispatcher: agent + }, (err, { statusCode, body }) => { + t.error(err) + + let response = Buffer.alloc(0) + + body.on('data', chunk => { + response = Buffer.concat([response, chunk]) + }) + + body.on('end', () => { + t.strictSame(statusCode, 200) + t.strictSame(response.toString('utf-8'), 'hello') + }) + }) + }) + }) +}) + +test('with autoSelectFamily enable the request succeeds when using a client', (t) => { + t.plan(3) + + createDnsServer('::1', '127.0.0.1', function (_, { dnsServer, lookup }) { + const server = createServer((req, res) => { + res.end('hello') + }) + + t.teardown(() => { + server.close() + dnsServer.close() + }) + + server.listen(0, '127.0.0.1', () => { + const client = new Client(`http://example.org:${server.address().port}`, { connect: { lookup }, autoSelectFamily: true }) + + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { statusCode, body }) => { + t.error(err) + + let response = Buffer.alloc(0) + + body.on('data', chunk => { + response = Buffer.concat([response, chunk]) + }) + + body.on('end', () => { + t.strictSame(statusCode, 200) + t.strictSame(response.toString('utf-8'), 'hello') + }) + }) + }) + }) +}) + +test('with autoSelectFamily disabled the request fails when using request', (t) => { + t.plan(1) + + createDnsServer('::1', '127.0.0.1', function (_, { dnsServer, lookup }) { + const server = createServer((req, res) => { + res.end('hello') + }) + + t.teardown(() => { + server.close() + dnsServer.close() + }) + + server.listen(0, '127.0.0.1', () => { + const agent = new Agent({ connect: { lookup, autoSelectFamily: false } }) + + request(`http://example.org:${server.address().port}`, { + method: 'GET', + dispatcher: agent + }, (err, { statusCode, body }) => { + t.ok(['ECONNREFUSED', 'EAFNOSUPPORT'].includes(err.code)) + }) + }) + }) +}) + +test('with autoSelectFamily disabled the request fails when using a client', (t) => { + t.plan(1) + + createDnsServer('::1', '127.0.0.1', function (_, { dnsServer, lookup }) { + const server = 
createServer((req, res) => { + res.end('hello') + }) + + t.teardown(() => { + server.close() + dnsServer.close() + }) + + server.listen(0, '127.0.0.1', () => { + const client = new Client(`http://example.org:${server.address().port}`, { connect: { lookup, autoSelectFamily: false } }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { statusCode, body }) => { + t.ok(['ECONNREFUSED', 'EAFNOSUPPORT'].includes(err.code)) + }) + }) + }) +}) diff --git a/test/balanced-pool.js b/test/balanced-pool.js new file mode 100644 index 0000000..d20f926 --- /dev/null +++ b/test/balanced-pool.js @@ -0,0 +1,566 @@ +'use strict' + +const { test } = require('tap') +const { BalancedPool, Pool, Client, errors } = require('..') +const { nodeMajor } = require('../lib/core/util') +const { createServer } = require('http') +const { promisify } = require('util') + +test('throws when factory is not a function', (t) => { + t.plan(2) + + try { + new BalancedPool(null, { factory: '' }) // eslint-disable-line + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'factory must be a function.') + } +}) + +test('add/remove upstreams', (t) => { + t.plan(7) + + const upstream01 = 'http://localhost:1' + const upstream02 = 'http://localhost:2' + + const pool = new BalancedPool() + t.same(pool.upstreams, []) + + // try to remove non-existent upstream + pool.removeUpstream(upstream01) + t.same(pool.upstreams, []) + + pool.addUpstream(upstream01) + t.same(pool.upstreams, [upstream01]) + + // try to add the same upstream + pool.addUpstream(upstream01) + t.same(pool.upstreams, [upstream01]) + + pool.addUpstream(upstream02) + t.same(pool.upstreams, [upstream01, upstream02]) + + pool.removeUpstream(upstream02) + t.same(pool.upstreams, [upstream01]) + + pool.removeUpstream(upstream01) + t.same(pool.upstreams, []) +}) + +test('basic get', async (t) => { + t.plan(16) + + let server1Called = 0 + const server1 = createServer((req, res) => { + server1Called++ + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server1.close.bind(server1)) + + await promisify(server1.listen).call(server1, 0) + + let server2Called = 0 + const server2 = createServer((req, res) => { + server2Called++ + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server2.close.bind(server2)) + + await promisify(server2.listen).call(server2, 0) + + const client = new BalancedPool() + client.addUpstream(`http://localhost:${server1.address().port}`) + client.addUpstream(`http://localhost:${server2.address().port}`) + t.teardown(client.destroy.bind(client)) + + { + const { statusCode, headers, body } = await client.request({ path: '/', method: 'GET' }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + t.equal('hello', await body.text()) + } + + { + const { statusCode, headers, body } = await client.request({ path: '/', method: 'GET' }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + t.equal('hello', await body.text()) + } + + t.equal(server1Called, 1) + t.equal(server2Called, 1) + + t.equal(client.destroyed, false) + t.equal(client.closed, false) + await client.close() + t.equal(client.destroyed, true) + t.equal(client.closed, true) +}) + +test('connect/disconnect event(s)', (t) => { + const clients = 2 + + t.plan(clients * 5) + + const server = createServer((req, res) => { 
+ res.writeHead(200, { + Connection: 'keep-alive', + 'Keep-Alive': 'timeout=1s' + }) + res.end('ok') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const pool = new BalancedPool(`http://localhost:${server.address().port}`, { + connections: clients, + keepAliveTimeoutThreshold: 100 + }) + t.teardown(pool.close.bind(pool)) + + pool.on('connect', (origin, [pool, pool2, client]) => { + t.equal(client instanceof Client, true) + }) + pool.on('disconnect', (origin, [pool, pool2, client], error) => { + t.ok(client instanceof Client) + t.type(error, errors.InformationalError) + t.equal(error.code, 'UND_ERR_INFO') + }) + + for (let i = 0; i < clients; i++) { + pool.request({ + path: '/', + method: 'GET' + }, (err, { headers, body }) => { + t.error(err) + body.resume() + }) + } + }) +}) + +test('busy', (t) => { + t.plan(8 * 6 + 2 + 1) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new BalancedPool(`http://localhost:${server.address().port}`, { + connections: 2, + pipelining: 2 + }) + client.on('drain', () => { + t.pass() + }) + client.on('connect', () => { + t.pass() + }) + t.teardown(client.destroy.bind(client)) + + for (let n = 1; n <= 8; ++n) { + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + } + }) +}) + +test('factory option with basic get request', async (t) => { + t.plan(12) + + let factoryCalled = 0 + const opts = { + factory: (origin, opts) => { + factoryCalled++ + return new Pool(origin, opts) + } + } + + const client = new BalancedPool([], opts) // eslint-disable-line + + let serverCalled = 0 + const server = createServer((req, res) => { + serverCalled++ + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen).call(server, 0) + + client.addUpstream(`http://localhost:${server.address().port}`) + + t.same(client.upstreams, [`http://localhost:${server.address().port}`]) + + t.teardown(client.destroy.bind(client)) + + { + const { statusCode, headers, body } = await client.request({ path: '/', method: 'GET' }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + t.equal('hello', await body.text()) + } + + t.equal(serverCalled, 1) + t.equal(factoryCalled, 1) + + t.equal(client.destroyed, false) + t.equal(client.closed, false) + await client.close() + t.equal(client.destroyed, true) + t.equal(client.closed, true) +}) + +test('throws when upstream is missing', async (t) => { + t.plan(2) + + const pool = new BalancedPool() + + try { + await pool.request({ path: '/', method: 'GET' }) + } catch (e) { + t.type(e, errors.BalancedPoolMissingUpstreamError) + t.equal(e.message, 'No upstream has been added to the BalancedPool') + } +}) + +class TestServer { + constructor ({ config: { server, socketHangup, downOnRequests, socketHangupOnRequests }, onRequest }) { + this.config = { + downOnRequests: downOnRequests || [], + socketHangupOnRequests: socketHangupOnRequests || [], + socketHangup + } + this.name = server + 
// start a server listening to any port available on the host + this.port = 0 + this.iteration = 0 + this.requestsCount = 0 + this.onRequest = onRequest + this.server = null + } + + _shouldHangupOnClient () { + if (this.config.socketHangup) { + return true + } + if (this.config.socketHangupOnRequests.includes(this.requestsCount)) { + return true + } + + return false + } + + _shouldStopServer () { + if (this.config.upstreamDown === true || this.config.downOnRequests.includes(this.requestsCount)) { + return true + } + return false + } + + async prepareForIteration (iteration) { + // set current iteration + this.iteration = iteration + + if (this._shouldStopServer()) { + await this.stop() + } else if (!this.isRunning()) { + await this.start() + } + } + + start () { + this.server = createServer((req, res) => { + if (this._shouldHangupOnClient()) { + req.destroy(new Error('(ツ)')) + return + } + this.requestsCount++ + res.end('server is running!') + + this.onRequest(this) + }).listen(this.port) + + this.server.keepAliveTimeout = 2000 + + return new Promise((resolve) => { + this.server.on('listening', () => { + // store the used port to use it again if the server was stopped as part of test and then started again + this.port = this.server.address().port + + return resolve() + }) + }) + } + + isRunning () { + return !!this.server.address() + } + + stop () { + if (!this.isRunning()) { + return + } + + return new Promise(resolve => { + this.server.close(() => resolve()) + }) + } +} + +const cases = [ + + // 0 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 7, + config: [{ server: 'A' }, { server: 'B' }, { server: 'C' }], + expected: ['A', 'B', 'C', 'A', 'B', 'C', 'A', 'B', 'C', 'A', 'B', 'C'], + expectedConnectionRefusedErrors: 0, + expectedSocketErrors: 0, + expectedRatios: [0.34, 0.33, 0.33] + }, + + // 1 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A', downOnRequests: [0] }, { server: 'B' }, { server: 'C' }], + expected: ['A/connectionRefused', 'B', 'C', 'B', 'C', 'B', 'C', 'A', 'B', 'C', 'A'], + expectedConnectionRefusedErrors: 1, + expectedSocketErrors: 0, + expectedRatios: [0.32, 0.34, 0.34] + }, + + // 2 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A' }, { server: 'B', downOnRequests: [0] }, { server: 'C' }], + expected: ['A', 'B/connectionRefused', 'C', 'A', 'C', 'A', 'C', 'A', 'B', 'C'], + expectedConnectionRefusedErrors: 1, + expectedSocketErrors: 0, + expectedRatios: [0.34, 0.32, 0.34] + }, + + // 3 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A' }, { server: 'B', downOnRequests: [0] }, { server: 'C', downOnRequests: [0] }], + expected: ['A', 'B/connectionRefused', 'C/connectionRefused', 'A', 'A', 'A', 'B', 'C'], + expectedConnectionRefusedErrors: 2, + expectedSocketErrors: 0, + expectedRatios: [0.35, 0.33, 0.32] + }, + + // 4 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A', downOnRequests: [0] }, { server: 'B', downOnRequests: [0] }, { server: 'C', downOnRequests: [0] }], + expected: ['A/connectionRefused', 'B/connectionRefused', 'C/connectionRefused', 'A', 'B', 'C', 'A', 'B', 'C'], + expectedConnectionRefusedErrors: 3, + expectedSocketErrors: 0, + expectedRatios: [0.34, 0.33, 0.33] + }, + + // 5 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A', downOnRequests: [0, 1, 2] }, { server: 'B', downOnRequests: [0, 1, 2] }, { server: 
'C', downOnRequests: [0, 1, 2] }], + expected: ['A/connectionRefused', 'B/connectionRefused', 'C/connectionRefused', 'A/connectionRefused', 'B/connectionRefused', 'C/connectionRefused', 'A/connectionRefused', 'B/connectionRefused', 'C/connectionRefused', 'A', 'B', 'C', 'A', 'B', 'C'], + expectedConnectionRefusedErrors: 9, + expectedSocketErrors: 0, + expectedRatios: [0.34, 0.33, 0.33] + }, + + // 6 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A', downOnRequests: [0] }, { server: 'B', downOnRequests: [0, 1] }, { server: 'C', downOnRequests: [0] }], + expected: ['A/connectionRefused', 'B/connectionRefused', 'C/connectionRefused', 'A', 'B/connectionRefused', 'C', 'A', 'B', 'C', 'A', 'B', 'C', 'A', 'C', 'A', 'C', 'A', 'C', 'A', 'B'], + expectedConnectionRefusedErrors: 4, + expectedSocketErrors: 0, + expectedRatios: [0.36, 0.29, 0.35] + }, + + // 7 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A' }, { server: 'B' }, { server: 'C', downOnRequests: [1] }], + expected: ['A', 'B', 'C', 'A', 'B', 'C/connectionRefused', 'A', 'B', 'A', 'B', 'A', 'B', 'C', 'A', 'B', 'C'], + expectedConnectionRefusedErrors: 1, + expectedSocketErrors: 0, + expectedRatios: [0.34, 0.34, 0.32], + + // Skip because the behavior of Node.js has changed + skip: nodeMajor >= 19 + }, + + // 8 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A', socketHangupOnRequests: [1] }, { server: 'B' }, { server: 'C' }], + expected: ['A', 'B', 'C', 'A/socketError', 'B', 'C', 'B', 'C', 'B', 'C', 'A'], + expectedConnectionRefusedErrors: 0, + expectedSocketErrors: 1, + expectedRatios: [0.32, 0.34, 0.34] + }, + + // 9 + + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 7, + config: [{ server: 'A' }, { server: 'B' }, { server: 'C' }, { server: 'D' }, { server: 'E' }], + expected: ['A', 'B', 'C', 'D', 'E', 'A', 'B', 'C', 'D', 'E'], + expectedConnectionRefusedErrors: 0, + expectedSocketErrors: 0, + expectedRatios: [0.2, 0.2, 0.2, 0.2, 0.2] + }, + + // 10 + { + iterations: 100, + maxWeightPerServer: 100, + errorPenalty: 15, + config: [{ server: 'A', downOnRequests: [0, 1, 2, 3] }, { server: 'B' }, { server: 'C' }], + expected: ['A/connectionRefused', 'B', 'C', 'B', 'C', 'B', 'C', 'A/connectionRefused', 'B', 'C', 'B', 'C', 'A/connectionRefused', 'B', 'C', 'B', 'C', 'A/connectionRefused', 'B', 'C', 'A', 'B', 'C', 'A', 'B', 'C'], + expectedConnectionRefusedErrors: 4, + expectedSocketErrors: 0, + expectedRatios: [0.18, 0.41, 0.41] + } + +] + +for (const [index, { config, expected, expectedRatios, iterations = 9, expectedConnectionRefusedErrors = 0, expectedSocketErrors = 0, maxWeightPerServer, errorPenalty = 10, only = false, skip = false }] of cases.entries()) { + test(`weighted round robin - case ${index}`, { only, skip }, async (t) => { + // create an array to store successful requests + const requestLog = [] + + // create instances of the test servers according to the config + const servers = config.map((serverConfig) => new TestServer({ + config: serverConfig, + onRequest: (server) => { + requestLog.push(server.name) + } + })) + t.teardown(() => servers.map(server => server.stop())) + + // start all servers to get a port so that we can build the upstream urls to supply them to undici + await Promise.all(servers.map(server => server.start())) + + // build upstream urls + const urls = servers.map(server => `http://localhost:${server.port}`) + + // add upstreams + const client = new
BalancedPool(urls[0], { maxWeightPerServer, errorPenalty }) + urls.slice(1).map(url => client.addUpstream(url)) + + let connectionRefusedErrors = 0 + let socketErrors = 0 + for (let i = 0; i < iterations; i++) { + // set up test servers for the next iteration + + await Promise.all(servers.map(server => server.prepareForIteration(i))) + + // send a request using undici + try { + await client.request({ path: '/', method: 'GET' }) + } catch (e) { + const serverWithError = + servers.find(server => server.port === e.port) || + servers.find(server => { + if (typeof AggregateError === 'function' && e instanceof AggregateError) { + return e.errors.some(e => server.port === (e.socket?.remotePort ?? e.port)) + } + + return server.port === e.socket.remotePort + }) + + serverWithError.requestsCount++ + + if (e.code === 'ECONNREFUSED') { + requestLog.push(`${serverWithError.name}/connectionRefused`) + connectionRefusedErrors++ + } + if (e.code === 'UND_ERR_SOCKET') { + requestLog.push(`${serverWithError.name}/socketError`) + + socketErrors++ + } + } + } + const totalRequests = servers.reduce((acc, server) => { + return acc + server.requestsCount + }, 0) + + t.equal(totalRequests, iterations) + + t.equal(connectionRefusedErrors, expectedConnectionRefusedErrors) + t.equal(socketErrors, expectedSocketErrors) + + if (expectedRatios) { + const ratios = servers.reduce((acc, el) => { + acc[el.name] = 0 + return acc + }, {}) + requestLog.map(el => ratios[el[0]]++) + + t.match(Object.keys(ratios).map(k => ratios[k] / iterations), expectedRatios) + } + + if (expected) { + t.match(requestLog.slice(0, expected.length), expected) + } + + await client.close() + }) +} diff --git a/test/ca-fingerprint.js b/test/ca-fingerprint.js new file mode 100644 index 0000000..f71063f --- /dev/null +++ b/test/ca-fingerprint.js @@ -0,0 +1,126 @@ +'use strict' + +const crypto = require('crypto') +const https = require('https') +const { test } = require('tap') +const { Client, buildConnector } = require('..') +const pem = require('https-pem') + +const caFingerprint = getFingerprint(pem.cert.toString() + .split('\n') + .slice(1, -1) + .map(line => line.trim()) + .join('') +) + +test('Validate CA fingerprint with a custom connector', t => { + t.plan(2) + + const server = https.createServer(pem, (req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + + server.listen(0, function () { + const connector = buildConnector({ rejectUnauthorized: false }) + const client = new Client(`https://localhost:${server.address().port}`, { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if (getIssuerCertificate(socket).fingerprint256 !== caFingerprint) { + socket.destroy() + cb(new Error('Fingerprint does not match')) + } else { + cb(null, socket) + } + }) + } + }) + + t.teardown(() => { + client.close() + server.close() + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + }) +}) + +test('Bad CA fingerprint with a custom connector', t => { + t.plan(2) + + const server = https.createServer(pem, (req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + + server.listen(0, function () { + const connector = buildConnector({ rejectUnauthorized: false }) + const client = new Client(`https://localhost:${server.address().port}`, { + connect (opts, cb) { + connector(opts, (err, socket) => { + if (err) { + cb(err) + } else if
(getIssuerCertificate(socket).fingerprint256 !== 'FO:OB:AR') { + socket.destroy() + cb(new Error('Fingerprint does not match')) + } else { + cb(null, socket) + } + }) + } + }) + + t.teardown(() => { + client.close() + server.close() + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.equal(err.message, 'Fingerprint does not match') + t.equal(data.body, undefined) + }) + }) +}) + +function getIssuerCertificate (socket) { + let certificate = socket.getPeerCertificate(true) + while (certificate && Object.keys(certificate).length > 0) { + // invalid certificate + if (certificate.issuerCertificate == null) { + return null + } + + // We have reached the root certificate. + // In case of self-signed certificates, `issuerCertificate` may be a circular reference. + if (certificate.fingerprint256 === certificate.issuerCertificate.fingerprint256) { + break + } + + // continue the loop + certificate = certificate.issuerCertificate + } + return certificate +} + +function getFingerprint (content, inputEncoding = 'base64', outputEncoding = 'hex') { + const shasum = crypto.createHash('sha256') + shasum.update(content, inputEncoding) + const res = shasum.digest(outputEncoding) + return res.toUpperCase().match(/.{1,2}/g).join(':') +} diff --git a/test/client-abort.js b/test/client-abort.js new file mode 100644 index 0000000..5854bc2 --- /dev/null +++ b/test/client-abort.js @@ -0,0 +1,213 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') + +class OnAbortError extends Error {} + +test('aborted response errors', (t) => { + t.plan(3) + + const server = createServer() + server.once('request', (req, res) => { + // TODO: res.write will cause body to emit 'error' twice + // due to bug in readable-stream. 
+ res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + body.destroy() + body + .on('error', err => { + t.type(err, errors.RequestAbortedError) + }) + .on('close', () => { + t.pass() + }) + }) + }) +}) + +test('aborted req', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end(Buffer.alloc(4 + 1, 'a')) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + method: 'POST', + path: '/', + body: new Readable({ + read () { + setImmediate(() => { + this.destroy() + }) + } + }) + }, (err) => { + t.type(err, errors.RequestAbortedError) + }) + }) +}) + +test('abort', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.dispatch({ + method: 'GET', + path: '/' + }, { + onConnect (abort) { + setImmediate(abort) + }, + onHeaders () { + t.fail() + }, + onData () { + t.fail() + }, + onComplete () { + t.fail() + }, + onError (err) { + t.type(err, errors.RequestAbortedError) + } + }) + + client.on('disconnect', () => { + t.pass() + }) + }) +}) + +test('abort pipelined', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.destroy.bind(client)) + + let counter = 0 + client.dispatch({ + method: 'GET', + path: '/' + }, { + onConnect (abort) { + // This request will be retried + if (counter++ === 1) { + abort() + } + t.pass() + }, + onHeaders () { + t.fail() + }, + onData () { + t.fail() + }, + onComplete () { + t.fail() + }, + onError (err) { + t.type(err, errors.RequestAbortedError) + } + }) + + client.dispatch({ + method: 'GET', + path: '/' + }, { + onConnect (abort) { + abort() + }, + onHeaders () { + t.fail() + }, + onData () { + t.fail() + }, + onComplete () { + t.fail() + }, + onError (err) { + t.type(err, errors.RequestAbortedError) + } + }) + + client.on('disconnect', () => { + t.pass() + }) + }) +}) + +test('propagate unallowed throws in request.onError', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.dispatch({ + method: 'GET', + path: '/' + }, { + onConnect (abort) { + setImmediate(abort) + }, + onHeaders () { + t.pass() + }, + onData () { + t.pass() + }, + onComplete () { + t.pass() + }, + onError () { + throw new OnAbortError('error') + } + }) + + client.on('error', (err) => { + t.type(err, OnAbortError) + }) + + client.on('disconnect', () => { + t.pass() + }) + }) +}) diff --git a/test/client-connect.js b/test/client-connect.js new file mode 100644 index 0000000..7c8ca5e --- /dev/null +++ b/test/client-connect.js @@ -0,0 +1,308 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = 
require('..') +const http = require('http') +const EE = require('events') +const { kBusy } = require('../lib/core/symbols') + +test('basic connect', (t) => { + t.plan(3) + + const server = http.createServer((c) => { + t.fail() + }) + server.on('connect', (req, socket, firstBodyChunk) => { + socket.write('HTTP/1.1 200 Connection established\r\n\r\n') + + let data = firstBodyChunk.toString() + socket.on('data', (buf) => { + data += buf.toString() + }) + + socket.on('end', () => { + socket.end(data) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const signal = new EE() + const promise = client.connect({ + signal, + path: '/' + }) + t.equal(signal.listenerCount('abort'), 1) + const { socket } = await promise + t.equal(signal.listenerCount('abort'), 0) + + let recvData = '' + socket.on('data', (d) => { + recvData += d + }) + + socket.on('end', () => { + t.equal(recvData.toString(), 'Body') + }) + + socket.write('Body') + socket.end() + }) +}) + +test('connect error', (t) => { + t.plan(1) + + const server = http.createServer((c) => { + t.fail() + }) + server.on('connect', (req, socket, firstBodyChunk) => { + socket.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + await client.connect({ + path: '/' + }) + } catch (err) { + t.ok(err) + } + }) +}) + +test('connect invalid opts', (t) => { + t.plan(6) + + const client = new Client('http://localhost:5432') + + client.connect(null, err => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid opts') + }) + + try { + client.connect(null, null) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid opts') + } + + try { + client.connect({ path: '/' }, null) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid callback') + } +}) + +test('connect wait for empty pipeline', (t) => { + t.plan(7) + + let canConnect = false + const server = http.createServer((req, res) => { + res.end() + canConnect = true + }) + server.on('connect', (req, socket, firstBodyChunk) => { + t.equal(canConnect, true) + socket.write('HTTP/1.1 200 Connection established\r\n\r\n') + + let data = firstBodyChunk.toString() + socket.on('data', (buf) => { + data += buf.toString() + }) + + socket.on('end', () => { + socket.end(data) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + client.once('connect', () => { + process.nextTick(() => { + t.equal(client[kBusy], false) + + client.connect({ + path: '/' + }, (err, { socket }) => { + t.error(err) + let recvData = '' + socket.on('data', (d) => { + recvData += d + }) + + socket.on('end', () => { + t.equal(recvData.toString(), 'Body') + }) + + socket.write('Body') + socket.end() + }) + t.equal(client[kBusy], true) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + }) + }) + }) +}) + +test('connect aborted', (t) => { + t.plan(6) + + const server = http.createServer((req, res) => { + t.fail() + }) + server.on('connect', 
(req, c, firstBodyChunk) => { + t.fail() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.destroy.bind(client)) + + const signal = new EE() + client.connect({ + path: '/', + signal, + opaque: 'asd' + }, (err, { opaque }) => { + t.equal(opaque, 'asd') + t.equal(signal.listenerCount('abort'), 0) + t.type(err, errors.RequestAbortedError) + }) + t.equal(client[kBusy], true) + t.equal(signal.listenerCount('abort'), 1) + signal.emit('abort') + + client.close(() => { + t.pass() + }) + }) +}) + +test('basic connect error', (t) => { + t.plan(2) + + const server = http.createServer((c) => { + t.fail() + }) + server.on('connect', (req, socket, firstBodyChunk) => { + socket.write('HTTP/1.1 200 Connection established\r\n\r\n') + + let data = firstBodyChunk.toString() + socket.on('data', (buf) => { + data += buf.toString() + }) + + socket.on('end', () => { + socket.end(data) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const _err = new Error() + client.connect({ + path: '/' + }, (err, { socket }) => { + t.error(err) + socket.on('error', (err) => { + t.equal(err, _err) + }) + throw _err + }) + }) +}) + +test('connect invalid signal', (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + t.fail() + }) + server.on('connect', (req, c, firstBodyChunk) => { + t.fail() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.on('disconnect', () => { + t.fail() + }) + + client.connect({ + path: '/', + signal: 'error', + opaque: 'asd' + }, (err, { opaque }) => { + t.equal(opaque, 'asd') + t.type(err, errors.InvalidArgumentError) + }) + }) +}) + +test('connect aborted after connect', (t) => { + t.plan(3) + + const signal = new EE() + const server = http.createServer((req, res) => { + t.fail() + }) + server.on('connect', (req, c, firstBodyChunk) => { + signal.emit('abort') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.destroy.bind(client)) + + client.connect({ + path: '/', + signal, + opaque: 'asd' + }, (err, { opaque }) => { + t.equal(opaque, 'asd') + t.type(err, errors.RequestAbortedError) + }) + t.equal(client[kBusy], true) + }) +}) diff --git a/test/client-dispatch.js b/test/client-dispatch.js new file mode 100644 index 0000000..c3de37a --- /dev/null +++ b/test/client-dispatch.js @@ -0,0 +1,815 @@ +'use strict' + +const { test } = require('tap') +const http = require('http') +const { Client, Pool, errors } = require('..') +const stream = require('stream') + +test('dispatch invalid opts', (t) => { + t.plan(14) + + const client = new Client('http://localhost:5000') + + try { + client.dispatch({ + path: '/', + method: 'GET', + upgrade: 1 + }, null) + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'handler must be an object') + } + + try { + client.dispatch({ + path: '/', + method: 'GET', + upgrade: 1 + }, 'asd') + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'handler must be an object') + } + + client.dispatch({ + path: '/', + method: 'GET', + 
upgrade: 1 + }, { + onError (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'upgrade must be a string') + } + }) + + client.dispatch({ + path: '/', + method: 'GET', + headersTimeout: 'asd' + }, { + onError (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid headersTimeout') + } + }) + + client.dispatch({ + path: '/', + method: 'GET', + bodyTimeout: 'asd' + }, { + onError (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid bodyTimeout') + } + }) + + client.dispatch({ + origin: 'another', + path: '/', + method: 'GET', + bodyTimeout: 'asd' + }, { + onError (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid bodyTimeout') + } + }) + + client.dispatch(null, { + onError (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'opts must be an object.') + } + }) +}) + +test('basic dispatch get', (t) => { + t.plan(11) + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(undefined, req.headers.foo) + t.equal('bar', req.headers.bar) + t.equal('', req.headers.baz) + t.equal(undefined, req.headers['content-length']) + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const reqHeaders = { + foo: undefined, + bar: 'bar', + baz: null + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const bufs = [] + client.dispatch({ + path: '/', + method: 'GET', + headers: reqHeaders + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + t.equal(Array.isArray(headers), true) + }, + onData (buf) { + bufs.push(buf) + }, + onComplete (trailers) { + t.same(trailers, []) + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }, + onError () { + t.fail() + } + }) + }) +}) + +test('trailers dispatch get', (t) => { + t.plan(12) + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(undefined, req.headers.foo) + t.equal('bar', req.headers.bar) + t.equal(undefined, req.headers['content-length']) + res.addTrailers({ 'Content-MD5': 'test' }) + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Trailer', 'Content-MD5') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const reqHeaders = { + foo: undefined, + bar: 'bar' + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const bufs = [] + client.dispatch({ + path: '/', + method: 'GET', + headers: reqHeaders + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + t.equal(Array.isArray(headers), true) + { + const contentTypeIdx = headers.findIndex(x => x.toString() === 'Content-Type') + t.equal(headers[contentTypeIdx + 1].toString(), 'text/plain') + } + }, + onData (buf) { + bufs.push(buf) + }, + onComplete (trailers) { + t.equal(Array.isArray(trailers), true) + { + const contentMD5Idx = trailers.findIndex(x => x.toString() === 'Content-MD5') + t.equal(trailers[contentMD5Idx + 1].toString(), 'test') + } + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }, + onError () { + t.fail() + } + }) + }) +}) + +test('dispatch onHeaders error', (t) => { + t.plan(1) + + const server = 
http.createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const _err = new Error() + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + throw _err + }, + onData (buf) { + t.fail() + }, + onComplete (trailers) { + t.fail() + }, + onError (err) { + t.equal(err, _err) + } + }) + }) +}) + +test('dispatch onComplete error', (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const _err = new Error() + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.pass() + }, + onData (buf) { + t.fail() + }, + onComplete (trailers) { + throw _err + }, + onError (err) { + t.equal(err, _err) + } + }) + }) +}) + +test('dispatch onData error', (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const _err = new Error() + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.pass() + }, + onData (buf) { + throw _err + }, + onComplete (trailers) { + t.fail() + }, + onError (err) { + t.equal(err, _err) + } + }) + }) +}) + +test('dispatch onConnect error', (t) => { + t.plan(1) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const _err = new Error() + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + throw _err + }, + onHeaders (statusCode, headers) { + t.fail() + }, + onData (buf) { + t.fail() + }, + onComplete (trailers) { + t.fail() + }, + onError (err) { + t.equal(err, _err) + } + }) + }) +}) + +test('connect call onUpgrade once', (t) => { + t.plan(2) + + const server = http.createServer((c) => { + t.fail() + }) + server.on('connect', (req, socket, firstBodyChunk) => { + socket.write('HTTP/1.1 200 Connection established\r\n\r\n') + + let data = firstBodyChunk.toString() + socket.on('data', (buf) => { + data += buf.toString() + }) + + socket.on('end', () => { + socket.end(data) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let recvData = '' + let count = 0 + client.dispatch({ + method: 'CONNECT', + path: '/' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.pass('should not throw') + }, + onUpgrade (statusCode, headers, socket) { + t.equal(count++, 0) + + socket.on('data', (d) => { + recvData += d + }) + + socket.on('end', () => { + t.equal(recvData.toString(), 'Body') + }) + + socket.write('Body') + socket.end() + }, + onData (buf) { + t.fail() + }, + onComplete (trailers) { + t.fail() + }, + onError () { + t.fail() + } + }) + }) +}) + +test('dispatch onConnect missing', (t) => { + t.plan(1) + + const 
server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onHeaders (statusCode, headers) { + t.pass('should not throw') + }, + onData (buf) { + t.pass('should not throw') + }, + onComplete (trailers) { + t.pass('should not throw') + }, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + } + }) + }) +}) + +test('dispatch onHeaders missing', (t) => { + t.plan(1) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onData (buf) { + t.fail('should not throw') + }, + onComplete (trailers) { + t.fail('should not throw') + }, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + } + }) + }) +}) + +test('dispatch onData missing', (t) => { + t.plan(1) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.fail('should not throw') + }, + onComplete (trailers) { + t.fail('should not throw') + }, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + } + }) + }) +}) + +test('dispatch onComplete missing', (t) => { + t.plan(1) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.fail() + }, + onData (buf) { + t.fail() + }, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + } + }) + }) +}) + +test('dispatch onError missing', (t) => { + t.plan(1) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.fail() + }, + onData (buf) { + t.fail() + }, + onComplete (trailers) { + t.fail() + } + }) + } catch (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + } + }) +}) + +test('dispatch CONNECT onUpgrade missing', (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET', + upgrade: 'Websocket' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + }, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(err.message, 'invalid onUpgrade method') + } + }) + }) +}) + +test('dispatch upgrade onUpgrade 
missing', (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET', + upgrade: 'Websocket' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + }, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(err.message, 'invalid onUpgrade method') + } + }) + }) +}) + +test('dispatch pool onError missing', (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + client.dispatch({ + path: '/', + method: 'GET', + upgrade: 'Websocket' + }, { + }) + } catch (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(err.message, 'invalid onError method') + } + }) +}) + +test('dispatch onBodySent not a function', (t) => { + t.plan(2) + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onBodySent: '42', + onConnect () {}, + onHeaders () {}, + onData () {}, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(err.message, 'invalid onBodySent method') + } + }) + }) +}) + +test('dispatch onBodySent buffer', (t) => { + t.plan(3) + + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + const body = 'hello 🚀' + client.dispatch({ + path: '/', + method: 'POST', + body + }, { + onBodySent (chunk) { + t.equal(chunk.toString(), body) + }, + onRequestSent () { + t.pass() + }, + onError (err) { + throw err + }, + onConnect () {}, + onHeaders () {}, + onData () {}, + onComplete () { + t.pass() + } + }) + }) +}) + +test('dispatch onBodySent stream', (t) => { + t.plan(8) + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + const chunks = ['he', 'llo', 'world', '🚀'] + const toSendBytes = chunks.reduce((a, b) => a + Buffer.byteLength(b), 0) + const body = stream.Readable.from(chunks) + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + let sentBytes = 0 + let currentChunk = 0 + client.dispatch({ + path: '/', + method: 'POST', + body + }, { + onBodySent (chunk) { + t.equal(chunks[currentChunk++], chunk) + sentBytes += Buffer.byteLength(chunk) + }, + onRequestSent () { + t.pass() + }, + onError (err) { + throw err + }, + onConnect () {}, + onHeaders () {}, + onData () {}, + onComplete () { + t.equal(currentChunk, chunks.length) + t.equal(sentBytes, toSendBytes) + t.pass() + } + }) + }) +}) + +test('dispatch onBodySent async-iterable', (t) => { + const server = http.createServer((req, res) => { + res.end('ad') + }) + t.teardown(server.close.bind(server)) + const chunks = ['he', 'llo', 'world', '🚀'] + const toSendBytes = chunks.reduce((a, b) => a + Buffer.byteLength(b), 0) + 
server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + let sentBytes = 0 + let currentChunk = 0 + client.dispatch({ + path: '/', + method: 'POST', + body: chunks + }, { + onBodySent (chunk) { + t.equal(chunks[currentChunk++], chunk) + sentBytes += Buffer.byteLength(chunk) + }, + onError (err) { + throw err + }, + onConnect () {}, + onHeaders () {}, + onData () {}, + onComplete () { + t.equal(currentChunk, chunks.length) + t.equal(sentBytes, toSendBytes) + t.end() + } + }) + }) +}) + +test('dispatch onBodySent throws error', (t) => { + const server = http.createServer((req, res) => { + res.end('ended') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + const body = 'hello' + client.dispatch({ + path: '/', + method: 'POST', + body + }, { + onBodySent (chunk) { + throw new Error('fail') + }, + onError (err) { + t.type(err, Error) + t.equal(err.message, 'fail') + t.end() + }, + onConnect () {}, + onHeaders () {}, + onData () {}, + onComplete () {} + }) + }) +}) diff --git a/test/client-errors.js b/test/client-errors.js new file mode 100644 index 0000000..cec7f37 --- /dev/null +++ b/test/client-errors.js @@ -0,0 +1,1285 @@ +'use strict' + +const { test } = require('tap') +const { Client, Pool, errors } = require('..') +const { createServer } = require('http') +const https = require('https') +const pem = require('https-pem') +const net = require('net') +const { Readable } = require('stream') + +const { kSocket } = require('../lib/core/symbols') +const { wrapWithAsyncIterable, maybeWrapStream, consts } = require('./utils/async-iterators') + +class IteratorError extends Error {} + +test('GET errors and reconnect with pipelining 1', (t) => { + t.plan(9) + + const server = createServer() + + server.once('request', (req, res) => { + t.pass('first request received, destroying') + res.socket.destroy() + + server.once('request', (req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', idempotent: false, opaque: 'asd' }, (err, data) => { + t.type(err, Error) // we are expecting an error + t.equal(data.opaque, 'asd') + }) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('GET errors and reconnect with pipelining 3', (t) => { + const server = createServer() + const requestsThatWillError = 3 + let requests = 0 + + t.plan(6 + requestsThatWillError * 3) + + server.on('request', (req, res) => { + if (requests++ < requestsThatWillError) { + t.pass('request received, destroying') + + // socket might not be there if it was destroyed by another + // pipelined request + if (res.socket) { + res.socket.destroy() + } + } else { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + } + }) + 
t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.destroy.bind(client)) + + // all of these will error + for (let i = 0; i < 3; i++) { + client.request({ path: '/', method: 'GET', idempotent: false, opaque: 'asd' }, (err, data) => { + t.type(err, Error) // we are expecting an error + t.equal(data.opaque, 'asd') + }) + } + + // this will be queued up + client.request({ path: '/', method: 'GET', idempotent: false }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +function errorAndPipelining (type) { + test(`POST with a ${type} that errors and pipelining 1 should reconnect`, (t) => { + t.plan(12) + + const server = createServer() + server.once('request', (req, res) => { + t.equal('/', req.url) + t.equal('POST', req.method) + t.equal('42', req.headers['content-length']) + + const bufs = [] + req.on('data', (buf) => { + bufs.push(buf) + }) + + req.on('aborted', () => { + // we will abruptly close the connection here + // but this will still end + t.equal('a string', Buffer.concat(bufs).toString('utf8')) + }) + + server.once('request', (req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'POST', + headers: { + // higher than the length of the string + 'content-length': 42 + }, + opaque: 'asd', + body: maybeWrapStream(new Readable({ + read () { + this.push('a string') + this.destroy(new Error('kaboom')) + } + }), type) + }, (err, data) => { + t.equal(err.message, 'kaboom') + t.equal(data.opaque, 'asd') + }) + + // this will be queued up + client.request({ path: '/', method: 'GET', idempotent: false }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + }) +} + +errorAndPipelining(consts.STREAM) +errorAndPipelining(consts.ASYNC_ITERATOR) + +function errorAndChunkedEncodingPipelining (type) { + test(`POST with chunked encoding, ${type} body that errors and pipelining 1 should reconnect`, (t) => { + t.plan(12) + + const server = createServer() + server.once('request', (req, res) => { + t.equal('/', req.url) + t.equal('POST', req.method) + t.equal(req.headers['content-length'], undefined) + + const bufs = [] + req.on('data', (buf) => { + bufs.push(buf) + }) + + req.on('aborted', () => { + // we will abruptly close the connection here + // but this will still end + t.equal('a string', Buffer.concat(bufs).toString('utf8')) + }) + + server.once('request', (req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + 
t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'POST', + opaque: 'asd', + body: maybeWrapStream(new Readable({ + read () { + this.push('a string') + this.destroy(new Error('kaboom')) + } + }), type) + }, (err, data) => { + t.equal(err.message, 'kaboom') + t.equal(data.opaque, 'asd') + }) + + // this will be queued up + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + }) +} + +errorAndChunkedEncodingPipelining(consts.STREAM) +errorAndChunkedEncodingPipelining(consts.ASYNC_ITERATOR) + +test('invalid options throws', (t) => { + try { + new Client({ port: 'foobar', protocol: 'https:' }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'Invalid URL: port must be a valid integer or a string representation of an integer.') + } + + try { + new Client(new URL('http://asd:200/somepath')) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid url') + } + + try { + new Client(new URL('http://asd:200?q=asd')) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid url') + } + + try { + new Client(new URL('http://asd:200#asd')) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid url') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + socketPath: 1 + }) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid socketPath') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + keepAliveTimeout: 'asd' + }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid keepAliveTimeout') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + localAddress: 123 + }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'localAddress must be valid string IP address') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + localAddress: 'abcd123' + }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'localAddress must be valid string IP address') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + keepAliveMaxTimeout: 'asd' + }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid keepAliveMaxTimeout') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + keepAliveMaxTimeout: 0 + }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid keepAliveMaxTimeout') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + keepAliveTimeoutThreshold: 'asd' + }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid 
keepAliveTimeoutThreshold') + } + + try { + new Client({ // eslint-disable-line + protocol: 'asd' + }) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + try { + new Client({ // eslint-disable-line + hostname: 1 + }) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + try { + new Client(new URL('http://localhost:200'), { // eslint-disable-line + maxHeaderSize: 'asd' + }) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid maxHeaderSize') + } + + try { + new Client(1) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'Invalid URL: The URL argument must be a non-null object.') + } + + try { + const client = new Client(new URL('http://localhost:200')) // eslint-disable-line + client.destroy(null, null) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid callback') + } + + try { + const client = new Client(new URL('http://localhost:200')) // eslint-disable-line + client.close(null, null) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid callback') + } + + try { + new Client(new URL('http://localhost:200'), { maxKeepAliveTimeout: 1e3 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') + } + + try { + new Client(new URL('http://localhost:200'), { keepAlive: false }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'unsupported keepAlive, use pipelining=0 instead') + } + + try { + new Client(new URL('http://localhost:200'), { idleTimeout: 30e3 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'unsupported idleTimeout, use keepAliveTimeout instead') + } + + try { + new Client(new URL('http://localhost:200'), { socketTimeout: 30e3 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'unsupported socketTimeout, use headersTimeout & bodyTimeout instead') + } + + try { + new Client(new URL('http://localhost:200'), { requestTimeout: 30e3 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + } + + try { + new Client(new URL('http://localhost:200'), { connectTimeout: -1 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid connectTimeout') + } + + try { + new Client(new URL('http://localhost:200'), { connectTimeout: Infinity }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid connectTimeout') + } + + try { + new Client(new URL('http://localhost:200'), { connectTimeout: 'asd' }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid connectTimeout') + } + + try { + new Client(new URL('http://localhost:200'), { connect: 'asd' }) // 
eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'connect must be a function or an object') + } + + try { + new Client(new URL('http://localhost:200'), { connect: -1 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'connect must be a function or an object') + } + + try { + new Pool(new URL('http://localhost:200'), { connect: 'asd' }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'connect must be a function or an object') + } + + try { + new Pool(new URL('http://localhost:200'), { connect: -1 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'connect must be a function or an object') + } + + try { + new Client(new URL('http://localhost:200'), { maxCachedSessions: -10 }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'maxCachedSessions must be a positive integer or zero') + } + + try { + new Client(new URL('http://localhost:200'), { maxCachedSessions: 'foo' }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'maxCachedSessions must be a positive integer or zero') + } + + try { + new Client(new URL('http://localhost:200'), { maxRequestsPerClient: 'foo' }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'maxRequestsPerClient must be a positive number') + } + + try { + new Client(new URL('http://localhost:200'), { autoSelectFamilyAttemptTimeout: 'foo' }) // eslint-disable-line + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'autoSelectFamilyAttemptTimeout must be a positive number') + } + + t.end() +}) + +test('POST which fails should error response', (t) => { + t.plan(6) + + const server = createServer() + server.on('request', (req, res) => { + req.once('data', () => { + res.destroy() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + function checkError (err) { + // Different platforms error with different codes... 
+ t.ok( + err.code === 'EPIPE' || + err.code === 'ECONNRESET' || + err.code === 'UND_ERR_SOCKET' || + err.message === 'other side closed' + ) + } + + { + const body = new Readable({ read () {} }) + body.push('asd') + body.on('error', (err) => { + checkError(err) + }) + + client.request({ + path: '/', + method: 'POST', + body + }, (err) => { + checkError(err) + }) + } + + { + const body = new Readable({ read () {} }) + body.push('asd') + body.on('error', (err) => { + checkError(err) + }) + + client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': 100 + }, + body + }, (err) => { + checkError(err) + }) + } + + { + const body = wrapWithAsyncIterable(['asd'], true) + + client.request({ + path: '/', + method: 'POST', + body + }, (err) => { + checkError(err) + }) + } + + { + const body = wrapWithAsyncIterable(['asd'], true) + + client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': 100 + }, + body + }, (err) => { + checkError(err) + }) + } + }) +}) + +test('client destroy cleanup', (t) => { + t.plan(3) + + const _err = new Error('kaboom') + let client + const server = createServer() + server.once('request', (req, res) => { + req.once('data', () => { + client.destroy(_err, (err) => { + t.error(err) + }) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const body = new Readable({ read () {} }) + body.push('asd') + body.on('error', (err) => { + t.equal(err, _err) + }) + + client.request({ + path: '/', + method: 'POST', + body + }, (err, data) => { + t.equal(err, _err) + }) + }) +}) + +test('throwing async-iterator causes error', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end(Buffer.alloc(4 + 1, 'a')) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + method: 'POST', + path: '/', + body: (async function * () { + yield 'hello' + throw new IteratorError('bad iterator') + })() + }, (err) => { + t.type(err, IteratorError) + }) + }) +}) + +test('client async-iterator destroy cleanup', (t) => { + t.plan(2) + + const _err = new Error('kaboom') + let client + const server = createServer() + server.once('request', (req, res) => { + req.once('data', () => { + client.destroy(_err, (err) => { + t.error(err) + }) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const body = wrapWithAsyncIterable(['asd'], true) + + client.request({ + path: '/', + method: 'POST', + body + }, (err, data) => { + t.equal(err, _err) + }) + }) +}) + +test('GET errors body', (t) => { + t.plan(2) + + const server = createServer() + server.once('request', (req, res) => { + res.write('asd') + setTimeout(() => { + res.destroy() + }, 19) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + body.resume() + body.on('error', err => ( + t.ok(err) + )) + }) + }) +}) + +test('validate request body', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.end('asd') + }) 
+ t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + body: /asdasd/ + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'POST', + body: 0 + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'POST', + body: false + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'POST', + body: '' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + method: 'POST', + body: new Uint8Array() + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + method: 'POST', + body: Buffer.alloc(10) + }, (err, data) => { + t.error(err) + data.body.resume() + }) + }) +}) + +test('parser error', (t) => { + t.plan(2) + + const server = net.createServer() + server.once('connection', (socket) => { + socket.write('asd\n\r213123') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err) => { + t.ok(err) + client.close((err) => { + t.error(err) + }) + }) + }) +}) + +function socketFailWrite (type) { + test(`socket fail while writing ${type} request body`, (t) => { + t.plan(2) + + const server = createServer() + server.once('request', (req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const preBody = new Readable({ read () {} }) + preBody.push('asd') + const body = maybeWrapStream(preBody, type) + client.on('connect', () => { + process.nextTick(() => { + client[kSocket].destroy('kaboom') + }) + }) + + client.request({ + path: '/', + method: 'POST', + body + }, (err) => { + t.ok(err) + }) + client.close((err) => { + t.error(err) + }) + }) + }) +} +socketFailWrite(consts.STREAM) +socketFailWrite(consts.ASYNC_ITERATOR) + +function socketFailEndWrite (type) { + test(`socket fail while ending ${type} request body`, (t) => { + t.plan(3) + + const server = createServer() + server.once('request', (req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.destroy.bind(client)) + + const _err = new Error('kaboom') + client.on('connect', () => { + process.nextTick(() => { + client[kSocket].destroy(_err) + }) + }) + const preBody = new Readable({ read () {} }) + preBody.push(null) + const body = maybeWrapStream(preBody, type) + + client.request({ + path: '/', + method: 'POST', + body + }, (err) => { + t.equal(err, _err) + }) + client.close((err) => { + t.error(err) + client.close((err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) + }) + }) +} + +socketFailEndWrite(consts.STREAM) +socketFailEndWrite(consts.ASYNC_ITERATOR) + +test('queued request should not fail on socket destroy', (t) => { + t.plan(4) + + const server = createServer() + server.on('request', (req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new 
Client(`http://localhost:${server.address().port}`, { + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume().on('error', () => { + t.pass() + }) + client[kSocket].destroy() + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume().on('end', () => { + t.pass() + }) + }) + }) + }) +}) + +test('queued request should fail on client destroy', (t) => { + t.plan(6) + + const server = createServer() + server.on('request', (req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + + let requestErrored = false + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + .on('error', () => { + t.pass() + }) + client.destroy((err) => { + t.error(err) + t.equal(requestErrored, true) + }) + }) + client.request({ + path: '/', + method: 'GET', + opaque: 'asd' + }, (err, data) => { + requestErrored = true + t.ok(err) + t.equal(data.opaque, 'asd') + }) + }) +}) + +test('retry idempotent inflight', (t) => { + t.plan(3) + + const server = createServer() + server.on('request', (req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + body: new Readable({ + read () { + this.destroy(new Error('kaboom')) + } + }) + }, (err) => { + t.ok(err) + }) + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + }) +}) + +test('invalid opts', (t) => { + t.plan(2) + + const client = new Client('http://localhost:5000') + client.request(null, (err) => { + t.type(err, errors.InvalidArgumentError) + }) + client.pipeline(null).on('error', (err) => { + t.type(err, errors.InvalidArgumentError) + }) +}) + +test('default port for http and https', (t) => { + t.plan(4) + + try { + new Client(new URL('http://localhost:80')) // eslint-disable-line + t.pass('Should not throw') + } catch (err) { + t.fail(err) + } + + try { + new Client(new URL('http://localhost')) // eslint-disable-line + t.pass('Should not throw') + } catch (err) { + t.fail(err) + } + + try { + new Client(new URL('https://localhost:443')) // eslint-disable-line + t.pass('Should not throw') + } catch (err) { + t.fail(err) + } + + try { + new Client(new URL('https://localhost')) // eslint-disable-line + t.pass('Should not throw') + } catch (err) { + t.fail(err) + } +}) + +test('CONNECT throws in next tick', (t) => { + t.plan(3) + + const server = createServer() + server.on('request', (req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body + .on('end', () => { + let ticked = false + client.request({ + path: '/', + method: 'CONNECT' + }, (err) => { + t.ok(err) + t.strictSame(ticked, true) + }) + ticked = true + }) + .resume() + }) + }) +}) + 
+test('invalid signal', (t) => { + t.plan(8) + + const client = new Client('http://localhost:3333') + t.teardown(client.destroy.bind(client)) + + let ticked = false + client.request({ path: '/', method: 'GET', signal: {}, opaque: 'asd' }, (err, { opaque }) => { + t.equal(ticked, true) + t.equal(opaque, 'asd') + t.type(err, errors.InvalidArgumentError) + }) + client.pipeline({ path: '/', method: 'GET', signal: {} }, () => {}) + .on('error', (err) => { + t.equal(ticked, true) + t.type(err, errors.InvalidArgumentError) + }) + client.stream({ path: '/', method: 'GET', signal: {}, opaque: 'asd' }, () => {}, (err, { opaque }) => { + t.equal(ticked, true) + t.equal(opaque, 'asd') + t.type(err, errors.InvalidArgumentError) + }) + ticked = true +}) + +test('invalid body chunk does not crash', (t) => { + t.plan(1) + + const server = createServer() + server.on('request', (req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + body: new Readable({ + objectMode: true, + read () { + this.push({}) + } + }), + method: 'GET' + }, (err) => { + t.equal(err.code, 'ERR_INVALID_ARG_TYPE') + }) + }) +}) + +test('socket errors', t => { + t.plan(2) + const client = new Client('http://localhost:5554') + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.ok(err) + // TODO: Why UND_ERR_SOCKET? + t.ok(err.code === 'ECONNREFUSED' || err.code === 'UND_ERR_SOCKET', err.code) + t.end() + }) +}) + +test('headers overflow', t => { + t.plan(2) + const server = createServer() + server.on('request', (req, res) => { + res.writeHead(200, { + 'x-test-1': '1', + 'x-test-2': '2' + }) + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + maxHeaderSize: 10 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.ok(err) + t.equal(err.code, 'UND_ERR_HEADERS_OVERFLOW') + t.end() + }) + }) +}) + +test('SocketError should expose socket details (net)', (t) => { + t.plan(8) + + const server = createServer() + + server.once('request', (req, res) => { + res.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.ok(err instanceof errors.SocketError) + if (err.socket.remoteFamily === 'IPv4') { + t.equal(err.socket.remoteFamily, 'IPv4') + t.equal(err.socket.localAddress, '127.0.0.1') + t.equal(err.socket.remoteAddress, '127.0.0.1') + } else { + t.equal(err.socket.remoteFamily, 'IPv6') + t.equal(err.socket.localAddress, '::1') + t.equal(err.socket.remoteAddress, '::1') + } + t.type(err.socket.localPort, 'number') + t.type(err.socket.remotePort, 'number') + t.type(err.socket.bytesWritten, 'number') + t.type(err.socket.bytesRead, 'number') + }) + }) +}) + +test('SocketError should expose socket details (tls)', (t) => { + t.plan(8) + + const server = https.createServer(pem) + + server.once('request', (req, res) => { + res.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`https://localhost:${server.address().port}`, { + tls: { + rejectUnauthorized: false + } + 
}) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.ok(err instanceof errors.SocketError) + if (err.socket.remoteFamily === 'IPv4') { + t.equal(err.socket.remoteFamily, 'IPv4') + t.equal(err.socket.localAddress, '127.0.0.1') + t.equal(err.socket.remoteAddress, '127.0.0.1') + } else { + t.equal(err.socket.remoteFamily, 'IPv6') + t.equal(err.socket.localAddress, '::1') + t.equal(err.socket.remoteAddress, '::1') + } + t.type(err.socket.localPort, 'number') + t.type(err.socket.remotePort, 'number') + t.type(err.socket.bytesWritten, 'number') + t.type(err.socket.bytesRead, 'number') + }) + }) +}) diff --git a/test/client-head-reset-override.js b/test/client-head-reset-override.js new file mode 100644 index 0000000..a7d79e2 --- /dev/null +++ b/test/client-head-reset-override.js @@ -0,0 +1,62 @@ +'use strict' + +const { createServer } = require('http') +const { test } = require('tap') +const { Client } = require('..') + +test('override HEAD reset', (t) => { + const expected = 'testing123' + const server = createServer((req, res) => { + if (req.method === 'GET') { + res.write(expected) + } + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let done + client.on('disconnect', () => { + if (!done) { + t.fail() + } + }) + + client.request({ + path: '/', + method: 'HEAD', + reset: false + }, (err, res) => { + t.error(err) + res.body.resume() + }) + + client.request({ + path: '/', + method: 'HEAD', + reset: false + }, (err, res) => { + t.error(err) + res.body.resume() + }) + + client.request({ + path: '/', + method: 'GET', + reset: false + }, (err, res) => { + t.error(err) + let str = '' + res.body.on('data', (data) => { + str += data + }).on('end', () => { + t.same(str, expected) + done = true + t.end() + }) + }) + }) +}) diff --git a/test/client-idempotent-body.js b/test/client-idempotent-body.js new file mode 100644 index 0000000..99e5371 --- /dev/null +++ b/test/client-idempotent-body.js @@ -0,0 +1,43 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') + +test('idempotent retry', (t) => { + t.plan(11) + + const body = 'world' + const server = createServer((req, res) => { + let buf = '' + req.on('data', data => { + buf += data + }).on('end', () => { + t.strictSame(buf, body) + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.close.bind(client)) + + const _err = new Error() + + for (let n = 0; n < 4; ++n) { + client.stream({ + path: '/', + method: 'PUT', + idempotent: true, + body + }, () => { + throw _err + }, (err) => { + t.equal(err, _err) + }) + } + }) +}) diff --git a/test/client-keep-alive.js b/test/client-keep-alive.js new file mode 100644 index 0000000..393807b --- /dev/null +++ b/test/client-keep-alive.js @@ -0,0 +1,359 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const timers = require('../lib/timers') +const { kConnect } = require('../lib/core/symbols') +const { createServer } = require('net') +const http = require('http') +const FakeTimers = require('@sinonjs/fake-timers') + +test('keep-alive header', (t) => { + t.plan(2) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + 
socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=2s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 4e3) + client.on('disconnect', () => { + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + }) +}) + +test('keep-alive header 0', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=1s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeoutThreshold: 500 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + client.on('disconnect', () => { + t.pass() + }) + clock.tick(600) + }).resume() + }) + }) +}) + +test('keep-alive header 1', (t) => { + t.plan(2) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=1s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 0) + client.on('disconnect', () => { + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + }) +}) + +test('keep-alive header no postfix', (t) => { + t.plan(2) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=2\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 4e3) + client.on('disconnect', () => { + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + }) +}) + +test('keep-alive not timeout', (t) => { + t.plan(2) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeoutasdasd=1s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new 
Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 1e3 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 3e3) + client.on('disconnect', () => { + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + }) +}) + +test('keep-alive threshold', (t) => { + t.plan(2) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=30s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 30e3, + keepAliveTimeoutThreshold: 29e3 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 3e3) + client.on('disconnect', () => { + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + }) +}) + +test('keep-alive max keepalive', (t) => { + t.plan(2) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=30s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 30e3, + keepAliveMaxTimeout: 1e3 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 3e3) + client.on('disconnect', () => { + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + }) +}) + +test('connection close', (t) => { + t.plan(4) + + let close = false + const server = createServer((socket) => { + if (close) { + return + } + close = true + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Connection: close\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.destroy.bind(client)) + + client[kConnect](() => { + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 3e3) + client.once('disconnect', () => { + close = false + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + const timeout = setTimeout(() => { + t.fail() + }, 3e3) + client.once('disconnect', () => { + t.pass() + clearTimeout(timeout) + }) + }).resume() + }) + }) + }) +}) + +test('Disable keep alive', (t) => { + t.plan(7) + + const ports = [] + const server = http.createServer((req, res) => { + t.notOk(ports.includes(req.socket.remotePort)) + ports.push(req.socket.remotePort) + t.match(req.headers, { connection: 'close' }) + res.writeHead(200, { connection: 'close' }) + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + 
const client = new Client(`http://localhost:${server.address().port}`, { pipelining: 0 }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.on('end', () => { + t.pass() + }).resume() + }) + }).resume() + }) + }) +}) diff --git a/test/client-node-max-header-size.js b/test/client-node-max-header-size.js new file mode 100644 index 0000000..b537490 --- /dev/null +++ b/test/client-node-max-header-size.js @@ -0,0 +1,23 @@ +'use strict' + +const { execSync } = require('node:child_process') +const { test } = require('tap') + +const command = 'node -e "require(\'.\').request(\'https://httpbin.org/get\')"' + +test("respect Node.js' --max-http-header-size", async (t) => { + t.throws( + // TODO: Drop the `--unhandled-rejections=throw` once we drop Node.js 14 + () => execSync(`${command} --max-http-header-size=1 --unhandled-rejections=throw`), + /UND_ERR_HEADERS_OVERFLOW/, + 'max-http-header-size=1 should throw' + ) + + t.doesNotThrow( + () => execSync(command), + /UND_ERR_HEADERS_OVERFLOW/, + 'default max-http-header-size should not throw' + ) + + t.end() +}) diff --git a/test/client-pipeline.js b/test/client-pipeline.js new file mode 100644 index 0000000..9b677a0 --- /dev/null +++ b/test/client-pipeline.js @@ -0,0 +1,1042 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const EE = require('events') +const { createServer } = require('http') +const { + pipeline, + Readable, + Transform, + Writable, + PassThrough +} = require('stream') +const { nodeMajor } = require('../lib/core/util') + +test('pipeline get', (t) => { + t.plan(17) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(undefined, req.headers['content-length']) + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + { + const bufs = [] + const signal = new EE() + client.pipeline({ signal, path: '/', method: 'GET' }, ({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + t.equal(signal.listenerCount('abort'), 1) + return body + }) + .end() + .on('data', (buf) => { + bufs.push(buf) + }) + .on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + .on('close', () => { + t.equal(signal.listenerCount('abort'), 0) + }) + t.equal(signal.listenerCount('abort'), 1) + } + + { + const bufs = [] + client.pipeline({ path: '/', method: 'GET' }, ({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + return body + }) + .end() + .on('data', (buf) => { + bufs.push(buf) + }) + .on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } + }) +}) + +test('pipeline echo', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let res = '' + const buf1 = Buffer.alloc(1e3).toString() + const buf2 = 
Buffer.alloc(1e6).toString() + pipeline( + new Readable({ + read () { + this.push(buf1) + this.push(buf2) + this.push(null) + } + }), + client.pipeline({ + path: '/', + method: 'PUT' + }, ({ body }) => { + return pipeline(body, new PassThrough(), () => {}) + }), + new Writable({ + write (chunk, encoding, callback) { + res += chunk.toString() + callback() + }, + final (callback) { + t.equal(res, buf1 + buf2) + callback() + } + }), + (err) => { + t.error(err) + } + ) + }) +}) + +test('pipeline ignore request body', (t) => { + t.plan(2) + + let done + const server = createServer((req, res) => { + res.write('asd') + res.end() + done() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let res = '' + const buf1 = Buffer.alloc(1e3).toString() + const buf2 = Buffer.alloc(1e6).toString() + pipeline( + new Readable({ + read () { + this.push(buf1) + this.push(buf2) + done = () => this.push(null) + } + }), + client.pipeline({ + path: '/', + method: 'PUT' + }, ({ body }) => { + return pipeline(body, new PassThrough(), () => {}) + }), + new Writable({ + write (chunk, encoding, callback) { + res += chunk.toString() + callback() + }, + final (callback) { + t.equal(res, 'asd') + callback() + } + }), + (err) => { + t.error(err) + } + ) + }) +}) + +test('pipeline invalid handler', (t) => { + t.plan(1) + + const client = new Client('http://localhost:5000') + client.pipeline({}, null).on('error', (err) => { + t.ok(/handler/.test(err)) + }) +}) + +test('pipeline invalid handler return after destroy should not error', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.destroy.bind(client)) + + const dup = client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + body.on('error', (err) => { + t.equal(err.message, 'asd') + }) + dup.destroy(new Error('asd')) + return {} + }) + .on('error', (err) => { + t.equal(err.message, 'asd') + }) + .on('close', () => { + t.pass() + }) + .end() + }) +}) + +test('pipeline error body', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const buf = Buffer.alloc(1e6).toString() + pipeline( + new Readable({ + read () { + this.push(buf) + } + }), + client.pipeline({ + path: '/', + method: 'PUT' + }, ({ body }) => { + const pt = new PassThrough() + process.nextTick(() => { + pt.destroy(new Error('asd')) + }) + body.on('error', (err) => { + t.ok(err) + }) + return pipeline(body, pt, () => {}) + }), + new PassThrough(), + (err) => { + t.ok(err) + } + ) + }) +}) + +test('pipeline destroy body', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const buf = Buffer.alloc(1e6).toString() + pipeline( + new Readable({ + read () { + this.push(buf) + } + }), + client.pipeline({ + path: '/', + method: 'PUT' + }, ({ body }) => { + const pt = new PassThrough() + process.nextTick(() => { 
+ pt.destroy() + }) + body.on('error', (err) => { + t.ok(err) + }) + return pipeline(body, pt, () => {}) + }), + new PassThrough(), + (err) => { + t.ok(err) + } + ) + }) +}) + +test('pipeline backpressure', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const buf = Buffer.alloc(1e6).toString() + const duplex = client.pipeline({ + path: '/', + method: 'PUT' + }, ({ body }) => { + const pt = new PassThrough() + return pipeline(body, pt, () => {}) + }) + + duplex.end(buf) + duplex.on('data', () => { + duplex.pause() + setImmediate(() => { + duplex.resume() + }) + }).on('end', () => { + t.pass() + }) + }) +}) + +test('pipeline invalid handler return', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + // TODO: Should body cause unhandled exception? + body.on('error', () => {}) + }) + .on('error', (err) => { + t.type(err, errors.InvalidReturnValueError) + }) + .end() + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + // TODO: Should body cause unhandled exception? + body.on('error', () => {}) + return {} + }) + .on('error', (err) => { + t.type(err, errors.InvalidReturnValueError) + }) + .end() + }) +}) + +test('pipeline throw handler', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + // TODO: Should body cause unhandled exception? + body.on('error', () => {}) + throw new Error('asd') + }) + .on('error', (err) => { + t.equal(err.message, 'asd') + }) + .end() + }) +}) + +test('pipeline destroy and throw handler', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const dup = client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + dup.destroy() + // TODO: Should body cause unhandled exception? 
+ body.on('error', () => {}) + throw new Error('asd') + }) + .end() + .on('error', (err) => { + t.type(err, errors.RequestAbortedError) + }) + .on('close', () => { + t.pass() + }) + }) +}) + +test('pipeline abort res', (t) => { + t.plan(2) + + let _res + const server = createServer((req, res) => { + res.write('asd') + _res = res + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + setImmediate(() => { + body.destroy() + _res.write('asdasdadasd') + const timeout = setTimeout(() => { + t.fail() + }, 100) + client.on('disconnect', () => { + clearTimeout(timeout) + t.pass() + }) + }) + return body + }) + .on('error', (err) => { + t.type(err, errors.RequestAbortedError) + }) + .end() + }) +}) + +test('pipeline abort server res', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, () => { + t.fail() + }) + .on('error', (err) => { + t.type(err, errors.SocketError) + }) + .end() + }) +}) + +test('pipeline abort duplex', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'PUT' + }, (err, data) => { + t.error(err) + data.body.resume() + + client.pipeline({ + path: '/', + method: 'PUT' + }, () => { + t.fail() + }).destroy().on('error', (err) => { + t.type(err, errors.RequestAbortedError) + }) + }) + }) +}) + +test('pipeline abort piped res', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.write('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + const pt = new PassThrough() + setImmediate(() => { + pt.destroy() + }) + return pipeline(body, pt, () => {}) + }) + .on('error', (err) => { + // Node < 13 doesn't always detect premature close. 
+ if (nodeMajor < 13) { + t.ok(err) + } else { + t.equal(err.code, 'UND_ERR_ABORTED') + } + }) + .end() + }) +}) + +test('pipeline abort piped res 2', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.write('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + const pt = new PassThrough() + body.on('error', (err) => { + t.type(err, errors.RequestAbortedError) + }) + setImmediate(() => { + pt.destroy() + }) + body.pipe(pt) + return pt + }) + .on('error', (err) => { + t.type(err, errors.RequestAbortedError) + }) + .end() + }) +}) + +test('pipeline abort piped res 3', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.write('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + const pt = new PassThrough() + body.on('error', (err) => { + t.equal(err.message, 'asd') + }) + setImmediate(() => { + pt.destroy(new Error('asd')) + }) + body.pipe(pt) + return pt + }) + .on('error', (err) => { + t.equal(err.message, 'asd') + }) + .end() + }) +}) + +test('pipeline abort server res after headers', (t) => { + t.plan(1) + + let _res + const server = createServer((req, res) => { + res.write('asd') + _res = res + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, (data) => { + _res.destroy() + return data.body + }) + .on('error', (err) => { + t.type(err, errors.SocketError) + }) + .end() + }) +}) + +test('pipeline w/ write abort server res after headers', (t) => { + t.plan(1) + + let _res + const server = createServer((req, res) => { + req.pipe(res) + _res = res + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'PUT' + }, (data) => { + _res.destroy() + return data.body + }) + .on('error', (err) => { + t.type(err, errors.SocketError) + }) + .resume() + .write('asd') + }) +}) + +test('destroy in push', (t) => { + t.plan(3) + + let _res + const server = createServer((req, res) => { + res.write('asd') + _res = res + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.pipeline({ path: '/', method: 'GET' }, ({ body }) => { + body.once('data', () => { + _res.write('asd') + body.on('data', (buf) => { + body.destroy() + _res.end() + }).on('error', (err) => { + t.ok(err) + }) + }) + return body + }).on('error', (err) => { + t.ok(err) + }).resume().end() + + client.pipeline({ path: '/', method: 'GET' }, ({ body }) => { + let buf = '' + body.on('data', (chunk) => { + buf = chunk.toString() + _res.end() + }).on('end', () => { + t.equal('asd', buf) + }) + return body + }).resume().end() + }) +}) + +test('pipeline args validation', (t) => { + t.plan(2) + + const client = new Client('http://localhost:5000') + + const ret = 
client.pipeline(null, () => {}) + ret.on('error', (err) => { + t.ok(/opts/.test(err.message)) + t.type(err, errors.InvalidArgumentError) + }) +}) + +test('pipeline factory throw not unhandled', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.write('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, (data) => { + throw new Error('asd') + }) + .on('error', (err) => { + t.ok(err) + }) + .end() + }) +}) + +test('pipeline destroy before dispatch', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client + .pipeline({ path: '/', method: 'GET' }, ({ body }) => { + return body + }) + .on('error', (err) => { + t.ok(err) + }) + .end() + .destroy() + }) +}) + +test('pipeline legacy stream', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.write(Buffer.alloc(16e3)) + setImmediate(() => { + res.end(Buffer.alloc(16e3)) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client + .pipeline({ path: '/', method: 'GET' }, ({ body }) => { + const pt = new PassThrough() + pt.pause = null + return body.pipe(pt) + }) + .resume() + .on('end', () => { + t.pass() + }) + .end() + }) +}) + +test('pipeline objectMode', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end(JSON.stringify({ asd: 1 })) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client + .pipeline({ path: '/', method: 'GET', objectMode: true }, ({ body }) => { + return pipeline(body, new Transform({ + readableObjectMode: true, + transform (chunk, encoding, callback) { + callback(null, JSON.parse(chunk)) + } + }), () => {}) + }) + .on('data', data => { + t.strictSame(data, { asd: 1 }) + }) + .end() + }) +}) + +test('pipeline invalid opts', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end(JSON.stringify({ asd: 1 })) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.close((err) => { + t.error(err) + }) + client + .pipeline({ path: '/', method: 'GET', objectMode: true }, ({ body }) => { + t.fail() + }) + .on('error', (err) => { + t.ok(err) + }) + }) +}) + +test('pipeline CONNECT throw', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'CONNECT' + }, () => { + t.fail() + }).on('error', (err) => { + t.type(err, errors.InvalidArgumentError) + }) + client.on('disconnect', () => { + t.fail() + }) + }) +}) + +test('pipeline body without destroy', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) 
+ t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => { + const pt = new PassThrough({ autoDestroy: false }) + pt.destroy = null + return body.pipe(pt) + }) + .end() + .on('end', () => { + t.pass() + }) + .resume() + }) +}) + +test('pipeline ignore 1xx', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.writeProcessing() + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let buf = '' + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => body) + .on('data', (chunk) => { + buf += chunk + }) + .on('end', () => { + t.equal(buf, 'hello') + }) + .end() + }) +}) +test('pipeline ignore 1xx and use onInfo', (t) => { + t.plan(3) + + const infos = [] + const server = createServer((req, res) => { + res.writeProcessing() + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let buf = '' + client.pipeline({ + path: '/', + method: 'GET', + onInfo: (x) => { + infos.push(x) + } + }, ({ body }) => body) + .on('data', (chunk) => { + buf += chunk + }) + .on('end', () => { + t.equal(buf, 'hello') + t.equal(infos.length, 1) + t.equal(infos[0].statusCode, 102) + }) + .end() + }) +}) + +test('pipeline backpressure', (t) => { + t.plan(1) + + const expected = Buffer.alloc(1e6).toString() + + const server = createServer((req, res) => { + res.writeProcessing() + res.end(expected) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let buf = '' + client.pipeline({ + path: '/', + method: 'GET' + }, ({ body }) => body) + .end() + .pipe(new Transform({ + highWaterMark: 1, + transform (chunk, encoding, callback) { + setImmediate(() => { + callback(null, chunk) + }) + } + })) + .on('data', chunk => { + buf += chunk + }) + .on('end', () => { + t.equal(buf, expected) + }) + }) +}) + +test('pipeline abort after headers', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.writeProcessing() + res.write('asd') + setImmediate(() => { + res.write('asd') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const signal = new EE() + client.pipeline({ + path: '/', + method: 'GET', + signal + }, ({ body }) => { + process.nextTick(() => { + signal.emit('abort') + }) + return body + }) + .end() + .on('error', (err) => { + t.type(err, errors.RequestAbortedError) + }) + }) +}) diff --git a/test/client-pipelining.js b/test/client-pipelining.js new file mode 100644 index 0000000..8cd21fe --- /dev/null +++ b/test/client-pipelining.js @@ -0,0 +1,752 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { finished, Readable } = require('stream') +const { kConnect } = require('../lib/core/symbols') +const EE = require('events') +const { kBusy, kRunning, kSize } = require('../lib/core/symbols') +const { maybeWrapStream, 
consts } = require('./utils/async-iterators') + +test('20 times GET with pipelining 10', (t) => { + const num = 20 + t.plan(3 * num + 1) + + let count = 0 + let countGreaterThanOne = false + const server = createServer((req, res) => { + count++ + setTimeout(function () { + countGreaterThanOne = countGreaterThanOne || count > 1 + res.end(req.url) + }, 10) + }) + t.teardown(server.close.bind(server)) + + // needed to check for a warning on the maxListeners on the socket + function onWarning (warning) { + if (!/ExperimentalWarning/.test(warning)) { + t.fail() + } + } + process.on('warning', onWarning) + t.teardown(() => { + process.removeListener('warning', onWarning) + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 10 + }) + t.teardown(client.close.bind(client)) + + for (let i = 0; i < num; i++) { + makeRequest(i) + } + + function makeRequest (i) { + makeRequestAndExpectUrl(client, i, t, () => { + count-- + + if (i === num - 1) { + t.ok(countGreaterThanOne, 'seen more than one parallel request') + } + }) + } + }) +}) + +function makeRequestAndExpectUrl (client, i, t, cb) { + return client.request({ path: '/' + i, method: 'GET' }, (err, { statusCode, headers, body }) => { + cb() + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('/' + i, Buffer.concat(bufs).toString('utf8')) + }) + }) +} + +test('A client should enqueue as much as twice its pipelining factor', (t) => { + const num = 10 + let sent = 0 + // x * 6 + 1 t.ok + 5 drain + t.plan(num * 6 + 1 + 5 + 2) + + let count = 0 + let countGreaterThanOne = false + const server = createServer((req, res) => { + count++ + t.ok(count <= 5) + setTimeout(function () { + countGreaterThanOne = countGreaterThanOne || count > 1 + res.end(req.url) + }, 10) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.close.bind(client)) + + for (; sent < 2;) { + t.notOk(client[kSize] > client.pipelining, 'client is not full') + makeRequest() + t.ok(client[kSize] <= client.pipelining, 'we can send more requests') + } + + t.ok(client[kBusy], 'client is busy') + t.notOk(client[kSize] > client.pipelining, 'client is full') + makeRequest() + t.ok(client[kBusy], 'we must stop now') + t.ok(client[kBusy], 'client is busy') + t.ok(client[kSize] > client.pipelining, 'client is full') + + function makeRequest () { + makeRequestAndExpectUrl(client, sent++, t, () => { + count-- + setImmediate(() => { + if (client[kSize] === 0) { + t.ok(countGreaterThanOne, 'seen more than one parallel request') + const start = sent + for (; sent < start + 2 && sent < num;) { + t.notOk(client[kSize] > client.pipelining, 'client is not full') + t.ok(makeRequest()) + } + } + }) + }) + return client[kSize] <= client.pipelining + } + }) +}) + +test('pipeline 1 is 1 active request', (t) => { + t.plan(9) + + let res2 + const server = createServer((req, res) => { + res.write('asd') + res2 = res + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.equal(client[kSize], 1) + t.error(err) + t.notOk(client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + 
t.error(err) + finished(data.body, (err) => { + t.ok(err) + client.close((err) => { + t.error(err) + }) + }) + data.body.destroy() + res2.end() + })) + data.body.resume() + res2.end() + }) + t.ok(client[kSize] <= client.pipelining) + t.ok(client[kBusy]) + t.equal(client[kSize], 1) + }) +}) + +test('pipelined chunked POST stream', (t) => { + t.plan(4 + 8 + 8) + + let a = 0 + let b = 0 + + const server = createServer((req, res) => { + req.on('data', chunk => { + // Make sure a and b don't interleave. + t.ok(a === 9 || b === 0) + res.write(chunk) + }).on('end', () => { + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + body.resume() + t.error(err) + }) + + client.request({ + path: '/', + method: 'POST', + body: new Readable({ + read () { + this.push(++a > 8 ? null : 'a') + } + }) + }, (err, { body }) => { + body.resume() + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + body.resume() + t.error(err) + }) + + client.request({ + path: '/', + method: 'POST', + body: new Readable({ + read () { + this.push(++b > 8 ? null : 'b') + } + }) + }, (err, { body }) => { + body.resume() + t.error(err) + }) + }) +}) + +test('pipelined chunked POST iterator', (t) => { + t.plan(4 + 8 + 8) + + let a = 0 + let b = 0 + + const server = createServer((req, res) => { + req.on('data', chunk => { + // Make sure a and b don't interleave. + t.ok(a === 9 || b === 0) + res.write(chunk) + }).on('end', () => { + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + body.resume() + t.error(err) + }) + + client.request({ + path: '/', + method: 'POST', + body: (async function * () { + while (++a <= 8) { + yield 'a' + } + })() + }, (err, { body }) => { + body.resume() + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + body.resume() + t.error(err) + }) + + client.request({ + path: '/', + method: 'POST', + body: (async function * () { + while (++b <= 8) { + yield 'b' + } + })() + }, (err, { body }) => { + body.resume() + t.error(err) + }) + }) +}) + +function errordInflightPost (bodyType) { + test(`errored POST body lets inflight complete ${bodyType}`, (t) => { + t.plan(6) + + let serverRes + const server = createServer() + server.on('request', (req, res) => { + serverRes = res + res.write('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body + .resume() + .once('data', () => { + client.request({ + path: '/', + method: 'POST', + opaque: 'asd', + body: maybeWrapStream(new Readable({ + read () { + this.destroy(new Error('kaboom')) + } + }).once('error', (err) => { + t.ok(err) + }).on('error', () => { + // Readable emits error twice... 
+ }), bodyType) + }, (err, data) => { + t.ok(err) + t.equal(data.opaque, 'asd') + }) + client.close((err) => { + t.error(err) + }) + serverRes.end() + }) + .on('end', () => { + t.pass() + }) + }) + }) + }) +} + +errordInflightPost(consts.STREAM) +errordInflightPost(consts.ASYNC_ITERATOR) + +test('pipelining non-idempotent', (t) => { + t.plan(4) + + const server = createServer() + server.on('request', (req, res) => { + setTimeout(() => { + res.end('asd') + }, 10) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.close.bind(client)) + + let ended = false + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + ended = true + }) + }) + + client.request({ + path: '/', + method: 'GET', + idempotent: false + }, (err, data) => { + t.error(err) + t.equal(ended, true) + data.body.resume() + }) + }) +}) + +function pipeliningNonIdempotentWithBody (bodyType) { + test(`pipelining non-idempotent w body ${bodyType}`, (t) => { + t.plan(4) + + const server = createServer() + server.on('request', (req, res) => { + setImmediate(() => { + res.end('asd') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.close.bind(client)) + + let ended = false + let reading = false + client.request({ + path: '/', + method: 'POST', + body: maybeWrapStream(new Readable({ + read () { + if (reading) { + return + } + reading = true + this.push('asd') + setImmediate(() => { + this.push(null) + ended = true + }) + } + }), bodyType) + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + + client.request({ + path: '/', + method: 'GET', + idempotent: false + }, (err, data) => { + t.error(err) + t.equal(ended, true) + data.body.resume() + }) + }) + }) +} + +pipeliningNonIdempotentWithBody(consts.STREAM) +pipeliningNonIdempotentWithBody(consts.ASYNC_ITERATOR) + +function pipeliningHeadBusy (bodyType) { + test(`pipelining HEAD busy ${bodyType}`, (t) => { + t.plan(7) + + const server = createServer() + server.on('request', (req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 10 + }) + t.teardown(client.close.bind(client)) + + client[kConnect](() => { + let ended = false + client.once('disconnect', () => { + t.equal(ended, true) + }) + + { + const body = new Readable({ + read () { } + }) + client.request({ + path: '/', + method: 'GET', + body: maybeWrapStream(body, bodyType) + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + body.push(null) + t.equal(client[kBusy], true) + } + + { + const body = new Readable({ + read () { } + }) + client.request({ + path: '/', + method: 'HEAD', + body: maybeWrapStream(body, bodyType) + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + ended = true + t.pass() + }) + }) + body.push(null) + t.equal(client[kBusy], true) + } + }) + }) + }) +} + +pipeliningHeadBusy(consts.STREAM) +pipeliningHeadBusy(consts.ASYNC_ITERATOR) + +test('pipelining empty pipeline before reset', (t) => { + t.plan(8) + + let c = 0 + const server = createServer() + server.on('request', (req, res) => { 
+ if (c++ === 0) { + res.end('asd') + } else { + setTimeout(() => { + res.end('asd') + }, 100) + } + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 10 + }) + t.teardown(client.close.bind(client)) + + client[kConnect](() => { + let ended = false + client.once('disconnect', () => { + t.equal(ended, true) + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + t.equal(client[kBusy], false) + + client.request({ + path: '/', + method: 'HEAD', + body: 'asd' + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + ended = true + t.pass() + }) + }) + t.equal(client[kBusy], true) + t.equal(client[kRunning], 2) + }) + }) +}) + +function pipeliningIdempotentBusy (bodyType) { + test(`pipelining idempotent busy ${bodyType}`, (t) => { + t.plan(12) + + const server = createServer() + server.on('request', (req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 10 + }) + t.teardown(client.close.bind(client)) + + { + const body = new Readable({ + read () { } + }) + client.request({ + path: '/', + method: 'GET', + body: maybeWrapStream(body, bodyType) + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + body.push(null) + t.equal(client[kBusy], true) + } + + client[kConnect](() => { + { + const body = new Readable({ + read () { } + }) + client.request({ + path: '/', + method: 'GET', + body: maybeWrapStream(body, bodyType) + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + body.push(null) + t.equal(client[kBusy], true) + } + + { + const signal = new EE() + const body = new Readable({ + read () { } + }) + client.request({ + path: '/', + method: 'GET', + body: maybeWrapStream(body, bodyType), + signal + }, (err, data) => { + t.ok(err) + }) + t.equal(client[kBusy], true) + signal.emit('abort') + t.equal(client[kBusy], true) + } + + { + const body = new Readable({ + read () { } + }) + client.request({ + path: '/', + method: 'GET', + idempotent: false, + body: maybeWrapStream(body, bodyType) + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + body.push(null) + t.equal(client[kBusy], true) + } + }) + }) + }) +} + +pipeliningIdempotentBusy(consts.STREAM) +pipeliningIdempotentBusy(consts.ASYNC_ITERATOR) + +test('pipelining blocked', (t) => { + t.plan(6) + + const server = createServer() + + let blocking = true + let count = 0 + + server.on('request', (req, res) => { + t.ok(!count || !blocking) + count++ + setImmediate(() => { + res.end('asd') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 10 + }) + t.teardown(client.close.bind(client)) + client.request({ + path: '/', + method: 'GET', + blocking: true + }, (err, data) => { + t.error(err) + blocking = false + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + }) +}) diff --git a/test/client-post.js b/test/client-post.js new file mode 100644 index 
0000000..363b43c --- /dev/null +++ b/test/client-post.js @@ -0,0 +1,73 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { Blob } = require('buffer') + +test('request post blob', { skip: !Blob }, (t) => { + t.plan(4) + + const server = createServer(async (req, res) => { + t.equal(req.headers['content-type'], 'application/json') + let str = '' + for await (const chunk of req) { + str += chunk + } + t.equal(str, 'asd') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + body: new Blob(['asd'], { + type: 'application/json' + }) + }, (err, data) => { + t.error(err) + data.body.resume().on('end', () => { + t.pass() + }) + }) + }) +}) + +test('request post arrayBuffer', { skip: !Blob }, (t) => { + t.plan(3) + + const server = createServer(async (req, res) => { + let str = '' + for await (const chunk of req) { + str += chunk + } + t.equal(str, 'asd') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const buf = Buffer.from('asd') + const dst = new ArrayBuffer(buf.byteLength) + buf.copy(new Uint8Array(dst)) + + client.request({ + path: '/', + method: 'GET', + body: dst + }, (err, data) => { + t.error(err) + data.body.resume().on('end', () => { + t.pass() + }) + }) + }) +}) diff --git a/test/client-reconnect.js b/test/client-reconnect.js new file mode 100644 index 0000000..ae1a206 --- /dev/null +++ b/test/client-reconnect.js @@ -0,0 +1,54 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const FakeTimers = require('@sinonjs/fake-timers') +const timers = require('../lib/timers') + +test('multiple reconnect', (t) => { + t.plan(5) + + let n = 0 + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + n === 0 ? 
res.destroy() : res.end('ok') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.ok(err) + t.equal(err.code, 'UND_ERR_SOCKET') + }) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + + client.on('disconnect', () => { + if (++n === 1) { + t.pass() + } + process.nextTick(() => { + clock.tick(1000) + }) + }) + }) +}) diff --git a/test/client-request.js b/test/client-request.js new file mode 100644 index 0000000..3e66705 --- /dev/null +++ b/test/client-request.js @@ -0,0 +1,997 @@ +/* globals AbortController */ + +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') +const EE = require('events') +const { kConnect } = require('../lib/core/symbols') +const { Readable } = require('stream') +const net = require('net') +const { promisify } = require('util') +const { NotSupportedError } = require('../lib/core/errors') +const { nodeMajor } = require('../lib/core/util') +const { parseFormDataString } = require('./utils/formdata') + +test('request dump', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + let dumped = false + client.on('disconnect', () => { + t.equal(dumped, true) + }) + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.dump().then(() => { + dumped = true + t.pass() + }) + }) + }) +}) + +test('request dump with abort signal', (t) => { + t.plan(2) + const server = createServer((req, res) => { + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + let ac + if (!global.AbortController) { + const { AbortController } = require('abort-controller') + ac = new AbortController() + } else { + ac = new AbortController() + } + body.dump({ signal: ac.signal }).catch((err) => { + t.equal(err.name, 'AbortError') + server.close() + }) + ac.abort() + }) + }) +}) + +test('request hwm', (t) => { + t.plan(2) + const server = createServer((req, res) => { + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + highWaterMark: 1000 + }, (err, { body }) => { + t.error(err) + t.same(body.readableHighWaterMark, 1000) + body.dump() + }) + }) +}) + +test('request abort before headers', (t) => { + t.plan(6) + + const signal = new EE() + const server = createServer((req, res) => { + res.end('hello') + signal.emit('abort') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client[kConnect](() => { + client.request({ + path: '/', + method: 'GET', + signal + }, (err) => { + t.type(err, 
errors.RequestAbortedError) + t.equal(signal.listenerCount('abort'), 0) + }) + t.equal(signal.listenerCount('abort'), 1) + + client.request({ + path: '/', + method: 'GET', + signal + }, (err) => { + t.type(err, errors.RequestAbortedError) + t.equal(signal.listenerCount('abort'), 0) + }) + t.equal(signal.listenerCount('abort'), 2) + }) + }) +}) + +test('request body destroyed on invalid callback', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const body = new Readable({ + read () {} + }) + try { + client.request({ + path: '/', + method: 'GET', + body + }, null) + } catch (err) { + t.equal(body.destroyed, true) + } + }) +}) + +test('trailers', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.writeHead(200, { Trailer: 'Content-MD5' }) + res.addTrailers({ 'Content-MD5': 'test' }) + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { body, trailers } = await client.request({ + path: '/', + method: 'GET' + }) + + body + .on('data', () => t.fail()) + .on('end', () => { + t.strictSame(trailers, { 'content-md5': 'test' }) + }) + }) +}) + +test('destroy socket abruptly', { skip: true }, async (t) => { + t.plan(2) + + const server = net.createServer((socket) => { + const lines = [ + 'HTTP/1.1 200 OK', + 'Date: Sat, 09 Oct 2010 14:28:02 GMT', + 'Connection: close', + '', + 'the body' + ] + socket.end(lines.join('\r\n')) + + // Unfortunately calling destroy synchronously might get us flaky results, + // therefore we delay it to the next event loop run. + setImmediate(socket.destroy.bind(socket)) + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { statusCode, body } = await client.request({ + path: '/', + method: 'GET' + }) + + t.equal(statusCode, 200) + + body.setEncoding('utf8') + + let actual = '' + + for await (const chunk of body) { + actual += chunk + } + + t.equal(actual, 'the body') +}) + +test('destroy socket abruptly with keep-alive', { skip: true }, async (t) => { + t.plan(2) + + const server = net.createServer((socket) => { + const lines = [ + 'HTTP/1.1 200 OK', + 'Date: Sat, 09 Oct 2010 14:28:02 GMT', + 'Connection: keep-alive', + 'Content-Length: 42', + '', + 'the body' + ] + socket.end(lines.join('\r\n')) + + // Unfortunately calling destroy synchronously might get us flaky results, + // therefore we delay it to the next event loop run. 
+ setImmediate(socket.destroy.bind(socket)) + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { statusCode, body } = await client.request({ + path: '/', + method: 'GET' + }) + + t.equal(statusCode, 200) + + body.setEncoding('utf8') + + try { + /* eslint-disable */ + for await (const _ of body) { + // empty on purpose + } + /* eslint-enable */ + t.fail('no error') + } catch (err) { + t.pass('error happened') + } +}) + +test('request json', (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + t.strictSame(obj, await body.json()) + }) +}) + +test('request long multibyte json', (t) => { + t.plan(1) + + const obj = { asd: 'あ'.repeat(100000) } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + t.strictSame(obj, await body.json()) + }) +}) + +test('request text', (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + t.strictSame(JSON.stringify(obj), await body.text()) + }) +}) + +test('empty host header', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end(req.headers.host) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const serverAddress = `localhost:${server.address().port}` + const client = new Client(`http://${serverAddress}`) + t.teardown(client.destroy.bind(client)) + + const getWithHost = async (host, wanted) => { + const { body } = await client.request({ + path: '/', + method: 'GET', + headers: { host } + }) + t.strictSame(await body.text(), wanted) + } + + await getWithHost('test', 'test') + await getWithHost(undefined, serverAddress) + await getWithHost('', '') + }) +}) + +test('request long multibyte text', (t) => { + t.plan(1) + + const obj = { asd: 'あ'.repeat(100000) } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + t.strictSame(JSON.stringify(obj), await body.text()) + }) +}) + +test('request blob', { skip: nodeMajor < 16 }, (t) => { + t.plan(2) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.setHeader('Content-Type', 'application/json') + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async ()
=> { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + + const blob = await body.blob() + t.strictSame(obj, JSON.parse(await blob.text())) + t.equal(blob.type, 'application/json') + }) +}) + +test('request arrayBuffer', (t) => { + t.plan(2) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + const ab = await body.arrayBuffer() + + t.strictSame(Buffer.from(JSON.stringify(obj)), Buffer.from(ab)) + t.ok(ab instanceof ArrayBuffer) + }) +}) + +test('request body', { skip: nodeMajor < 16 }, (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + + let x = '' + for await (const chunk of body.body) { + x += Buffer.from(chunk) + } + t.strictSame(JSON.stringify(obj), x) + }) +}) + +test('request post body no missing data', { skip: nodeMajor < 16 }, (t) => { + t.plan(2) + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'asd') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET', + body: new Readable({ + read () { + this.push('asd') + this.push(null) + } + }), + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) + +test('request post body no extra data handler', { skip: nodeMajor < 16 }, (t) => { + t.plan(3) + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'asd') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const reqBody = new Readable({ + read () { + this.push('asd') + this.push(null) + } + }) + process.nextTick(() => { + t.equal(reqBody.listenerCount('data'), 0) + }) + const { body } = await client.request({ + path: '/', + method: 'GET', + body: reqBody, + maxRedirections: 0 + }) + await body.text() + t.pass() + }) +}) + +test('request with onInfo callback', (t) => { + t.plan(3) + const infos = [] + const server = createServer((req, res) => { + res.writeProcessing() + res.setHeader('Content-Type', 'application/json') + res.end(JSON.stringify({ foo: 'bar' })) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + await client.request({ + path: '/', + method: 'GET', + onInfo: (x) => { infos.push(x) } + }) + t.equal(infos.length, 1) + t.equal(infos[0].statusCode, 102) + t.pass() + }) 
+}) + +test('request with onInfo callback but socket is destroyed before end of response', (t) => { + t.plan(5) + const infos = [] + let response + const server = createServer((req, res) => { + response = res + res.writeProcessing() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + try { + await client.request({ + path: '/', + method: 'GET', + onInfo: (x) => { + infos.push(x) + response.destroy() + } + }) + t.error() + } catch (e) { + t.ok(e) + t.equal(e.message, 'other side closed') + } + t.equal(infos.length, 1) + t.equal(infos[0].statusCode, 102) + t.pass() + }) +}) + +test('request onInfo callback headers parsing', async (t) => { + t.plan(4) + const infos = [] + + const server = net.createServer((socket) => { + const lines = [ + 'HTTP/1.1 103 Early Hints', + 'Link: </style.css>; rel=preload; as=style', + '', + 'HTTP/1.1 200 OK', + 'Date: Sat, 09 Oct 2010 14:28:02 GMT', + 'Connection: close', + '', + 'the body' + ] + socket.end(lines.join('\r\n')) + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET', + onInfo: (x) => { infos.push(x) } + }) + await body.dump() + t.equal(infos.length, 1) + t.equal(infos[0].statusCode, 103) + t.same(infos[0].headers, { link: '</style.css>; rel=preload; as=style' }) + t.pass() +}) + +test('request raw responseHeaders', async (t) => { + t.plan(4) + const infos = [] + + const server = net.createServer((socket) => { + const lines = [ + 'HTTP/1.1 103 Early Hints', + 'Link: </style.css>; rel=preload; as=style', + '', + 'HTTP/1.1 200 OK', + 'Date: Sat, 09 Oct 2010 14:28:02 GMT', + 'Connection: close', + '', + 'the body' + ] + socket.end(lines.join('\r\n')) + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { body, headers } = await client.request({ + path: '/', + method: 'GET', + responseHeaders: 'raw', + onInfo: (x) => { infos.push(x) } + }) + await body.dump() + t.equal(infos.length, 1) + t.same(infos[0].headers, ['Link', '</style.css>; rel=preload; as=style']) + t.same(headers, ['Date', 'Sat, 09 Oct 2010 14:28:02 GMT', 'Connection', 'close']) + t.pass() +}) + +test('request formData', { skip: nodeMajor < 16 }, (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' + }) + + try { + await body.formData() + t.fail('should throw NotSupportedError') + } catch (error) { + t.ok(error instanceof NotSupportedError) + } + }) +}) + +test('request text2', (t) => { + t.plan(2) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET' +
}) + const p = body.text() + let ret = '' + body.on('data', chunk => { + ret += chunk + }).on('end', () => { + t.equal(JSON.stringify(obj), ret) + }) + t.strictSame(JSON.stringify(obj), await p) + }) +}) + +test('request with FormData body', { skip: nodeMajor < 16 }, (t) => { + const { FormData } = require('../') + const { Blob } = require('buffer') + + const fd = new FormData() + fd.set('key', 'value') + fd.set('file', new Blob(['Hello, world!']), 'hello_world.txt') + + const server = createServer(async (req, res) => { + const contentType = req.headers['content-type'] + // ensure we received a multipart/form-data header + t.ok(/^multipart\/form-data; boundary=-+formdata-undici-0\d+$/.test(contentType)) + + const chunks = [] + + for await (const chunk of req) { + chunks.push(chunk) + } + + const { fileMap, fields } = await parseFormDataString( + Buffer.concat(chunks), + contentType + ) + + t.same(fields[0], { key: 'key', value: 'value' }) + t.ok(fileMap.has('file')) + t.equal(fileMap.get('file').data.toString(), 'Hello, world!') + t.same(fileMap.get('file').info, { + filename: 'hello_world.txt', + encoding: '7bit', + mimeType: 'application/octet-stream' + }) + + return res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + await client.request({ + path: '/', + method: 'POST', + body: fd + }) + + t.end() + }) +}) + +test('request with FormData body on node < 16', { skip: nodeMajor >= 16 }, async (t) => { + t.plan(1) + + // a FormData polyfill, for example + class FormData {} + + const fd = new FormData() + + const client = new Client('http://localhost:3000') + t.teardown(client.destroy.bind(client)) + + await t.rejects(client.request({ + path: '/', + method: 'POST', + body: fd + }), errors.InvalidArgumentError) +}) + +test('request post body Buffer from string', (t) => { + t.plan(2) + const requestBody = Buffer.from('abcdefghijklmnopqrstuvwxyz') + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'abcdefghijklmnopqrstuvwxyz') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'POST', + body: requestBody, + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) + +test('request post body Buffer from buffer', (t) => { + t.plan(2) + const fullBuffer = new TextEncoder().encode('abcdefghijklmnopqrstuvwxyz') + const requestBody = Buffer.from(fullBuffer.buffer, 8, 16) + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'ijklmnopqrstuvwx') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'POST', + body: requestBody, + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) + +test('request post body Uint8Array', (t) => { + t.plan(2) + const fullBuffer = new TextEncoder().encode('abcdefghijklmnopqrstuvwxyz') + const requestBody = new Uint8Array(fullBuffer.buffer, 8, 16) + + const server = createServer(async (req, res) => { 
+ let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'ijklmnopqrstuvwx') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'POST', + body: requestBody, + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) + +test('request post body Uint32Array', (t) => { + t.plan(2) + const fullBuffer = new TextEncoder().encode('abcdefghijklmnopqrstuvwxyz') + const requestBody = new Uint32Array(fullBuffer.buffer, 8, 4) + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'ijklmnopqrstuvwx') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'POST', + body: requestBody, + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) + +test('request post body Float64Array', (t) => { + t.plan(2) + const fullBuffer = new TextEncoder().encode('abcdefghijklmnopqrstuvwxyz') + const requestBody = new Float64Array(fullBuffer.buffer, 8, 2) + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'ijklmnopqrstuvwx') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'POST', + body: requestBody, + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) + +test('request post body BigUint64Array', (t) => { + t.plan(2) + const fullBuffer = new TextEncoder().encode('abcdefghijklmnopqrstuvwxyz') + const requestBody = new BigUint64Array(fullBuffer.buffer, 8, 2) + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'ijklmnopqrstuvwx') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'POST', + body: requestBody, + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) + +test('request post body DataView', (t) => { + t.plan(2) + const fullBuffer = new TextEncoder().encode('abcdefghijklmnopqrstuvwxyz') + const requestBody = new DataView(fullBuffer.buffer, 8, 16) + + const server = createServer(async (req, res) => { + let ret = '' + for await (const chunk of req) { + ret += chunk + } + t.equal(ret, 'ijklmnopqrstuvwx') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'POST', + body: requestBody, + maxRedirections: 2 + }) + await body.text() + t.pass() + }) +}) diff --git a/test/client-stream.js b/test/client-stream.js new file mode 100644 index 0000000..a230c44 --- /dev/null +++ b/test/client-stream.js @@ -0,0 +1,847 @@ 
+'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') +const { PassThrough, Writable, Readable } = require('stream') +const EE = require('events') + +test('stream get', (t) => { + t.plan(9) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.stream({ + signal, + path: '/', + method: 'GET', + opaque: new PassThrough() + }, ({ statusCode, headers, opaque: pt }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + pt.on('data', (buf) => { + bufs.push(buf) + }) + pt.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + return pt + }, (err) => { + t.equal(signal.listenerCount('abort'), 0) + t.error(err) + }) + t.equal(signal.listenerCount('abort'), 1) + }) +}) + +test('stream promise get', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + await client.stream({ + path: '/', + method: 'GET', + opaque: new PassThrough() + }, ({ statusCode, headers, opaque: pt }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + pt.on('data', (buf) => { + bufs.push(buf) + }) + pt.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + return pt + }) + }) +}) + +test('stream GET destroy res', (t) => { + t.plan(14) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.stream({ + path: '/', + method: 'GET' + }, ({ statusCode, headers }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const pt = new PassThrough() + .on('error', (err) => { + t.ok(err) + }) + .on('data', () => { + pt.destroy(new Error('kaboom')) + }) + + return pt + }, (err) => { + t.ok(err) + }) + + client.stream({ + path: '/', + method: 'GET' + }, ({ statusCode, headers }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + let ret = '' + const pt = new PassThrough() + pt.on('data', chunk => { + ret += chunk + }).on('end', () => { + t.equal(ret, 'hello') + }) + + return pt + }, (err) => { + t.error(err) + }) + }) +}) + +test('stream GET remote destroy', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.write('asd') + setImmediate(() => { + res.destroy() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new 
Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.stream({ + path: '/', + method: 'GET' + }, () => { + const pt = new PassThrough() + pt.on('error', (err) => { + t.ok(err) + }) + return pt + }, (err) => { + t.ok(err) + }) + + client.stream({ + path: '/', + method: 'GET' + }, () => { + const pt = new PassThrough() + pt.on('error', (err) => { + t.ok(err) + }) + return pt + }).catch((err) => { + t.ok(err) + }) + }) +}) + +test('stream response resume back pressure and non standard error', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + res.write(Buffer.alloc(1e3)) + setImmediate(() => { + res.write(Buffer.alloc(1e7)) + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const pt = new PassThrough() + client.stream({ + path: '/', + method: 'GET' + }, () => { + pt.on('data', () => { + pt.emit('error', new Error('kaboom')) + }).once('error', (err) => { + t.equal(err.message, 'kaboom') + }) + return pt + }, (err) => { + t.ok(err) + t.equal(pt.destroyed, true) + }) + + client.once('disconnect', (err) => { + t.ok(err) + }) + + client.stream({ + path: '/', + method: 'GET' + }, () => { + const pt = new PassThrough() + pt.resume() + return pt + }, (err) => { + t.error(err) + }) + }) +}) + +test('stream waits only for writable side', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end(Buffer.alloc(1e3)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const pt = new PassThrough({ autoDestroy: false }) + client.stream({ + path: '/', + method: 'GET' + }, () => pt, (err) => { + t.error(err) + t.equal(pt.destroyed, false) + }) + }) +}) + +test('stream args validation', (t) => { + t.plan(3) + + const client = new Client('http://localhost:5000') + client.stream({ + path: '/', + method: 'GET' + }, null, (err) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.stream(null, null, (err) => { + t.type(err, errors.InvalidArgumentError) + }) + + try { + client.stream(null, null, 'asd') + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } +}) + +test('stream args validation promise', (t) => { + t.plan(2) + + const client = new Client('http://localhost:5000') + client.stream({ + path: '/', + method: 'GET' + }, null).catch((err) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.stream(null, null).catch((err) => { + t.type(err, errors.InvalidArgumentError) + }) +}) + +test('stream destroy if not readable', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + const pt = new PassThrough() + pt.readable = false + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.stream({ + path: '/', + method: 'GET' + }, () => { + return pt + }, (err) => { + t.error(err) + t.equal(pt.destroyed, true) + }) + }) +}) + +test('stream server side destroy', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + 
client.stream({ + path: '/', + method: 'GET' + }, () => { + t.fail() + }, (err) => { + t.type(err, errors.SocketError) + }) + }) +}) + +test('stream invalid return', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.write('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.stream({ + path: '/', + method: 'GET' + }, () => { + return {} + }, (err) => { + t.type(err, errors.InvalidReturnValueError) + }) + }) +}) + +test('stream body without destroy', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.stream({ + path: '/', + method: 'GET' + }, () => { + const pt = new PassThrough({ autoDestroy: false }) + pt.destroy = null + pt.resume() + return pt + }, (err) => { + t.error(err) + }) + }) +}) + +test('stream factory abort', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const signal = new EE() + client.stream({ + path: '/', + method: 'GET', + signal + }, () => { + signal.emit('abort') + return new PassThrough() + }, (err) => { + t.equal(signal.listenerCount('abort'), 0) + t.type(err, errors.RequestAbortedError) + }) + t.equal(signal.listenerCount('abort'), 1) + }) +}) + +test('stream factory throw', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.stream({ + path: '/', + method: 'GET' + }, () => { + throw new Error('asd') + }, (err) => { + t.equal(err.message, 'asd') + }) + client.stream({ + path: '/', + method: 'GET' + }, () => { + throw new Error('asd') + }, (err) => { + t.equal(err.message, 'asd') + }) + client.stream({ + path: '/', + method: 'GET' + }, () => { + return new PassThrough() + }, (err) => { + t.error(err) + }) + }) +}) + +test('stream CONNECT throw', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.stream({ + path: '/', + method: 'CONNECT' + }, () => { + }, (err) => { + t.type(err, errors.InvalidArgumentError) + }) + }) +}) + +test('stream abort after complete', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const pt = new PassThrough() + const signal = new EE() + client.stream({ + path: '/', + method: 'GET', + signal + }, () => { + return pt + }, (err) => { + t.error(err) + signal.emit('abort') + }) + }) +}) + +test('stream abort before dispatch', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + 
t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const pt = new PassThrough() + const signal = new EE() + client.stream({ + path: '/', + method: 'GET', + signal + }, () => { + return pt + }, (err) => { + t.type(err, errors.RequestAbortedError) + }) + signal.emit('abort') + }) +}) + +test('trailers', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.writeHead(200, { Trailer: 'Content-MD5' }) + res.addTrailers({ 'Content-MD5': 'test' }) + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.stream({ + path: '/', + method: 'GET' + }, () => new PassThrough(), (err, data) => { + t.error(err) + t.strictSame(data.trailers, { 'content-md5': 'test' }) + }) + }) +}) + +test('stream ignore 1xx', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.writeProcessing() + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let buf = '' + client.stream({ + path: '/', + method: 'GET' + }, () => new Writable({ + write (chunk, encoding, callback) { + buf += chunk + callback() + } + }), (err, data) => { + t.error(err) + t.equal(buf, 'hello') + }) + }) +}) + +test('stream ignore 1xx and use onInfo', (t) => { + t.plan(4) + + const infos = [] + const server = createServer((req, res) => { + res.writeProcessing() + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let buf = '' + client.stream({ + path: '/', + method: 'GET', + onInfo: (x) => { + infos.push(x) + } + }, () => new Writable({ + write (chunk, encoding, callback) { + buf += chunk + callback() + } + }), (err, data) => { + t.error(err) + t.equal(buf, 'hello') + t.equal(infos.length, 1) + t.equal(infos[0].statusCode, 102) + }) + }) +}) + +test('stream backpressure', (t) => { + t.plan(2) + + const expected = Buffer.alloc(1e6).toString() + + const server = createServer((req, res) => { + res.writeProcessing() + res.end(expected) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let buf = '' + client.stream({ + path: '/', + method: 'GET' + }, () => new Writable({ + highWaterMark: 1, + write (chunk, encoding, callback) { + buf += chunk + process.nextTick(callback) + } + }), (err, data) => { + t.error(err) + t.equal(buf, expected) + }) + }) +}) + +test('stream body destroyed on invalid callback', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const body = new Readable({ + read () {} + }) + try { + client.stream({ + path: '/', + method: 'GET', + body + }, () => {}, null) + } catch (err) { + t.equal(body.destroyed, true) + } + }) +}) + +test('stream needDrain', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end(Buffer.alloc(4096)) + }) + 
t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(() => { + client.destroy() + }) + + const dst = new PassThrough() + dst.pause() + + if (dst.writableNeedDrain === undefined) { + Object.defineProperty(dst, 'writableNeedDrain', { + get () { + return this._writableState.needDrain + } + }) + } + + while (dst.write(Buffer.alloc(4096))) { + // Do nothing. + } + + const orgWrite = dst.write + dst.write = () => t.fail() + const p = client.stream({ + path: '/', + method: 'GET' + }, () => { + t.equal(dst._writableState.needDrain, true) + t.equal(dst.writableNeedDrain, true) + + setImmediate(() => { + dst.write = (...args) => { + orgWrite.call(dst, ...args) + } + dst.resume() + }) + + return dst + }) + + p.then(() => { + t.pass() + }) + }) +}) + +test('stream legacy needDrain', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end(Buffer.alloc(4096)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(() => { + client.destroy() + }) + + const dst = new PassThrough() + dst.pause() + + if (dst.writableNeedDrain !== undefined) { + Object.defineProperty(dst, 'writableNeedDrain', { + get () { + } + }) + } + + while (dst.write(Buffer.alloc(4096))) { + // Do nothing + } + + const orgWrite = dst.write + dst.write = () => t.fail() + const p = client.stream({ + path: '/', + method: 'GET' + }, () => { + t.equal(dst._writableState.needDrain, true) + t.equal(dst.writableNeedDrain, undefined) + + setImmediate(() => { + dst.write = (...args) => { + orgWrite.call(dst, ...args) + } + dst.resume() + }) + + return dst + }) + + p.then(() => { + t.pass() + }) + }) + + test('stream throwOnError', (t) => { + t.plan(2) + + const errStatusCode = 500 + const errMessage = 'Internal Server Error' + + const server = createServer((req, res) => { + res.writeHead(errStatusCode, { 'Content-Type': 'text/plain' }) + res.end(errMessage) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.stream({ + path: '/', + method: 'GET', + throwOnError: true, + opaque: new PassThrough() + }, ({ opaque: pt }) => { + pt.on('data', () => { + t.fail() + }) + return pt + }, (e) => { + t.equal(e.status, errStatusCode) + t.equal(e.body, errMessage) + t.end() + }) + }) + }) + + test('stream throwOnError=true, error on stream', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.stream({ + path: '/', + method: 'GET', + throwOnError: true, + opaque: new PassThrough() + }, () => { + throw new Error('asd') + }, (e) => { + t.equal(e.message, 'asd') + }) + }) + }) +}) diff --git a/test/client-timeout.js b/test/client-timeout.js new file mode 100644 index 0000000..5f1686a --- /dev/null +++ b/test/client-timeout.js @@ -0,0 +1,197 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') +const FakeTimers = require('@sinonjs/fake-timers') +const timers = require('../lib/timers') + +test('refresh timeout on pause', (t) => { + t.plan(1)
+ + const server = createServer((req, res) => { + res.flushHeaders() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 500 + }) + t.teardown(client.destroy.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers, resume) { + setTimeout(() => { + resume() + }, 1000) + return false + }, + onData () { + + }, + onComplete () { + + }, + onError (err) { + t.type(err, errors.BodyTimeoutError) + } + }) + }) +}) + +test('start headers timeout after request body', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0, + headersTimeout: 100 + }) + t.teardown(client.destroy.bind(client)) + + const body = new Readable({ read () {} }) + client.dispatch({ + path: '/', + body, + method: 'GET' + }, { + onConnect () { + process.nextTick(() => { + clock.tick(200) + }) + queueMicrotask(() => { + body.push(null) + body.on('end', () => { + clock.tick(200) + }) + }) + }, + onHeaders (statusCode, headers, resume) { + }, + onData () { + + }, + onComplete () { + + }, + onError (err) { + t.equal(body.readableEnded, true) + t.type(err, errors.HeadersTimeoutError) + } + }) + }) +}) + +test('start headers timeout after async iterator request body', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0, + headersTimeout: 100 + }) + t.teardown(client.destroy.bind(client)) + let res + const body = (async function * () { + await new Promise((resolve) => { res = resolve }) + process.nextTick(() => { + clock.tick(200) + }) + })() + client.dispatch({ + path: '/', + body, + method: 'GET' + }, { + onConnect () { + process.nextTick(() => { + clock.tick(200) + }) + queueMicrotask(() => { + res() + }) + }, + onHeaders (statusCode, headers, resume) { + }, + onData () { + + }, + onComplete () { + + }, + onError (err) { + t.type(err, errors.HeadersTimeoutError) + } + }) + }) +}) + +test('parser resume with no body timeout', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.destroy.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers, resume) { + setTimeout(resume, 2000) + return false + }, + onData () { + + }, + onComplete () { + t.pass() + }, + onError (err) { + t.error(err) + } + }) + }) +}) diff --git a/test/client-unref.js b/test/client-unref.js new file mode 100644 index 0000000..c7e3a5d --- /dev/null +++ b/test/client-unref.js @@ -0,0 +1,47 
@@ +'use strict' + +const { Worker, isMainThread, workerData } = require('worker_threads') + +if (isMainThread) { + const tap = require('tap') + const { createServer } = require('http') + + tap.test('client automatically closes itself when idle', t => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + server.keepAliveTimeout = 9999 + + server.listen(0, () => { + const url = `http://localhost:${server.address().port}` + const worker = new Worker(__filename, { workerData: { url } }) + worker.on('exit', code => { + t.equal(code, 0) + }) + }) + }) + + tap.test('client automatically closes itself if the server is not there', t => { + t.plan(1) + + const url = 'http://localhost:4242' // hopefully empty port + const worker = new Worker(__filename, { workerData: { url } }) + worker.on('exit', code => { + t.equal(code, 0) + }) + }) +} else { + const { Client } = require('..') + + const client = new Client(workerData.url) + client.request({ path: '/', method: 'GET' }, () => { + // We do not care about Errors + + setTimeout(() => { + throw new Error() + }, 1e3).unref() + }) +} diff --git a/test/client-upgrade.js b/test/client-upgrade.js new file mode 100644 index 0000000..4ccbcce --- /dev/null +++ b/test/client-upgrade.js @@ -0,0 +1,452 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const net = require('net') +const http = require('http') +const EE = require('events') +const { kBusy } = require('../lib/core/symbols') + +test('basic upgrade', (t) => { + t.plan(6) + + const server = net.createServer((c) => { + c.on('data', (d) => { + t.ok(/upgrade: websocket/i.test(d)) + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('upgrade: websocket\r\n') + c.write('\r\n') + c.write('Body') + }) + + c.on('end', () => { + c.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.upgrade({ + signal, + path: '/', + method: 'GET', + protocol: 'Websocket' + }, (err, data) => { + t.error(err) + + t.equal(signal.listenerCount('abort'), 0) + + const { headers, socket } = data + + let recvData = '' + data.socket.on('data', (d) => { + recvData += d + }) + + socket.on('close', () => { + t.equal(recvData.toString(), 'Body') + }) + + t.same(headers, { + hello: 'world', + connection: 'upgrade', + upgrade: 'websocket' + }) + socket.end() + }) + t.equal(signal.listenerCount('abort'), 1) + }) +}) + +test('basic upgrade promise', (t) => { + t.plan(2) + + const server = net.createServer((c) => { + c.on('data', (d) => { + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('upgrade: websocket\r\n') + c.write('\r\n') + c.write('Body') + }) + + c.on('end', () => { + c.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { headers, socket } = await client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket' + }) + + let recvData = '' + socket.on('data', (d) => { + recvData += d + }) + + socket.on('close', () => { + t.equal(recvData.toString(), 'Body') + }) + + t.same(headers, { + hello: 'world', + connection: 'upgrade', + upgrade: 'websocket' + }) + socket.end() + }) 
+}) + +test('upgrade error', (t) => { + t.plan(1) + + const server = net.createServer((c) => { + c.on('data', (d) => { + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('\r\n') + c.write('Body') + }) + c.on('error', () => { + // Whether we get an error, end or close is undefined. + // Ignore error. + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + await client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket' + }) + } catch (err) { + t.ok(err) + } + }) +}) + +test('upgrade invalid opts', (t) => { + t.plan(6) + + const client = new Client('http://localhost:5432') + + client.upgrade(null, err => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid opts') + }) + + try { + client.upgrade(null, null) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid opts') + } + + try { + client.upgrade({ path: '/' }, null) + t.fail() + } catch (err) { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'invalid callback') + } +}) + +test('basic upgrade2', (t) => { + t.plan(3) + + const server = http.createServer() + server.on('upgrade', (req, c, head) => { + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('upgrade: websocket\r\n') + c.write('\r\n') + c.write('Body') + c.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket' + }, (err, data) => { + t.error(err) + + const { headers, socket } = data + + let recvData = '' + data.socket.on('data', (d) => { + recvData += d + }) + + socket.on('close', () => { + t.equal(recvData.toString(), 'Body') + }) + + t.same(headers, { + hello: 'world', + connection: 'upgrade', + upgrade: 'websocket' + }) + socket.end() + }) + }) +}) + +test('upgrade wait for empty pipeline', (t) => { + t.plan(7) + + let canConnect = false + const server = http.createServer((req, res) => { + res.end() + canConnect = true + }) + server.on('upgrade', (req, c, firstBodyChunk) => { + t.equal(canConnect, true) + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('upgrade: websocket\r\n') + c.write('\r\n') + c.write('Body') + c.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + client.once('connect', () => { + process.nextTick(() => { + t.equal(client[kBusy], false) + + client.upgrade({ + path: '/' + }, (err, { socket }) => { + t.error(err) + let recvData = '' + socket.on('data', (d) => { + recvData += d + }) + + socket.on('end', () => { + t.equal(recvData.toString(), 'Body') + }) + + socket.write('Body') + socket.end() + }) + t.equal(client[kBusy], true) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + }) + }) + }) +}) + +test('upgrade aborted', (t) => { + t.plan(6) + + const server = http.createServer((req, res) => { + t.fail() + }) + server.on('upgrade', (req, c, 
firstBodyChunk) => { + t.fail() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.destroy.bind(client)) + + const signal = new EE() + client.upgrade({ + path: '/', + signal, + opaque: 'asd' + }, (err, { opaque }) => { + t.equal(opaque, 'asd') + t.type(err, errors.RequestAbortedError) + t.equal(signal.listenerCount('abort'), 0) + }) + t.equal(client[kBusy], true) + t.equal(signal.listenerCount('abort'), 1) + signal.emit('abort') + + client.close(() => { + t.pass() + }) + }) +}) + +test('basic aborted after res', (t) => { + t.plan(1) + + const signal = new EE() + const server = http.createServer() + server.on('upgrade', (req, c, head) => { + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('upgrade: websocket\r\n') + c.write('\r\n') + c.write('Body') + c.end() + c.on('error', () => { + + }) + signal.emit('abort') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket', + signal + }, (err) => { + t.type(err, errors.RequestAbortedError) + }) + }) +}) + +test('basic upgrade error', (t) => { + t.plan(2) + + const server = net.createServer((c) => { + c.on('data', (d) => { + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('upgrade: websocket\r\n') + c.write('\r\n') + c.write('Body') + }) + c.on('error', () => { + + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const _err = new Error() + client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket' + }, (err, data) => { + t.error(err) + data.socket.on('error', (err) => { + t.equal(err, _err) + }) + throw _err + }) + }) +}) + +test('upgrade disconnect', (t) => { + t.plan(3) + + const server = net.createServer(connection => { + connection.destroy() + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.on('disconnect', (origin, [self], error) => { + t.equal(client, self) + t.type(error, Error) + }) + + client + .upgrade({ path: '/', method: 'GET' }) + .then(() => { + t.fail() + }) + .catch(error => { + t.type(error, Error) + }) + }) +}) + +test('upgrade invalid signal', (t) => { + t.plan(2) + + const server = net.createServer(() => { + t.fail() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.on('disconnect', () => { + t.fail() + }) + + client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket', + signal: 'error', + opaque: 'asd' + }, (err, { opaque }) => { + t.equal(opaque, 'asd') + t.type(err, errors.InvalidArgumentError) + }) + }) +}) diff --git a/test/client-write-max-listeners.js b/test/client-write-max-listeners.js new file mode 100644 index 0000000..118cdaa --- /dev/null +++ b/test/client-write-max-listeners.js @@ -0,0 +1,51 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer 
} = require('http') +const { Readable } = require('stream') + +test('socket close listener does not leak', (t) => { + t.plan(32) + + const server = createServer() + + server.on('request', (req, res) => { + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const makeBody = () => { + return new Readable({ + read () { + process.nextTick(() => { + this.push(null) + }) + } + }) + } + + const onRequest = (err, data) => { + t.error(err) + data.body.on('end', () => t.pass()).resume() + } + + function onWarning (warning) { + if (!/ExperimentalWarning/.test(warning)) { + t.fail() + } + } + process.on('warning', onWarning) + t.teardown(() => { + process.removeListener('warning', onWarning) + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + for (let n = 0; n < 16; ++n) { + client.request({ path: '/', method: 'GET', body: makeBody() }, onRequest) + } + }) +}) diff --git a/test/client.js b/test/client.js new file mode 100644 index 0000000..92315d6 --- /dev/null +++ b/test/client.js @@ -0,0 +1,2096 @@ +'use strict' + +const { readFileSync, createReadStream } = require('fs') +const { createServer } = require('http') +const { Readable } = require('stream') +const { test } = require('tap') +const { Client, errors } = require('..') +const { kSocket } = require('../lib/core/symbols') +const { wrapWithAsyncIterable } = require('./utils/async-iterators') +const EE = require('events') +const { kUrl, kSize, kConnect, kBusy, kConnected, kRunning } = require('../lib/core/symbols') + +const hasIPv6 = (() => { + const iFaces = require('os').networkInterfaces() + const re = process.platform === 'win32' ? /Loopback Pseudo-Interface/ : /lo/ + return Object.keys(iFaces).some( + (name) => re.test(name) && iFaces[name].some(({ family }) => family === 6) + ) +})() + +test('basic get', (t) => { + t.plan(24) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(undefined, req.headers.foo) + t.equal('bar', req.headers.bar) + t.equal(undefined, req.headers['content-length']) + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const reqHeaders = { + foo: undefined, + bar: 'bar' + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + t.equal(client[kUrl].origin, `http://localhost:${server.address().port}`) + + const signal = new EE() + client.request({ + signal, + path: '/', + method: 'GET', + headers: reqHeaders + }, (err, data) => { + t.error(err) + const { statusCode, headers, body } = data + t.equal(statusCode, 200) + t.equal(signal.listenerCount('abort'), 1) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal(signal.listenerCount('abort'), 0) + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + t.equal(signal.listenerCount('abort'), 1) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', 
Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic get with custom request.reset=true', (t) => { + t.plan(26) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(req.headers.connection, 'close') + t.equal(undefined, req.headers.foo) + t.equal('bar', req.headers.bar) + t.equal(undefined, req.headers['content-length']) + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const reqHeaders = { + foo: undefined, + bar: 'bar' + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, {}) + t.teardown(client.close.bind(client)) + + t.equal(client[kUrl].origin, `http://localhost:${server.address().port}`) + + const signal = new EE() + client.request({ + signal, + path: '/', + method: 'GET', + reset: true, + headers: reqHeaders + }, (err, data) => { + t.error(err) + const { statusCode, headers, body } = data + t.equal(statusCode, 200) + t.equal(signal.listenerCount('abort'), 1) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal(signal.listenerCount('abort'), 0) + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + t.equal(signal.listenerCount('abort'), 1) + + client.request({ + path: '/', + reset: true, + method: 'GET', + headers: reqHeaders + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic get with query params', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + const searchParamsObject = buildParams(req.url) + t.strictSame(searchParamsObject, { + bool: 'true', + foo: '1', + bar: 'bar', + '%60~%3A%24%2C%2B%5B%5D%40%5E*()-': '%60~%3A%24%2C%2B%5B%5D%40%5E*()-', + multi: ['1', '2'], + nullVal: '', + undefinedVal: '' + }) + + res.statusCode = 200 + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const query = { + bool: true, + foo: 1, + bar: 'bar', + nullVal: null, + undefinedVal: undefined, + '`~:$,+[]@^*()-': '`~:$,+[]@^*()-', + multi: [1, 2] + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.request({ + signal, + path: '/', + method: 'GET', + query + }, (err, data) => { + t.error(err) + const { statusCode } = data + t.equal(statusCode, 200) + }) + t.equal(signal.listenerCount('abort'), 1) + }) +}) + +test('basic get with query params fails if url includes hashmark', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.fail() + }) + t.teardown(server.close.bind(server)) + + const query = { + foo: 1, + bar: 'bar', + multi: [1, 2] + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.request({ + signal, + path: '/#', + method: 'GET', + query + }, (err, data) => { + t.equal(err.message, 'Query params cannot be passed when url already contains "?" 
or "#".') + }) + }) +}) + +test('basic get with empty query params', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + const searchParamsObject = buildParams(req.url) + t.strictSame(searchParamsObject, {}) + + res.statusCode = 200 + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const query = {} + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.request({ + signal, + path: '/', + method: 'GET', + query + }, (err, data) => { + t.error(err) + const { statusCode } = data + t.equal(statusCode, 200) + }) + t.equal(signal.listenerCount('abort'), 1) + }) +}) + +test('basic get with query params partially in path', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.fail() + }) + t.teardown(server.close.bind(server)) + + const query = { + foo: 1 + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.request({ + signal, + path: '/?bar=2', + method: 'GET', + query + }, (err, data) => { + t.equal(err.message, 'Query params cannot be passed when url already contains "?" or "#".') + }) + }) +}) + +test('basic get returns 400 when configured to throw on errors (callback)', (t) => { + t.plan(7) + + const server = createServer((req, res) => { + res.statusCode = 400 + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.request({ + signal, + path: '/', + method: 'GET', + throwOnError: true + }, (err) => { + t.equal(err.message, 'Response status code 400: Bad Request') + t.equal(err.status, 400) + t.equal(err.statusCode, 400) + t.equal(err.headers.connection, 'keep-alive') + t.equal(err.headers['content-length'], '5') + t.same(err.body, null) + }) + t.equal(signal.listenerCount('abort'), 1) + }) +}) + +test('basic get returns 400 when configured to throw on errors and correctly handles malformed json (callback)', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.writeHead(400, 'Invalid params', { 'content-type': 'application/json' }) + res.end('Invalid params') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + const signal = new EE() + client.request({ + signal, + path: '/', + method: 'GET', + throwOnError: true + }, (err) => { + t.equal(err.message, 'Response status code 400: Invalid params') + t.equal(err.status, 400) + t.equal(err.statusCode, 400) + t.equal(err.headers.connection, 'keep-alive') + t.same(err.body, null) + }) + t.equal(signal.listenerCount('abort'), 1) + }) +}) + +test('basic get returns 400 when configured to throw on errors (promise)', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.writeHead(400, 'Invalid params', { 'content-type': 'text/plain' }) + res.end('Invalid params') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + 
t.teardown(client.close.bind(client)) + + const signal = new EE() + try { + await client.request({ + signal, + path: '/', + method: 'GET', + throwOnError: true + }) + t.fail('Should throw an error') + } catch (err) { + t.equal(err.message, 'Response status code 400: Invalid params') + t.equal(err.status, 400) + t.equal(err.statusCode, 400) + t.equal(err.body, 'Invalid params') + t.equal(err.headers.connection, 'keep-alive') + t.equal(err.headers['content-type'], 'text/plain') + } + }) +}) + +test('basic get returns error body when configured to throw on errors', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + const body = { msg: 'Error', details: { code: 94 } } + const bodyAsString = JSON.stringify(body) + res.writeHead(400, 'Invalid params', { + 'Content-Type': 'application/json' + }) + res.end(bodyAsString) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + const signal = new EE() + try { + await client.request({ + signal, + path: '/', + method: 'GET', + throwOnError: true + }) + t.fail('Should throw an error') + } catch (err) { + t.equal(err.message, 'Response status code 400: Invalid params') + t.equal(err.status, 400) + t.equal(err.statusCode, 400) + t.equal(err.headers.connection, 'keep-alive') + t.equal(err.headers['content-type'], 'application/json') + t.same(err.body, { msg: 'Error', details: { code: 94 } }) + } + }) +}) + +test('basic head', (t) => { + t.plan(14) + + const server = createServer((req, res) => { + t.equal('/123', req.url) + t.equal('HEAD', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/123', method: 'HEAD' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + body + .resume() + .on('end', () => { + t.pass() + }) + }) + + client.request({ path: '/123', method: 'HEAD' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + body + .resume() + .on('end', () => { + t.pass() + }) + }) + }) +}) + +test('basic head (IPv6)', { skip: !hasIPv6 }, (t) => { + t.plan(14) + + const server = createServer((req, res) => { + t.equal('/123', req.url) + t.equal('HEAD', req.method) + t.equal(`[::1]:${server.address().port}`, req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, '::', () => { + const client = new Client(`http://[::1]:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/123', method: 'HEAD' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + body + .resume() + .on('end', () => { + t.pass() + }) + }) + + client.request({ path: '/123', method: 'HEAD' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + body + .resume() + .on('end', () => { + t.pass() + }) + }) + }) +}) + +test('get with host header', (t) 
=> { + t.plan(7) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal('example.com', req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello from ' + req.headers.host) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET', headers: { host: 'example.com' } }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello from example.com', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('get with host header (IPv6)', { skip: !hasIPv6 }, (t) => { + t.plan(7) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal('[::1]', req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello from ' + req.headers.host) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, '::', () => { + const client = new Client(`http://[::1]:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET', headers: { host: '[::1]' } }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello from [::1]', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('head with host header', (t) => { + t.plan(7) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('HEAD', req.method) + t.equal('example.com', req.headers.host) + res.setHeader('content-type', 'text/plain') + res.end('hello from ' + req.headers.host) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'HEAD', headers: { host: 'example.com' } }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + body + .resume() + .on('end', () => { + t.pass() + }) + }) + }) +}) + +function postServer (t, expected) { + return function (req, res) { + t.equal(req.url, '/') + t.equal(req.method, 'POST') + t.notSame(req.headers['content-length'], null) + + req.setEncoding('utf8') + let data = '' + + req.on('data', function (d) { data += d }) + + req.on('end', () => { + t.equal(data, expected) + res.end('hello') + }) + } +} + +test('basic POST with string', (t) => { + t.plan(7) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'POST', body: expected }, (err, data) => { + t.error(err) + t.equal(data.statusCode, 200) + const bufs = [] + data.body + .on('data', (buf) => { + bufs.push(buf) + }) + .on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with empty string', (t) => { + t.plan(7) + 
+ const server = createServer(postServer(t, '')) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'POST', body: '' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with string and content-length', (t) => { + t.plan(7) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': Buffer.byteLength(expected) + }, + body: expected + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with Buffer', (t) => { + t.plan(7) + + const expected = readFileSync(__filename) + + const server = createServer(postServer(t, expected.toString())) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'POST', body: expected }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with stream', (t) => { + t.plan(7) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': Buffer.byteLength(expected) + }, + headersTimeout: 0, + body: createReadStream(__filename) + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with paused stream', (t) => { + t.plan(7) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const stream = createReadStream(__filename) + stream.pause() + client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': Buffer.byteLength(expected) + }, + headersTimeout: 0, + body: stream + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', 
Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with custom stream', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + req.resume().on('end', () => { + res.end('hello') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const body = new EE() + body.pipe = () => {} + client.request({ + path: '/', + method: 'POST', + headersTimeout: 0, + body + }, (err, data) => { + t.error(err) + t.equal(data.statusCode, 200) + const bufs = [] + data.body.on('data', (buf) => { + bufs.push(buf) + }) + data.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + t.strictSame(client[kBusy], true) + + body.on('close', () => { + body.emit('end') + }) + + client.on('connect', () => { + setImmediate(() => { + body.emit('data', '') + while (!client[kSocket]._writableState.needDrain) { + body.emit('data', Buffer.alloc(4096)) + } + client[kSocket].on('drain', () => { + body.emit('data', Buffer.alloc(4096)) + body.emit('close') + }) + }) + }) + }) +}) + +test('basic POST with iterator', (t) => { + t.plan(3) + + const expected = 'hello' + + const server = createServer((req, res) => { + req.resume().on('end', () => { + res.end(expected) + }) + }) + t.teardown(server.close.bind(server)) + + const iterable = { + [Symbol.iterator]: function * () { + for (let i = 0; i < expected.length - 1; i++) { + yield expected[i] + } + return expected[expected.length - 1] + } + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + requestTimeout: 0, + body: iterable + }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with iterator with invalid data', (t) => { + t.plan(1) + + const server = createServer(() => {}) + t.teardown(server.close.bind(server)) + + const iterable = { + [Symbol.iterator]: function * () { + yield 0 + } + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + requestTimeout: 0, + body: iterable + }, err => { + t.ok(err instanceof TypeError) + }) + }) +}) + +test('basic POST with async iterator', (t) => { + t.plan(7) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': Buffer.byteLength(expected) + }, + headersTimeout: 0, + body: wrapWithAsyncIterable(createReadStream(__filename)) + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with transfer encoding: chunked', (t) => { + t.plan(8) + + let body + const server = createServer(function (req, res) { + 
t.equal(req.url, '/') + t.equal(req.method, 'POST') + t.same(req.headers['content-length'], null) + t.equal(req.headers['transfer-encoding'], 'chunked') + + body.push(null) + + req.setEncoding('utf8') + let data = '' + + req.on('data', function (d) { data += d }) + + req.on('end', () => { + t.equal(data, 'asd') + res.end('hello') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + body = new Readable({ + read () { } + }) + body.push('asd') + client.request({ + path: '/', + method: 'POST', + // no content-length header + body + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('basic POST with empty stream', (t) => { + t.plan(4) + + const server = createServer(function (req, res) { + t.same(req.headers['content-length'], 0) + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const body = new Readable({ + autoDestroy: false, + read () { + }, + destroy (err, callback) { + callback(!this._readableState.endEmitted ? new Error('asd') : err) + } + }).on('end', () => { + process.nextTick(() => { + t.equal(body.destroyed, true) + }) + }) + body.push(null) + client.request({ + path: '/', + method: 'POST', + body + }, (err, { statusCode, headers, body }) => { + t.error(err) + body + .on('data', () => { + t.fail() + }) + .on('end', () => { + t.pass() + }) + }) + }) +}) + +test('10 times GET', (t) => { + const num = 10 + t.plan(3 * 10) + + const server = createServer((req, res) => { + res.end(req.url) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + for (let i = 0; i < num; i++) { + makeRequest(i) + } + + function makeRequest (i) { + client.request({ path: '/' + i, method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('/' + i, Buffer.concat(bufs).toString('utf8')) + }) + }) + } + }) +}) + +test('10 times HEAD', (t) => { + const num = 10 + t.plan(3 * 10) + + const server = createServer((req, res) => { + res.end(req.url) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + for (let i = 0; i < num; i++) { + makeRequest(i) + } + + function makeRequest (i) { + client.request({ path: '/' + i, method: 'HEAD' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + body + .resume() + .on('end', () => { + t.pass() + }) + }) + } + }) +}) + +test('Set-Cookie', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.setHeader('Set-Cookie', ['a cookie', 'another cookie', 'more cookies']) + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + 
client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.strictSame(headers['set-cookie'], ['a cookie', 'another cookie', 'more cookies']) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('ignore request header mutations', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + t.equal(req.headers.test, 'test') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const headers = { test: 'test' } + client.request({ + path: '/', + method: 'GET', + headers + }, (err, { body }) => { + t.error(err) + body.resume() + }) + headers.test = 'asd' + }) +}) + +test('url-like url', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client({ + hostname: 'localhost', + port: server.address().port, + protocol: 'http:' + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + data.body.resume() + }) + }) +}) + +test('an absolute url as path', (t) => { + t.plan(2) + + const path = 'http://example.com' + + const server = createServer((req, res) => { + t.equal(req.url, path) + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client({ + hostname: 'localhost', + port: server.address().port, + protocol: 'http:' + }) + t.teardown(client.close.bind(client)) + + client.request({ path, method: 'GET' }, (err, data) => { + t.error(err) + data.body.resume() + }) + }) +}) + +test('multiple destroy callback', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client({ + hostname: 'localhost', + port: server.address().port, + protocol: 'http:' + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + data.body + .resume() + .on('error', () => { + t.pass() + }) + client.destroy(new Error(), (err) => { + t.error(err) + }) + client.destroy(new Error(), (err) => { + t.error(err) + }) + }) + }) +}) + +test('only one streaming req at a time', (t) => { + t.plan(7) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 4 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + method: 'PUT', + idempotent: true, + body: new Readable({ + read () { + setImmediate(() => { + t.equal(client[kBusy], true) + this.push(null) + }) + } + }).on('resume', () => { + t.equal(client[kSize], 1) + }) + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + t.equal(client[kBusy], true) + }) + }) +}) + +test('only one async iterating req at a time', (t) => { + t.plan(6) + + const server = 
createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 4 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + const body = wrapWithAsyncIterable(new Readable({ + read () { + setImmediate(() => { + t.equal(client[kBusy], true) + this.push(null) + }) + } + })) + client.request({ + path: '/', + method: 'PUT', + idempotent: true, + body + }, (err, data) => { + t.error(err) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + t.equal(client[kBusy], true) + }) + }) +}) + +test('300 requests succeed', (t) => { + t.plan(300 * 3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + for (let n = 0; n < 300; ++n) { + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.on('data', (chunk) => { + t.equal(chunk.toString(), 'asd') + }).on('end', () => { + t.pass() + }) + }) + } + }) +}) + +test('request args validation', (t) => { + t.plan(2) + + const client = new Client('http://localhost:5000') + + client.request(null, (err) => { + t.type(err, errors.InvalidArgumentError) + }) + + try { + client.request(null, 'asd') + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } +}) + +test('request args validation promise', (t) => { + t.plan(1) + + const client = new Client('http://localhost:5000') + + client.request(null).catch((err) => { + t.type(err, errors.InvalidArgumentError) + }) +}) + +test('increase pipelining', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + req.resume() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, () => { + if (!client.destroyed) { + t.fail() + } + }) + + client.request({ + path: '/', + method: 'GET' + }, () => { + if (!client.destroyed) { + t.fail() + } + }) + + t.equal(client[kRunning], 0) + client.on('connect', () => { + t.equal(client[kRunning], 0) + process.nextTick(() => { + t.equal(client[kRunning], 1) + client.pipelining = 3 + t.equal(client[kRunning], 2) + }) + }) + }) +}) + +test('destroy in push', (t) => { + t.plan(4) + + let _res + const server = createServer((req, res) => { + res.write('asd') + _res = res + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { body }) => { + t.error(err) + body.once('data', () => { + _res.write('asd') + body.on('data', (buf) => { + body.destroy() + _res.end() + }).on('error', (err) => { + t.ok(err) + }) + }) + }) + + client.request({ path: '/', method: 'GET' }, (err, { body }) => { + t.error(err) + let buf = '' + body.on('data', (chunk) => { + buf = chunk.toString() + _res.end() + }).on('end', () => { + t.equal('asd', buf) + }) + }) + }) +}) + +test('non recoverable socket error fails pending request', (t) 
=> { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.equal(err.message, 'kaboom') + }) + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.equal(err.message, 'kaboom') + }) + client.on('connect', () => { + client[kSocket].destroy(new Error('kaboom')) + }) + }) +}) + +test('POST empty with error', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const body = new Readable({ + read () { + } + }) + body.push(null) + client.on('connect', () => { + process.nextTick(() => { + body.emit('error', new Error('asd')) + }) + }) + + client.request({ path: '/', method: 'POST', body }, (err, data) => { + t.equal(err.message, 'asd') + }) + }) +}) + +test('busy', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + client[kConnect](() => { + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + t.equal(client[kBusy], true) + }) + }) +}) + +test('connected', (t) => { + t.plan(7) + + const server = createServer((req, res) => { + // needed so that disconnect is emitted + res.setHeader('connection', 'close') + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const url = new URL(`http://localhost:${server.address().port}`) + const client = new Client(url, { + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + client.on('connect', (origin, [self]) => { + t.equal(origin, url) + t.equal(client, self) + }) + client.on('disconnect', (origin, [self]) => { + t.equal(origin, url) + t.equal(client, self) + }) + + t.equal(client[kConnected], false) + client[kConnect](() => { + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + t.equal(client[kConnected], true) + }) + }) +}) + +test('emit disconnect after destroy', t => { + t.plan(4) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const url = new URL(`http://localhost:${server.address().port}`) + const client = new Client(url) + + t.equal(client[kConnected], false) + client[kConnect](() => { + t.equal(client[kConnected], true) + let disconnected = false + client.on('disconnect', () => { + disconnected = true + t.pass() + }) + client.destroy(() => { + t.equal(disconnected, true) + }) + }) + }) +}) + +test('end response before request', t => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + const readable = new Readable({ + read () { + this.push('asd') + } + }) + const { body } = await client.request({ + method: 'GET', + path: '/', + body: readable + }) + body + .on('error', () => { + t.fail() + }) + .on('end', () => { + t.pass() + }) + 
.resume() + client.on('disconnect', (url, targets, err) => { + t.equal(err.code, 'UND_ERR_INFO') + }) + }) +}) + +test('parser pause with no body timeout', (t) => { + t.plan(2) + const server = createServer((req, res) => { + let counter = 0 + const t = setInterval(() => { + counter++ + const payload = Buffer.alloc(counter * 4096).fill(0) + if (counter === 3) { + clearInterval(t) + res.end(payload) + } else { + res.write(payload) + } + }, 20) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + body.resume() + }) + }) +}) + +test('TypedArray and DataView body', (t) => { + t.plan(3) + const server = createServer((req, res) => { + t.equal(req.headers['content-length'], '8') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + + const body = Uint8Array.from(Buffer.alloc(8)) + client.request({ path: '/', method: 'POST', body }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + body.resume() + }) + }) +}) + +test('async iterator empty chunk continues', (t) => { + t.plan(5) + const serverChunks = ['hello', 'world'] + const server = createServer((req, res) => { + let str = '' + let i = 0 + req.on('data', (chunk) => { + const content = chunk.toString() + t.equal(serverChunks[i++], content) + str += content + }).on('end', () => { + t.equal(str, serverChunks.join('')) + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + + const body = (async function * () { + yield serverChunks[0] + yield '' + yield serverChunks[1] + })() + client.request({ path: '/', method: 'POST', body }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + body.resume() + }) + }) +}) + +test('async iterator error from server destroys early', (t) => { + t.plan(3) + const server = createServer((req, res) => { + req.on('data', (chunk) => { + res.destroy() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + let gotDestroyed + const body = (async function * () { + try { + const promise = new Promise(resolve => { + gotDestroyed = resolve + }) + yield 'hello' + await promise + yield 'inner-value' + t.fail('should not get here, iterator should be destroyed') + } finally { + t.ok(true) + } + })() + client.request({ path: '/', method: 'POST', body }, (err, { statusCode, body }) => { + t.ok(err) + t.equal(statusCode, undefined) + gotDestroyed() + }) + }) +}) + +test('regular iterator error from server closes early', (t) => { + t.plan(3) + const server = createServer((req, res) => { + req.on('data', () => { + process.nextTick(() => { + res.destroy() + }) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + let gotDestroyed = false + const body = 
(function * () { + try { + yield 'start' + while (!gotDestroyed) { + yield 'zzz' + // for eslint + gotDestroyed = gotDestroyed || false + } + yield 'zzz' + t.fail('should not get here, iterator should be destroyed') + yield 'zzz' + } finally { + t.ok(true) + } + })() + client.request({ path: '/', method: 'POST', body }, (err, { statusCode, body }) => { + t.ok(err) + t.equal(statusCode, undefined) + gotDestroyed = true + }) + }) +}) + +test('async iterator early return closes early', (t) => { + t.plan(3) + const server = createServer((req, res) => { + req.on('data', () => { + res.writeHead(200) + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + let gotDestroyed + const body = (async function * () { + try { + const promise = new Promise(resolve => { + gotDestroyed = resolve + }) + yield 'hello' + await promise + yield 'inner-value' + t.fail('should not get here, iterator should be destroyed') + } finally { + t.ok(true) + } + })() + client.request({ path: '/', method: 'POST', body }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + gotDestroyed() + }) + }) +}) + +test('async iterator yield unsupported TypedArray', (t) => { + t.plan(3) + const server = createServer((req, res) => { + req.on('end', () => { + res.writeHead(200) + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + const body = (async function * () { + try { + yield new Int32Array([1]) + t.fail('should not get here, iterator should be destroyed') + } finally { + t.ok(true) + } + })() + client.request({ path: '/', method: 'POST', body }, (err) => { + t.ok(err) + t.equal(err.code, 'ERR_INVALID_ARG_TYPE') + }) + }) +}) + +test('async iterator yield object error', (t) => { + t.plan(3) + const server = createServer((req, res) => { + req.on('end', () => { + res.writeHead(200) + res.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0 + }) + t.teardown(client.close.bind(client)) + const body = (async function * () { + try { + yield {} + t.fail('should not get here, iterator should be destroyed') + } finally { + t.ok(true) + } + })() + client.request({ path: '/', method: 'POST', body }, (err) => { + t.ok(err) + t.equal(err.code, 'ERR_INVALID_ARG_TYPE') + }) + }) +}) + +function buildParams (path) { + const cleanPath = path.replace('/?', '').replace('/', '').split('&') + const builtParams = cleanPath.reduce((acc, entry) => { + const [key, value] = entry.split('=') + if (key.length === 0) { + return acc + } + + if (acc[key]) { + if (Array.isArray(acc[key])) { + acc[key].push(value) + } else { + acc[key] = [acc[key], value] + } + } else { + acc[key] = value + } + return acc + }, {}) + + return builtParams +} + +test('\\r\\n in Headers', (t) => { + t.plan(1) + + const reqHeaders = { + bar: '\r\nbar' + } + + const client = new Client('http://localhost:4242', { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err) => { + t.equal(err.message, 'invalid bar header') + }) +}) + +test('\\r in Headers', (t) => { + t.plan(1) + + const reqHeaders = { + 
bar: '\rbar' + } + + const client = new Client('http://localhost:4242', { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err) => { + t.equal(err.message, 'invalid bar header') + }) +}) + +test('\\n in Headers', (t) => { + t.plan(1) + + const reqHeaders = { + bar: '\nbar' + } + + const client = new Client('http://localhost:4242', { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err) => { + t.equal(err.message, 'invalid bar header') + }) +}) + +test('\\n in Headers', (t) => { + t.plan(1) + + const reqHeaders = { + '\nbar': 'foo' + } + + const client = new Client('http://localhost:4242', { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err) => { + t.equal(err.message, 'invalid header key') + }) +}) + +test('\\n in Path', (t) => { + t.plan(1) + + const client = new Client('http://localhost:4242', { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/\n', + method: 'GET' + }, (err) => { + t.equal(err.message, 'invalid request path') + }) +}) + +test('\\n in Method', (t) => { + t.plan(1) + + const client = new Client('http://localhost:4242', { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET\n' + }, (err) => { + t.equal(err.message, 'invalid request method') + }) +}) diff --git a/test/close-and-destroy.js b/test/close-and-destroy.js new file mode 100644 index 0000000..bd50ebb --- /dev/null +++ b/test/close-and-destroy.js @@ -0,0 +1,344 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') +const { kSocket, kSize } = require('../lib/core/symbols') + +test('close waits for queued requests to finish', (t) => { + t.plan(16) + + const server = createServer() + + server.on('request', (req, res) => { + t.pass('request received') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, function (err, data) { + onRequest(err, data) + + client.request({ path: '/', method: 'GET' }, onRequest) + client.request({ path: '/', method: 'GET' }, onRequest) + client.request({ path: '/', method: 'GET' }, onRequest) + + // needed because the next element in the queue will be called + // after the current function completes + process.nextTick(function () { + client.close() + }) + }) + }) + + function onRequest (err, { statusCode, headers, body }) { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } +}) + +test('destroy invoked all pending callbacks', (t) => { + t.plan(4) + + const server = createServer() + + server.on('request', (req, res) => { + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + 
data.body.on('error', (err) => { + t.ok(err) + }).resume() + client.destroy() + }) + client.request({ path: '/', method: 'GET' }, (err) => { + t.type(err, errors.ClientDestroyedError) + }) + client.request({ path: '/', method: 'GET' }, (err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) +}) + +test('destroy invoked all pending callbacks ticked', (t) => { + t.plan(4) + + const server = createServer() + + server.on('request', (req, res) => { + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.destroy.bind(client)) + + let ticked = false + client.request({ path: '/', method: 'GET' }, (err) => { + t.equal(ticked, true) + t.type(err, errors.ClientDestroyedError) + }) + client.request({ path: '/', method: 'GET' }, (err) => { + t.equal(ticked, true) + t.type(err, errors.ClientDestroyedError) + }) + client.destroy() + ticked = true + }) +}) + +test('close waits until socket is destroyed', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.end(req.url) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + makeRequest() + + client.once('connect', () => { + let done = false + client[kSocket].on('close', () => { + done = true + }) + client.close((err) => { + t.error(err) + t.equal(client.closed, true) + t.equal(done, true) + }) + }) + + function makeRequest () { + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + }) + return client[kSize] <= client.pipelining + } + }) +}) + +test('close should still reconnect', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.end(req.url) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + t.ok(makeRequest()) + t.ok(!makeRequest()) + + client.close((err) => { + t.error(err) + t.equal(client.closed, true) + }) + client.once('connect', () => { + client[kSocket].destroy() + }) + + function makeRequest () { + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + data.body.resume() + }) + return client[kSize] <= client.pipelining + } + }) +}) + +test('close should call callback once finished', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + setImmediate(function () { + res.end(req.url) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + t.ok(makeRequest()) + t.ok(!makeRequest()) + + client.close((err) => { + t.error(err) + t.equal(client.closed, true) + }) + + function makeRequest () { + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + data.body.resume() + }) + return client[kSize] <= client.pipelining + } + }) +}) + +test('closed and destroyed errors', (t) => { + t.plan(4) + + const client = new Client('http://localhost:4000') + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err) => { + t.ok(err) + }) + client.close((err) => { + t.error(err) + }) + client.request({ path: '/', method: 'GET' }, (err) => { + t.type(err, errors.ClientClosedError) + client.destroy() + client.request({ path: '/', 
method: 'GET' }, (err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) +}) + +test('close after and destroy should error', (t) => { + t.plan(2) + + const client = new Client('http://localhost:4000') + t.teardown(client.destroy.bind(client)) + + client.destroy() + client.close((err) => { + t.type(err, errors.ClientDestroyedError) + }) + client.close().catch((err) => { + t.type(err, errors.ClientDestroyedError) + }) +}) + +test('close socket and reconnect after maxRequestsPerClient reached', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + res.end(req.url) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + let connections = 0 + server.on('connection', () => { + connections++ + }) + const client = new Client( + `http://localhost:${server.address().port}`, + { maxRequestsPerClient: 2 } + ) + t.teardown(client.destroy.bind(client)) + + await t.resolves(makeRequest()) + await t.resolves(makeRequest()) + await t.resolves(makeRequest()) + await t.resolves(makeRequest()) + t.equal(connections, 2) + + function makeRequest () { + return client.request({ path: '/', method: 'GET' }) + } + }) +}) + +test('close socket and reconnect after maxRequestsPerClient reached (async)', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end(req.url) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + let connections = 0 + server.on('connection', () => { + connections++ + }) + const client = new Client( + `http://localhost:${server.address().port}`, + { maxRequestsPerClient: 2 } + ) + t.teardown(client.destroy.bind(client)) + + await t.resolves( + Promise.all([ + makeRequest(), + makeRequest(), + makeRequest(), + makeRequest() + ]) + ) + t.equal(connections, 2) + + function makeRequest () { + return client.request({ path: '/', method: 'GET' }) + } + }) +}) + +test('should not close socket when no maxRequestsPerClient is provided', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + res.end(req.url) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + let connections = 0 + server.on('connection', () => { + connections++ + }) + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + await t.resolves(makeRequest()) + await t.resolves(makeRequest()) + await t.resolves(makeRequest()) + await t.resolves(makeRequest()) + t.equal(connections, 1) + + function makeRequest () { + return client.request({ path: '/', method: 'GET' }) + } + }) +}) diff --git a/test/connect-abort.js b/test/connect-abort.js new file mode 100644 index 0000000..6eb3624 --- /dev/null +++ b/test/connect-abort.js @@ -0,0 +1,28 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { PassThrough } = require('stream') + +test(t => { + t.plan(2) + + const client = new Client('http://localhost:1234', { + connect: (_, cb) => { + client.destroy() + cb(null, new PassThrough({ + destroy (err, cb) { + t.same(err?.name, 'ClientDestroyedError') + cb(null) + } + })) + } + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.same(err?.name, 'ClientDestroyedError') + }) +}) diff --git a/test/connect-errconnect.js b/test/connect-errconnect.js new file mode 100644 index 0000000..defeda3 --- /dev/null +++ b/test/connect-errconnect.js @@ -0,0 +1,32 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const net = require('net') + 
+test('connect-connectionError', t => { + t.plan(2) + + const client = new Client('http://localhost:9000') + t.teardown(client.close.bind(client)) + + client.once('connectionError', () => { + t.pass() + }) + + const _err = new Error('kaboom') + net.connect = function (options) { + const socket = new net.Socket(options) + setImmediate(() => { + socket.destroy(_err) + }) + return socket + } + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.equal(err, _err) + }) +}) diff --git a/test/connect-timeout.js b/test/connect-timeout.js new file mode 100644 index 0000000..a736a54 --- /dev/null +++ b/test/connect-timeout.js @@ -0,0 +1,68 @@ +'use strict' + +const { test } = require('tap') +const { Client, Pool, errors } = require('..') +const net = require('net') +const sleep = require('atomic-sleep') + +test('prioritise socket errors over timeouts', (t) => { + t.plan(1) + const connectTimeout = 1000 + const client = new Pool('http://foobar.bar:1234', { connectTimeout: 2 }) + + client.request({ method: 'GET', path: '/foobar' }) + .then(() => t.fail()) + .catch((err) => { + t.equal(err.code, 'ENOTFOUND') + }) + + // block for 1s, which is enough for the DNS lookup to complete and the connect timeout to fire + sleep(connectTimeout) +}) + +// never connect +net.connect = function (options) { + return new net.Socket(options) +} + +test('connect-timeout', t => { + t.plan(1) + + const client = new Client('http://localhost:9000', { + connectTimeout: 1e3 + }) + t.teardown(client.close.bind(client)) + + const timeout = setTimeout(() => { + t.fail() + }, 2e3) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.type(err, errors.ConnectTimeoutError) + clearTimeout(timeout) + }) +}) + +test('connect-timeout', t => { + t.plan(1) + + const client = new Pool('http://localhost:9000', { + connectTimeout: 1e3 + }) + t.teardown(client.close.bind(client)) + + const timeout = setTimeout(() => { + t.fail() + }, 2e3) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.type(err, errors.ConnectTimeoutError) + clearTimeout(timeout) + }) +}) diff --git a/test/content-length.js b/test/content-length.js new file mode 100644 index 0000000..9ce7405 --- /dev/null +++ b/test/content-length.js @@ -0,0 +1,445 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') +const { maybeWrapStream, consts } = require('./utils/async-iterators') + +test('request invalid content-length', (t) => { + t.plan(7) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: 'asd' + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: 'asdasdasdasdasdasda' + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: Buffer.alloc(9) + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: Buffer.alloc(11) + }, (err, data) => { + t.type(err,
errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 4 + }, + body: ['asd'] + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 4 + }, + body: ['asasdasdasdd'] + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'DELETE', + headers: { + 'content-length': 4 + }, + body: ['asasdasdasdd'] + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + }) +}) + +function invalidContentLength (bodyType) { + test(`request streaming ${bodyType} invalid content-length`, (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.once('disconnect', () => { + t.pass() + client.once('disconnect', () => { + t.pass() + }) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: maybeWrapStream(new Readable({ + read () { + setImmediate(() => { + this.push('asdasdasdkajsdnasdkjasnd') + this.push(null) + }) + } + }), bodyType) + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: maybeWrapStream(new Readable({ + read () { + setImmediate(() => { + this.push('asd') + this.push(null) + }) + } + }), bodyType) + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + }) + }) +} + +invalidContentLength(consts.STREAM) +invalidContentLength(consts.ASYNC_ITERATOR) + +function zeroContentLength (bodyType) { + test(`request ${bodyType} streaming data when content-length=0`, (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 0 + }, + body: maybeWrapStream(new Readable({ + read () { + setImmediate(() => { + this.push('asdasdasdkajsdnasdkjasnd') + this.push(null) + }) + } + }), bodyType) + }, (err, data) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + }) + }) +} + +zeroContentLength(consts.STREAM) +zeroContentLength(consts.ASYNC_ITERATOR) + +test('request streaming no body data when content-length=0', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 0 + } + }, (err, data) => { + t.error(err) + data.body + .on('data', () => { + t.fail() + }) + .on('end', () => { + t.pass() + }) + }) + }) +}) + +test('response invalid content length with close', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.writeHead(200, { + 'content-length': 10 + }) + res.end('123') + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new 
Client(`http://localhost:${server.address().port}`, { + pipelining: 0 + }) + t.teardown(client.destroy.bind(client)) + + client.on('disconnect', (origin, client, err) => { + t.equal(err.code, 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH') + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body + .on('end', () => { + t.fail() + }) + .on('error', (err) => { + t.equal(err.code, 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH') + }) + .resume() + }) + }) +}) + +test('request streaming with Readable.from(buf)', (t) => { + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + body: Readable.from(Buffer.from('hello')) + }, (err, data) => { + const chunks = [] + t.error(err) + data.body + .on('data', (chunk) => { + chunks.push(chunk) + }) + .on('end', () => { + t.equal(Buffer.concat(chunks).toString(), 'hello') + t.pass() + t.end() + }) + }) + }) +}) + +test('request DELETE, content-length=0, with body', (t) => { + t.plan(5) + const server = createServer((req, res) => { + res.end() + }) + server.on('request', (req, res) => { + t.equal(req.headers['content-length'], undefined) + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'DELETE', + headers: { + 'content-length': 0 + }, + body: new Readable({ + read () { + this.push('asd') + this.push(null) + } + }) + }, (err) => { + t.type(err, errors.RequestContentLengthMismatchError) + }) + + client.request({ + path: '/', + method: 'DELETE', + headers: { + 'content-length': 0 + } + }, (err, resp) => { + t.equal(resp.headers['content-length'], '0') + t.error(err) + }) + + client.on('disconnect', () => { + t.pass() + }) + }) +}) + +test('content-length shouldSendContentLength=false', (t) => { + t.plan(15) + const server = createServer((req, res) => { + res.end() + }) + server.on('request', (req, res) => { + switch (req.url) { + case '/put0': + t.equal(req.headers['content-length'], '0') + break + case '/head': + t.equal(req.headers['content-length'], undefined) + break + case '/get': + t.equal(req.headers['content-length'], undefined) + break + } + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/put0', + method: 'PUT', + headers: { + 'content-length': 0 + } + }, (err, resp) => { + t.equal(resp.headers['content-length'], '0') + t.error(err) + }) + + client.request({ + path: '/head', + method: 'HEAD', + headers: { + 'content-length': 10 + } + }, (err, resp) => { + t.equal(resp.headers['content-length'], undefined) + t.error(err) + }) + + client.request({ + path: '/get', + method: 'GET', + headers: { + 'content-length': 0 + } + }, (err) => { + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 4 + }, + body: new Readable({ + read () { + this.push('asd') + this.push(null) + } + }) + }, (err) => { + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 4 + }, + body: new Readable({ + read () { + this.push('asasdasdasdd') + this.push(null) + } + }) + }, 
(err) => { + t.error(err) + }) + + client.request({ + path: '/', + method: 'HEAD', + headers: { + 'content-length': 4 + }, + body: new Readable({ + read () { + this.push('asasdasdasdd') + this.push(null) + } + }) + }, (err) => { + t.error(err) + }) + + client.on('disconnect', () => { + t.pass() + }) + }) +}) diff --git a/test/cookie/cookies.js b/test/cookie/cookies.js new file mode 100644 index 0000000..70222fa --- /dev/null +++ b/test/cookie/cookies.js @@ -0,0 +1,616 @@ +// MIT License +// +// Copyright 2018-2022 the Deno authors. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +'use strict' + +const { test } = require('tap') +const { + deleteCookie, + getCookies, + getSetCookies, + setCookie, + Headers +} = require('../..') + +// https://raw.githubusercontent.com/denoland/deno_std/b4239898d6c6b4cdbfd659a4ea1838cf4e656336/http/cookie_test.ts + +test('Cookie parser', (t) => { + let headers = new Headers() + t.same(getCookies(headers), {}) + headers = new Headers() + headers.set('Cookie', 'foo=bar') + t.same(getCookies(headers), { foo: 'bar' }) + + headers = new Headers() + headers.set('Cookie', 'full=of ; tasty=chocolate') + t.same(getCookies(headers), { full: 'of ', tasty: 'chocolate' }) + + headers = new Headers() + headers.set('Cookie', 'igot=99; problems=but...') + t.same(getCookies(headers), { igot: '99', problems: 'but...' 
}) + + headers = new Headers() + headers.set('Cookie', 'PREF=al=en-GB&f1=123; wide=1; SID=123') + t.same(getCookies(headers), { + PREF: 'al=en-GB&f1=123', + wide: '1', + SID: '123' + }) + + t.end() +}) + +test('Cookie Name Validation', (t) => { + const tokens = [ + '"id"', + 'id\t', + 'i\td', + 'i d', + 'i;d', + '{id}', + '[id]', + '"', + 'id\u0091' + ] + const headers = new Headers() + tokens.forEach((name) => { + t.throws( + () => { + setCookie(headers, { + name, + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 3 + }) + }, + Error + ) + }) + + t.end() +}) + +test('Cookie Value Validation', (t) => { + const tokens = [ + '1f\tWa', + '\t', + '1f Wa', + '1f;Wa', + '"1fWa', + '1f\\Wa', + '1f"Wa', + '"', + '1fWa\u0005', + '1f\u0091Wa' + ] + + const headers = new Headers() + tokens.forEach((value) => { + t.throws( + () => { + setCookie( + headers, + { + name: 'Space', + value, + httpOnly: true, + secure: true, + maxAge: 3 + } + ) + }, + Error, + "RFC2616 cookie 'Space'" + ) + }) + + t.throws( + () => { + setCookie(headers, { + name: 'location', + value: 'United Kingdom' + }) + }, + Error, + "RFC2616 cookie 'location' cannot contain character ' '" + ) + + t.end() +}) + +test('Cookie Path Validation', (t) => { + const path = '/;domain=sub.domain.com' + const headers = new Headers() + t.throws( + () => { + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + path, + maxAge: 3 + }) + }, + Error, + path + ": Invalid cookie path char ';'" + ) + + t.end() +}) + +test('Cookie Domain Validation', (t) => { + const tokens = ['-domain.com', 'domain.org.', 'domain.org-'] + const headers = new Headers() + tokens.forEach((domain) => { + t.throws( + () => { + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + domain, + maxAge: 3 + }) + }, + Error, + 'Invalid first/last char in cookie domain: ' + domain + ) + }) + + t.end() +}) + +test('Cookie Delete', (t) => { + let headers = new Headers() + deleteCookie(headers, 'deno') + t.equal( + headers.get('Set-Cookie'), + 'deno=; Expires=Thu, 01 Jan 1970 00:00:00 GMT' + ) + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + domain: 'deno.land', + path: '/' + }) + deleteCookie(headers, 'Space', { domain: '', path: '' }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Domain=deno.land; Path=/, Space=; Expires=Thu, 01 Jan 1970 00:00:00 GMT' + ) + + t.end() +}) + +test('Cookie Set', (t) => { + let headers = new Headers() + setCookie(headers, { name: 'Space', value: 'Cat' }) + t.equal(headers.get('Set-Cookie'), 'Space=Cat') + + headers = new Headers() + setCookie(headers, { name: 'Space', value: 'Cat', secure: true }) + t.equal(headers.get('Set-Cookie'), 'Space=Cat; Secure') + + headers = new Headers() + setCookie(headers, { name: 'Space', value: 'Cat', httpOnly: true }) + t.equal(headers.get('Set-Cookie'), 'Space=Cat; HttpOnly') + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true + }) + t.equal(headers.get('Set-Cookie'), 'Space=Cat; Secure; HttpOnly') + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 2 + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=2' + ) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 0 + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=0' + ) + 
+ let error = false + headers = new Headers() + try { + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: -1 + }) + } catch { + error = true + } + t.ok(error) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 2, + domain: 'deno.land' + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land' + ) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 2, + domain: 'deno.land', + sameSite: 'Strict' + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; ' + + 'SameSite=Strict' + ) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 2, + domain: 'deno.land', + sameSite: 'Lax' + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; SameSite=Lax' + ) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 2, + domain: 'deno.land', + path: '/' + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; Path=/' + ) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 2, + domain: 'deno.land', + path: '/', + unparsed: ['unparsed=keyvalue', 'batman=Bruce'] + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; Path=/; ' + + 'unparsed=keyvalue; batman=Bruce' + ) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + httpOnly: true, + secure: true, + maxAge: 2, + domain: 'deno.land', + path: '/', + expires: new Date(Date.UTC(1983, 0, 7, 15, 32)) + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; Path=/; ' + + 'Expires=Fri, 07 Jan 1983 15:32:00 GMT' + ) + + headers = new Headers() + setCookie(headers, { + name: 'Space', + value: 'Cat', + expires: Date.UTC(1983, 0, 7, 15, 32) + }) + t.equal( + headers.get('Set-Cookie'), + 'Space=Cat; Expires=Fri, 07 Jan 1983 15:32:00 GMT' + ) + + headers = new Headers() + setCookie(headers, { name: '__Secure-Kitty', value: 'Meow' }) + t.equal(headers.get('Set-Cookie'), '__Secure-Kitty=Meow; Secure') + + headers = new Headers() + setCookie(headers, { + name: '__Host-Kitty', + value: 'Meow', + domain: 'deno.land' + }) + t.equal( + headers.get('Set-Cookie'), + '__Host-Kitty=Meow; Secure; Path=/' + ) + + headers = new Headers() + setCookie(headers, { name: 'cookie-1', value: 'value-1', secure: true }) + setCookie(headers, { name: 'cookie-2', value: 'value-2', maxAge: 3600 }) + t.equal( + headers.get('Set-Cookie'), + 'cookie-1=value-1; Secure, cookie-2=value-2; Max-Age=3600' + ) + + headers = new Headers() + setCookie(headers, { name: '', value: '' }) + t.equal(headers.get('Set-Cookie'), null) + + t.end() +}) + +test('Set-Cookie parser', (t) => { + let headers = new Headers({ 'set-cookie': 'Space=Cat' }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat' + }]) + + headers = new Headers({ 'set-cookie': 'Space=Cat; Secure' }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true + }]) + + headers = new Headers({ 'set-cookie': 'Space=Cat; HttpOnly' }) + t.same(getSetCookies(headers), [{ + name: 
'Space', + value: 'Cat', + httpOnly: true + }]) + + headers = new Headers({ 'set-cookie': 'Space=Cat; Secure; HttpOnly' }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true + }]) + + headers = new Headers({ + 'set-cookie': 'Space=Cat; Secure; HttpOnly; Max-Age=2' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 2 + }]) + + headers = new Headers({ + 'set-cookie': 'Space=Cat; Secure; HttpOnly; Max-Age=0' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 0 + }]) + + headers = new Headers({ + 'set-cookie': 'Space=Cat; Secure; HttpOnly; Max-Age=-1' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true + }]) + + headers = new Headers({ + 'set-cookie': 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 2, + domain: 'deno.land' + }]) + + headers = new Headers({ + 'set-cookie': + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; SameSite=Strict' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 2, + domain: 'deno.land', + sameSite: 'Strict' + }]) + + headers = new Headers({ + 'set-cookie': + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; SameSite=Lax' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 2, + domain: 'deno.land', + sameSite: 'Lax' + }]) + + headers = new Headers({ + 'set-cookie': + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; Path=/' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 2, + domain: 'deno.land', + path: '/' + }]) + + headers = new Headers({ + 'set-cookie': + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; Path=/; unparsed=keyvalue; batman=Bruce' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 2, + domain: 'deno.land', + path: '/', + unparsed: ['unparsed=keyvalue', 'batman=Bruce'] + }]) + + headers = new Headers({ + 'set-cookie': + 'Space=Cat; Secure; HttpOnly; Max-Age=2; Domain=deno.land; Path=/; ' + + 'Expires=Fri, 07 Jan 1983 15:32:00 GMT' + }) + t.same(getSetCookies(headers), [{ + name: 'Space', + value: 'Cat', + secure: true, + httpOnly: true, + maxAge: 2, + domain: 'deno.land', + path: '/', + expires: new Date(Date.UTC(1983, 0, 7, 15, 32)) + }]) + + headers = new Headers({ 'set-cookie': '__Secure-Kitty=Meow; Secure' }) + t.same(getSetCookies(headers), [{ + name: '__Secure-Kitty', + value: 'Meow', + secure: true + }]) + + headers = new Headers({ 'set-cookie': '__Secure-Kitty=Meow' }) + t.same(getSetCookies(headers), [{ + name: '__Secure-Kitty', + value: 'Meow' + }]) + + headers = new Headers({ + 'set-cookie': '__Host-Kitty=Meow; Secure; Path=/' + }) + t.same(getSetCookies(headers), [{ + name: '__Host-Kitty', + value: 'Meow', + secure: true, + path: '/' + }]) + + headers = new Headers({ 'set-cookie': '__Host-Kitty=Meow; Path=/' }) + t.same(getSetCookies(headers), [{ + name: '__Host-Kitty', + value: 'Meow', + path: '/' + }]) + + headers = new Headers({ + 'set-cookie': '__Host-Kitty=Meow; Secure; Domain=deno.land; Path=/' + }) + t.same(getSetCookies(headers), [{ + name: '__Host-Kitty', + value: 
'Meow', + secure: true, + domain: 'deno.land', + path: '/' + }]) + + headers = new Headers({ + 'set-cookie': '__Host-Kitty=Meow; Secure; Path=/not-root' + }) + t.same(getSetCookies(headers), [{ + name: '__Host-Kitty', + value: 'Meow', + secure: true, + path: '/not-root' + }]) + + headers = new Headers([ + ['set-cookie', 'cookie-1=value-1; Secure'], + ['set-cookie', 'cookie-2=value-2; Max-Age=3600'] + ]) + t.same(getSetCookies(headers), [ + { name: 'cookie-1', value: 'value-1', secure: true }, + { name: 'cookie-2', value: 'value-2', maxAge: 3600 } + ]) + + headers = new Headers() + t.same(getSetCookies(headers), []) + + t.end() +}) diff --git a/test/cookie/global-headers.js b/test/cookie/global-headers.js new file mode 100644 index 0000000..1d58dce --- /dev/null +++ b/test/cookie/global-headers.js @@ -0,0 +1,70 @@ +'use strict' + +const { test, skip } = require('tap') +const { + deleteCookie, + getCookies, + getSetCookies, + setCookie +} = require('../..') +const { getHeadersList } = require('../../lib/cookies/util') + +/* global Headers */ + +if (!globalThis.Headers) { + skip('No global Headers to test') + process.exit(0) +} + +test('Using global Headers', (t) => { + t.test('deleteCookies', (t) => { + const headers = new Headers() + + t.equal(headers.get('set-cookie'), null) + deleteCookie(headers, 'undici') + t.equal(headers.get('set-cookie'), 'undici=; Expires=Thu, 01 Jan 1970 00:00:00 GMT') + + t.end() + }) + + t.test('getCookies', (t) => { + const headers = new Headers({ + cookie: 'get=cookies; and=attributes' + }) + + t.same(getCookies(headers), { get: 'cookies', and: 'attributes' }) + t.end() + }) + + t.test('getSetCookies', (t) => { + const headers = new Headers({ + 'set-cookie': 'undici=getSetCookies; Secure' + }) + + const supportsCookies = getHeadersList(headers).cookies + + if (!supportsCookies) { + t.same(getSetCookies(headers), []) + } else { + t.same(getSetCookies(headers), [ + { + name: 'undici', + value: 'getSetCookies', + secure: true + } + ]) + } + + t.end() + }) + + t.test('setCookie', (t) => { + const headers = new Headers() + + setCookie(headers, { name: 'undici', value: 'setCookie' }) + t.equal(headers.get('Set-Cookie'), 'undici=setCookie') + t.end() + }) + + t.end() +}) diff --git a/test/diagnostics-channel/connect-error.js b/test/diagnostics-channel/connect-error.js new file mode 100644 index 0000000..f7e842d --- /dev/null +++ b/test/diagnostics-channel/connect-error.js @@ -0,0 +1,61 @@ +'use strict' + +const t = require('tap') + +let diagnosticsChannel + +try { + diagnosticsChannel = require('diagnostics_channel') +} catch { + t.skip('missing diagnostics_channel') + process.exit(0) +} + +const { Client } = require('../..') + +t.plan(16) + +const connectError = new Error('custom error') + +let _connector +diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => { + _connector = connector + + t.equal(typeof _connector, 'function') + t.equal(Object.keys(connectParams).length, 6) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(host, 'localhost:1234') + t.equal(hostname, 'localhost') + t.equal(port, '1234') + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +diagnosticsChannel.channel('undici:client:connectError').subscribe(({ error, connectParams, connector }) => { + t.equal(Object.keys(connectParams).length, 6) + t.equal(_connector, connector) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(error, connectError) + t.equal(host, 
'localhost:1234') + t.equal(hostname, 'localhost') + t.equal(port, '1234') + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +const client = new Client('http://localhost:1234', { + connect: (_, cb) => { cb(connectError, null) } +}) + +t.teardown(client.close.bind(client)) + +client.request({ + path: '/', + method: 'GET' +}, (err, data) => { + t.equal(err, connectError) +}) diff --git a/test/diagnostics-channel/error.js b/test/diagnostics-channel/error.js new file mode 100644 index 0000000..1f350b1 --- /dev/null +++ b/test/diagnostics-channel/error.js @@ -0,0 +1,52 @@ +'use strict' + +const t = require('tap') + +let diagnosticsChannel + +try { + diagnosticsChannel = require('diagnostics_channel') +} catch { + t.skip('missing diagnostics_channel') + process.exit(0) +} + +const { Client } = require('../..') +const { createServer } = require('http') + +t.plan(3) + +const server = createServer((req, res) => { + res.destroy() +}) +t.teardown(server.close.bind(server)) + +const reqHeaders = { + foo: undefined, + bar: 'bar' +} + +let _req +diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => { + _req = request +}) + +diagnosticsChannel.channel('undici:request:error').subscribe(({ request, error }) => { + t.equal(_req, request) + t.equal(error.code, 'UND_ERR_SOCKET') +}) + +server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err, data) => { + t.equal(err.code, 'UND_ERR_SOCKET') + }) +}) diff --git a/test/diagnostics-channel/get.js b/test/diagnostics-channel/get.js new file mode 100644 index 0000000..9d868c3 --- /dev/null +++ b/test/diagnostics-channel/get.js @@ -0,0 +1,141 @@ +'use strict' + +const t = require('tap') + +let diagnosticsChannel + +try { + diagnosticsChannel = require('diagnostics_channel') +} catch { + t.skip('missing diagnostics_channel') + process.exit(0) +} + +const { Client } = require('../..') +const { createServer } = require('http') + +t.plan(32) + +const server = createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.setHeader('trailer', 'foo') + res.write('hello') + res.addTrailers({ + foo: 'oof' + }) + res.end() +}) +t.teardown(server.close.bind(server)) + +const reqHeaders = { + foo: undefined, + bar: 'bar' +} + +let _req +diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => { + _req = request + t.equal(request.origin, `http://localhost:${server.address().port}`) + t.equal(request.completed, false) + t.equal(request.method, 'GET') + t.equal(request.path, '/') + t.equal(request.headers, 'bar: bar\r\n') + request.addHeader('hello', 'world') + t.equal(request.headers, 'bar: bar\r\nhello: world\r\n') +}) + +let _connector +diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => { + _connector = connector + + t.equal(typeof _connector, 'function') + t.equal(Object.keys(connectParams).length, 6) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(host, `localhost:${server.address().port}`) + t.equal(hostname, 'localhost') + t.equal(port, String(server.address().port)) + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +let _socket +diagnosticsChannel.channel('undici:client:connected').subscribe(({ connectParams, socket, connector }) => { + _socket = socket + + t.equal(_connector, connector) + 
t.equal(Object.keys(connectParams).length, 6) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(host, `localhost:${server.address().port}`) + t.equal(hostname, 'localhost') + t.equal(port, String(server.address().port)) + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => { + t.equal(_req, request) + t.equal(_socket, socket) + + const expectedHeaders = [ + 'GET / HTTP/1.1', + `host: localhost:${server.address().port}`, + 'connection: keep-alive', + 'bar: bar', + 'hello: world' + ] + + t.equal(headers, expectedHeaders.join('\r\n') + '\r\n') +}) + +diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => { + t.equal(_req, request) + t.equal(response.statusCode, 200) + const expectedHeaders = [ + Buffer.from('Content-Type'), + Buffer.from('text/plain'), + Buffer.from('trailer'), + Buffer.from('foo'), + Buffer.from('Date'), + response.headers[5], // This is a date + Buffer.from('Connection'), + Buffer.from('keep-alive'), + Buffer.from('Keep-Alive'), + Buffer.from('timeout=5'), + Buffer.from('Transfer-Encoding'), + Buffer.from('chunked') + ] + t.same(response.headers, expectedHeaders) + t.equal(response.statusText, 'OK') +}) + +let endEmitted = false +diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => { + t.equal(request.completed, true) + t.equal(_req, request) + // This event is emitted after the last chunk has been added to the body stream, + // not when it was consumed by the application + t.equal(endEmitted, false) + t.same(trailers, [Buffer.from('foo'), Buffer.from('oof')]) +}) + +server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err, data) => { + t.error(err) + data.body.on('end', function () { + endEmitted = true + }) + }) +}) diff --git a/test/diagnostics-channel/post-stream.js b/test/diagnostics-channel/post-stream.js new file mode 100644 index 0000000..236b9bb --- /dev/null +++ b/test/diagnostics-channel/post-stream.js @@ -0,0 +1,149 @@ +'use strict' + +const t = require('tap') +const { Readable } = require('stream') + +let diagnosticsChannel + +try { + diagnosticsChannel = require('diagnostics_channel') +} catch { + t.skip('missing diagnostics_channel') + process.exit(0) +} + +const { Client } = require('../..') +const { createServer } = require('http') + +t.plan(33) + +const server = createServer((req, res) => { + req.resume() + res.setHeader('Content-Type', 'text/plain') + res.setHeader('trailer', 'foo') + res.write('hello') + res.addTrailers({ + foo: 'oof' + }) + res.end() +}) +t.teardown(server.close.bind(server)) + +const reqHeaders = { + foo: undefined, + bar: 'bar' +} +const body = Readable.from(['hello', ' ', 'world']) + +let _req +diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => { + _req = request + t.equal(request.completed, false) + t.equal(request.method, 'POST') + t.equal(request.path, '/') + t.equal(request.headers, 'bar: bar\r\n') + request.addHeader('hello', 'world') + t.equal(request.headers, 'bar: bar\r\nhello: world\r\n') + t.same(request.body, body) +}) + +let _connector +diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => { + _connector = connector + + t.equal(typeof 
_connector, 'function') + t.equal(Object.keys(connectParams).length, 6) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(host, `localhost:${server.address().port}`) + t.equal(hostname, 'localhost') + t.equal(port, String(server.address().port)) + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +let _socket +diagnosticsChannel.channel('undici:client:connected').subscribe(({ connectParams, socket, connector }) => { + _socket = socket + + t.equal(Object.keys(connectParams).length, 6) + t.equal(_connector, connector) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(host, `localhost:${server.address().port}`) + t.equal(hostname, 'localhost') + t.equal(port, String(server.address().port)) + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => { + t.equal(_req, request) + t.equal(_socket, socket) + + const expectedHeaders = [ + 'POST / HTTP/1.1', + `host: localhost:${server.address().port}`, + 'connection: keep-alive', + 'bar: bar', + 'hello: world' + ] + + t.equal(headers, expectedHeaders.join('\r\n') + '\r\n') +}) + +diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => { + t.equal(_req, request) + t.equal(response.statusCode, 200) + const expectedHeaders = [ + Buffer.from('Content-Type'), + Buffer.from('text/plain'), + Buffer.from('trailer'), + Buffer.from('foo'), + Buffer.from('Date'), + response.headers[5], // This is a date + Buffer.from('Connection'), + Buffer.from('keep-alive'), + Buffer.from('Keep-Alive'), + Buffer.from('timeout=5'), + Buffer.from('Transfer-Encoding'), + Buffer.from('chunked') + ] + t.same(response.headers, expectedHeaders) + t.equal(response.statusText, 'OK') +}) + +diagnosticsChannel.channel('undici:request:bodySent').subscribe(({ request }) => { + t.equal(_req, request) +}) + +let endEmitted = false +diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => { + t.equal(request.completed, true) + t.equal(_req, request) + // This event is emitted after the last chunk has been added to the body stream, + // not when it was consumed by the application + t.equal(endEmitted, false) + t.same(trailers, [Buffer.from('foo'), Buffer.from('oof')]) +}) + +server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + headers: reqHeaders, + body + }, (err, data) => { + t.error(err) + data.body.on('end', function () { + endEmitted = true + }) + }) +}) diff --git a/test/diagnostics-channel/post.js b/test/diagnostics-channel/post.js new file mode 100644 index 0000000..fc02eb5 --- /dev/null +++ b/test/diagnostics-channel/post.js @@ -0,0 +1,147 @@ +'use strict' + +const t = require('tap') + +let diagnosticsChannel + +try { + diagnosticsChannel = require('diagnostics_channel') +} catch { + t.skip('missing diagnostics_channel') + process.exit(0) +} + +const { Client } = require('../..') +const { createServer } = require('http') + +t.plan(33) + +const server = createServer((req, res) => { + req.resume() + res.setHeader('Content-Type', 'text/plain') + res.setHeader('trailer', 'foo') + res.write('hello') + res.addTrailers({ + foo: 'oof' + }) + res.end() +}) +t.teardown(server.close.bind(server)) + +const reqHeaders = { + foo: undefined, + bar: 'bar' +} + +let _req 
+diagnosticsChannel.channel('undici:request:create').subscribe(({ request }) => { + _req = request + t.equal(request.completed, false) + t.equal(request.method, 'POST') + t.equal(request.path, '/') + t.equal(request.headers, 'bar: bar\r\n') + request.addHeader('hello', 'world') + t.equal(request.headers, 'bar: bar\r\nhello: world\r\n') + t.same(request.body, Buffer.from('hello world')) +}) + +let _connector +diagnosticsChannel.channel('undici:client:beforeConnect').subscribe(({ connectParams, connector }) => { + _connector = connector + + t.equal(typeof _connector, 'function') + t.equal(Object.keys(connectParams).length, 6) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(host, `localhost:${server.address().port}`) + t.equal(hostname, 'localhost') + t.equal(port, String(server.address().port)) + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +let _socket +diagnosticsChannel.channel('undici:client:connected').subscribe(({ connectParams, socket, connector }) => { + _socket = socket + + t.equal(Object.keys(connectParams).length, 6) + t.equal(_connector, connector) + + const { host, hostname, protocol, port, servername } = connectParams + + t.equal(host, `localhost:${server.address().port}`) + t.equal(hostname, 'localhost') + t.equal(port, String(server.address().port)) + t.equal(protocol, 'http:') + t.equal(servername, null) +}) + +diagnosticsChannel.channel('undici:client:sendHeaders').subscribe(({ request, headers, socket }) => { + t.equal(_req, request) + t.equal(_socket, socket) + + const expectedHeaders = [ + 'POST / HTTP/1.1', + `host: localhost:${server.address().port}`, + 'connection: keep-alive', + 'bar: bar', + 'hello: world' + ] + + t.equal(headers, expectedHeaders.join('\r\n') + '\r\n') +}) + +diagnosticsChannel.channel('undici:request:headers').subscribe(({ request, response }) => { + t.equal(_req, request) + t.equal(response.statusCode, 200) + const expectedHeaders = [ + Buffer.from('Content-Type'), + Buffer.from('text/plain'), + Buffer.from('trailer'), + Buffer.from('foo'), + Buffer.from('Date'), + response.headers[5], // This is a date + Buffer.from('Connection'), + Buffer.from('keep-alive'), + Buffer.from('Keep-Alive'), + Buffer.from('timeout=5'), + Buffer.from('Transfer-Encoding'), + Buffer.from('chunked') + ] + t.same(response.headers, expectedHeaders) + t.equal(response.statusText, 'OK') +}) + +diagnosticsChannel.channel('undici:request:bodySent').subscribe(({ request }) => { + t.equal(_req, request) +}) + +let endEmitted = false +diagnosticsChannel.channel('undici:request:trailers').subscribe(({ request, trailers }) => { + t.equal(request.completed, true) + t.equal(_req, request) + // This event is emitted after the last chunk has been added to the body stream, + // not when it was consumed by the application + t.equal(endEmitted, false) + t.same(trailers, [Buffer.from('foo'), Buffer.from('oof')]) +}) + +server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeout: 300e3 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'POST', + headers: reqHeaders, + body: 'hello world' + }, (err, data) => { + t.error(err) + data.body.on('end', function () { + endEmitted = true + }) + }) +}) diff --git a/test/dispatcher.js b/test/dispatcher.js new file mode 100644 index 0000000..22750a1 --- /dev/null +++ b/test/dispatcher.js @@ -0,0 +1,22 @@ +'use strict' + +const t = require('tap') +const { test } = t + +const Dispatcher = 
require('../lib/dispatcher') + +class PoorImplementation extends Dispatcher {} + +test('dispatcher implementation', (t) => { + t.plan(6) + + const dispatcher = new Dispatcher() + t.throws(() => dispatcher.dispatch(), Error, 'throws on unimplemented dispatch') + t.throws(() => dispatcher.close(), Error, 'throws on unimplemented close') + t.throws(() => dispatcher.destroy(), Error, 'throws on unimplemented destroy') + + const poorImplementation = new PoorImplementation() + t.throws(() => poorImplementation.dispatch(), Error, 'throws on unimplemented dispatch') + t.throws(() => poorImplementation.close(), Error, 'throws on unimplemented close') + t.throws(() => poorImplementation.destroy(), Error, 'throws on unimplemented destroy') +}) diff --git a/test/errors.js b/test/errors.js new file mode 100644 index 0000000..a6f17ef --- /dev/null +++ b/test/errors.js @@ -0,0 +1,81 @@ +'use strict' + +const t = require('tap') +const { test } = t + +const errors = require('../lib/core/errors') + +const createScenario = (ErrorClass, defaultMessage, name, code) => ({ + ErrorClass, + defaultMessage, + name, + code +}) + +const scenarios = [ + createScenario(errors.UndiciError, '', 'UndiciError', 'UND_ERR'), + createScenario(errors.ConnectTimeoutError, 'Connect Timeout Error', 'ConnectTimeoutError', 'UND_ERR_CONNECT_TIMEOUT'), + createScenario(errors.HeadersTimeoutError, 'Headers Timeout Error', 'HeadersTimeoutError', 'UND_ERR_HEADERS_TIMEOUT'), + createScenario(errors.HeadersOverflowError, 'Headers Overflow Error', 'HeadersOverflowError', 'UND_ERR_HEADERS_OVERFLOW'), + createScenario(errors.InvalidArgumentError, 'Invalid Argument Error', 'InvalidArgumentError', 'UND_ERR_INVALID_ARG'), + createScenario(errors.InvalidReturnValueError, 'Invalid Return Value Error', 'InvalidReturnValueError', 'UND_ERR_INVALID_RETURN_VALUE'), + createScenario(errors.RequestAbortedError, 'Request aborted', 'AbortError', 'UND_ERR_ABORTED'), + createScenario(errors.InformationalError, 'Request information', 'InformationalError', 'UND_ERR_INFO'), + createScenario(errors.RequestContentLengthMismatchError, 'Request body length does not match content-length header', 'RequestContentLengthMismatchError', 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'), + createScenario(errors.ClientDestroyedError, 'The client is destroyed', 'ClientDestroyedError', 'UND_ERR_DESTROYED'), + createScenario(errors.ClientClosedError, 'The client is closed', 'ClientClosedError', 'UND_ERR_CLOSED'), + createScenario(errors.SocketError, 'Socket error', 'SocketError', 'UND_ERR_SOCKET'), + createScenario(errors.NotSupportedError, 'Not supported error', 'NotSupportedError', 'UND_ERR_NOT_SUPPORTED'), + createScenario(errors.ResponseContentLengthMismatchError, 'Response body length does not match content-length header', 'ResponseContentLengthMismatchError', 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'), + createScenario(errors.ResponseExceededMaxSizeError, 'Response content exceeded max size', 'ResponseExceededMaxSizeError', 'UND_ERR_RES_EXCEEDED_MAX_SIZE') +] + +scenarios.forEach(scenario => { + test(scenario.name, t => { + const SAMPLE_MESSAGE = 'sample message' + + const errorWithDefaultMessage = () => new scenario.ErrorClass() + const errorWithProvidedMessage = () => new scenario.ErrorClass(SAMPLE_MESSAGE) + + test('should use default message', t => { + t.plan(1) + + const error = errorWithDefaultMessage() + + t.equal(error.message, scenario.defaultMessage) + }) + + test('should use provided message', t => { + t.plan(1) + + const error = errorWithProvidedMessage() + + 
t.equal(error.message, SAMPLE_MESSAGE) + }) + + test('should have proper fields', t => { + t.plan(6) + const errorInstances = [errorWithDefaultMessage(), errorWithProvidedMessage()] + errorInstances.forEach(error => { + t.equal(error.name, scenario.name) + t.equal(error.code, scenario.code) + t.ok(error.stack) + }) + }) + + t.end() + }) +}) + +test('Default HTTPParseError Codes', t => { + test('code and data should be undefined when not set', t => { + t.plan(2) + + const error = new errors.HTTPParserError('HTTPParserError') + + t.equal(error.code, undefined) + t.equal(error.data, undefined) + }) + + t.end() +}) diff --git a/test/esm-wrapper.js b/test/esm-wrapper.js new file mode 100644 index 0000000..a593fbd --- /dev/null +++ b/test/esm-wrapper.js @@ -0,0 +1,19 @@ +'use strict' +const { nodeMajor, nodeMinor } = require('../lib/core/util') + +if (!((nodeMajor > 14 || (nodeMajor === 14 && nodeMinor > 13)) || (nodeMajor === 12 && nodeMinor > 20))) { + require('tap') // shows skipped +} else { + ;(async () => { + try { + await import('./utils/esm-wrapper.mjs') + } catch (e) { + if (e.message === 'Not supported') { + require('tap') // shows skipped + return + } + console.error(e.stack) + process.exitCode = 1 + } + })() +} diff --git a/test/fetch/407-statuscode-window-null.js b/test/fetch/407-statuscode-window-null.js new file mode 100644 index 0000000..e22554f --- /dev/null +++ b/test/fetch/407-statuscode-window-null.js @@ -0,0 +1,20 @@ +'use strict' + +const { fetch } = require('../..') +const { createServer } = require('http') +const { once } = require('events') +const { test } = require('tap') + +test('Receiving a 407 status code w/ a window option present should reject', async (t) => { + const server = createServer((req, res) => { + res.statusCode = 407 + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + // if init.window exists, the spec tells us to set request.window to 'no-window', + // which later causes the request to be rejected if the status code is 407 + await t.rejects(fetch(`http://localhost:${server.address().port}`, { window: null })) +}) diff --git a/test/fetch/abort.js b/test/fetch/abort.js new file mode 100644 index 0000000..e1ca1eb --- /dev/null +++ b/test/fetch/abort.js @@ -0,0 +1,82 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') +const { createServer } = require('http') +const { once } = require('events') +const { DOMException } = require('../../lib/fetch/constants') +const { nodeMajor } = require('../../lib/core/util') + +const { AbortController: NPMAbortController } = require('abort-controller') + +test('Allow the usage of custom implementation of AbortController', async (t) => { + const body = { + fixes: 1605 + } + + const server = createServer((req, res) => { + res.statusCode = 200 + res.end(JSON.stringify(body)) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0) + await once(server, 'listening') + + const controller = new NPMAbortController() + const signal = controller.signal + controller.abort() + + try { + await fetch(`http://localhost:${server.address().port}`, { + signal + }) + } catch (e) { + t.equal(e.code, DOMException.ABORT_ERR) + } +}) + +test('allows aborting with custom errors', { skip: nodeMajor === 16 }, async (t) => { + const server = createServer().listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + t.test('Using AbortSignal.timeout with cause', async (t) => { + t.plan(2) + + try { + await 
fetch(`http://localhost:${server.address().port}`, { + signal: AbortSignal.timeout(50) + }) + t.fail('should throw') + } catch (err) { + if (err.name === 'TypeError') { + const cause = err.cause + t.equal(cause.name, 'HeadersTimeoutError') + t.equal(cause.code, 'UND_ERR_HEADERS_TIMEOUT') + } else if (err.name === 'TimeoutError') { + t.equal(err.code, DOMException.TIMEOUT_ERR) + t.equal(err.cause, undefined) + } else { + t.error(err) + } + } + }) + + t.test('Error defaults to an AbortError DOMException', async (t) => { + const ac = new AbortController() + ac.abort() // no reason + + await t.rejects( + fetch(`http://localhost:${server.address().port}`, { + signal: ac.signal + }), + { + name: 'AbortError', + code: DOMException.ABORT_ERR + } + ) + }) +}) diff --git a/test/fetch/abort2.js b/test/fetch/abort2.js new file mode 100644 index 0000000..5f3853b --- /dev/null +++ b/test/fetch/abort2.js @@ -0,0 +1,60 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') +const { createServer } = require('http') +const { once } = require('events') +const { DOMException } = require('../../lib/fetch/constants') + +/* global AbortController */ + +test('parallel fetch with the same AbortController works as expected', async (t) => { + const body = { + fixes: 1389, + bug: 'Ensure request is not aborted before enqueueing bytes into stream.' + } + + const server = createServer((req, res) => { + res.statusCode = 200 + res.end(JSON.stringify(body)) + }) + + t.teardown(server.close.bind(server)) + + const abortController = new AbortController() + + async function makeRequest () { + const result = await fetch(`http://localhost:${server.address().port}`, { + signal: abortController.signal + }).then(response => response.json()) + + abortController.abort() + return result + } + + server.listen(0) + await once(server, 'listening') + + const requests = Array.from({ length: 10 }, makeRequest) + const result = await Promise.allSettled(requests) + + // since the requests are running parallel, any of them could resolve first. + // therefore we cannot rely on the order of the requests sent. 
+ const { resolved, rejected } = result.reduce((a, b) => { + if (b.status === 'rejected') { + a.rejected.push(b) + } else { + a.resolved.push(b) + } + + return a + }, { resolved: [], rejected: [] }) + + t.equal(rejected.length, 9) // out of 10 requests, only 1 should succeed + t.equal(resolved.length, 1) + + t.ok(rejected.every(rej => rej.reason?.code === DOMException.ABORT_ERR)) + t.same(resolved[0].value, body) + + t.end() +}) diff --git a/test/fetch/about-uri.js b/test/fetch/about-uri.js new file mode 100644 index 0000000..ac9cbf2 --- /dev/null +++ b/test/fetch/about-uri.js @@ -0,0 +1,21 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') + +test('fetching about: uris', async (t) => { + t.test('about:blank', async (t) => { + await t.rejects(fetch('about:blank')) + }) + + t.test('All other about: urls should return an error', async (t) => { + try { + await fetch('about:config') + t.fail('fetching about:config should fail') + } catch (e) { + t.ok(e, 'this error was expected') + } finally { + t.end() + } + }) +}) diff --git a/test/fetch/blob-uri.js b/test/fetch/blob-uri.js new file mode 100644 index 0000000..f9db96c --- /dev/null +++ b/test/fetch/blob-uri.js @@ -0,0 +1,100 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') +const { Blob } = require('buffer') + +test('fetching blob: uris', async (t) => { + const blobContents = 'hello world' + /** @type {import('buffer').Blob} */ + let blob + /** @type {string} */ + let objectURL + + t.beforeEach(() => { + blob = new Blob([blobContents]) + objectURL = URL.createObjectURL(blob) + }) + + t.test('a normal fetch request works', async (t) => { + const res = await fetch(objectURL) + + t.equal(blobContents, await res.text()) + t.equal(blob.type, res.headers.get('Content-Type')) + t.equal(`${blob.size}`, res.headers.get('Content-Length')) + t.end() + }) + + t.test('non-GET method to blob: fails', async (t) => { + try { + await fetch(objectURL, { + method: 'POST' + }) + t.fail('expected POST to blob: uri to fail') + } catch (e) { + t.ok(e, 'Got the expected error') + } finally { + t.end() + } + }) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L36-L41 + t.test('fetching revoked URL should fail', async (t) => { + URL.revokeObjectURL(objectURL) + + try { + await fetch(objectURL) + t.fail('expected revoked blob: url to fail') + } catch (e) { + t.ok(e, 'Got the expected error') + } finally { + t.end() + } + }) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L28-L34 + t.test('works with a fragment', async (t) => { + const res = await fetch(objectURL + '#fragment') + + t.equal(blobContents, await res.text()) + t.end() + }) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 + t.test('Appending a query string to blob: url should cause fetch to fail', async (t) => { + try { + await fetch(objectURL + '?querystring') + t.fail('expected ?querystring blob: url to fail') + } catch (e) { + t.ok(e, 'Got the expected error') + } finally { + t.end() + } + }) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L58-L62 + t.test('Appending a path should cause fetch to fail', async (t) => { + try { + await fetch(objectURL + '/path') + t.fail('expected /path blob: url 
to fail') + } catch (e) { + t.ok(e, 'Got the expected error') + } finally { + t.end() + } + }) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L64-L70 + t.test('these http methods should fail', async (t) => { + for (const method of ['HEAD', 'POST', 'DELETE', 'OPTIONS', 'PUT', 'CUSTOM']) { + try { + await fetch(objectURL, { method }) + t.fail(`${method} fetch should have failed`) + } catch (e) { + t.ok(e, `${method} blob url - test succeeded`) + } + } + + t.end() + }) +}) diff --git a/test/fetch/bundle.js b/test/fetch/bundle.js new file mode 100644 index 0000000..aa1257a --- /dev/null +++ b/test/fetch/bundle.js @@ -0,0 +1,41 @@ +'use strict' + +const { test, skip } = require('tap') +const { nodeMajor } = require('../../lib/core/util') + +if (nodeMajor === 16) { + skip('esbuild uses static blocks with --keep-names which node 16.8 does not have') + process.exit() +} + +const { Response, Request, FormData, Headers } = require('../../undici-fetch') + +test('bundle sets constructor.name and .name properly', (t) => { + t.equal(new Response().constructor.name, 'Response') + t.equal(Response.name, 'Response') + + t.equal(new Request('http://a').constructor.name, 'Request') + t.equal(Request.name, 'Request') + + t.equal(new Headers().constructor.name, 'Headers') + t.equal(Headers.name, 'Headers') + + t.equal(new FormData().constructor.name, 'FormData') + t.equal(FormData.name, 'FormData') + + t.end() +}) + +test('regression test for https://github.com/nodejs/node/issues/50263', (t) => { + const request = new Request('https://a', { + headers: { + test: 'abc' + }, + method: 'POST' + }) + + const request1 = new Request(request, { body: 'does not matter' }) + + t.equal(request1.headers.get('test'), 'abc') + t.end() +}) diff --git a/test/fetch/client-error-stack-trace.js b/test/fetch/client-error-stack-trace.js new file mode 100644 index 0000000..7d94aa8 --- /dev/null +++ b/test/fetch/client-error-stack-trace.js @@ -0,0 +1,21 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') +const { fetch: fetchIndex } = require('../../index-fetch') + +test('FETCH: request errors and prints trimmed stack trace', async (t) => { + try { + await fetch('http://a.com') + } catch (error) { + t.match(error.stack, `at Test.<anonymous> (${__filename}`) + } +}) + +test('FETCH-index: request errors and prints trimmed stack trace', async (t) => { + try { + await fetchIndex('http://a.com') + } catch (error) { + t.match(error.stack, `at Test.<anonymous> 
(${__filename}`) + } +}) diff --git a/test/fetch/client-fetch.js b/test/fetch/client-fetch.js new file mode 100644 index 0000000..9009d54 --- /dev/null +++ b/test/fetch/client-fetch.js @@ -0,0 +1,688 @@ +/* globals AbortController */ + +'use strict' + +const { test, teardown } = require('tap') +const { createServer } = require('http') +const { ReadableStream } = require('stream/web') +const { Blob } = require('buffer') +const { fetch, Response, Request, FormData, File } = require('../..') +const { Client, setGlobalDispatcher, Agent } = require('../..') +const { nodeMajor, nodeMinor } = require('../../lib/core/util') +const nodeFetch = require('../../index-fetch') +const { once } = require('events') +const { gzipSync } = require('zlib') +const { promisify } = require('util') +const { randomFillSync, createHash } = require('crypto') + +setGlobalDispatcher(new Agent({ + keepAliveTimeout: 1, + keepAliveMaxTimeout: 1 +})) + +test('function signature', (t) => { + t.plan(2) + + t.equal(fetch.name, 'fetch') + t.equal(fetch.length, 1) +}) + +test('args validation', async (t) => { + t.plan(2) + + await t.rejects(fetch(), TypeError) + await t.rejects(fetch('ftp://unsupported'), TypeError) +}) + +test('request json', (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const body = await fetch(`http://localhost:${server.address().port}`) + t.strictSame(obj, await body.json()) + }) +}) + +test('request text', (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const body = await fetch(`http://localhost:${server.address().port}`) + t.strictSame(JSON.stringify(obj), await body.text()) + }) +}) + +test('request arrayBuffer', (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const body = await fetch(`http://localhost:${server.address().port}`) + t.strictSame(Buffer.from(JSON.stringify(obj)), Buffer.from(await body.arrayBuffer())) + }) +}) + +test('should set type of blob object to the value of the `Content-Type` header from response', (t) => { + t.plan(1) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.setHeader('Content-Type', 'application/json') + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const response = await fetch(`http://localhost:${server.address().port}`) + t.equal('application/json', (await response.blob()).type) + }) +}) + +test('pre aborted with readable request body', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const ac = new AbortController() + ac.abort() + await fetch(`http://localhost:${server.address().port}`, { + signal: ac.signal, + method: 'POST', + body: new ReadableStream({ + async cancel (reason) { + t.equal(reason.name, 'AbortError') + } + }), + duplex: 'half' + }).catch(err => { + t.equal(err.name, 'AbortError') + }) + }) +}) + +test('pre aborted with closed readable request body', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + 
server.listen(0, async () => { + const ac = new AbortController() + ac.abort() + const body = new ReadableStream({ + async start (c) { + t.pass() + c.close() + }, + async cancel (reason) { + t.fail() + } + }) + queueMicrotask(() => { + fetch(`http://localhost:${server.address().port}`, { + signal: ac.signal, + method: 'POST', + body, + duplex: 'half' + }).catch(err => { + t.equal(err.name, 'AbortError') + }) + }) + }) +}) + +test('unsupported formData 1', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'asdasdsad') + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + fetch(`http://localhost:${server.address().port}`) + .then(res => res.formData()) + .catch(err => { + t.equal(err.name, 'TypeError') + }) + }) +}) + +test('multipart formdata not base64', async (t) => { + t.plan(2) + // Construct example form data, with text and blob fields + const formData = new FormData() + formData.append('field1', 'value1') + const blob = new Blob(['example\ntext file'], { type: 'text/plain' }) + formData.append('field2', blob, 'file.txt') + + const tempRes = new Response(formData) + const boundary = tempRes.headers.get('content-type').split('boundary=')[1] + const formRaw = await tempRes.text() + + const server = createServer((req, res) => { + res.setHeader('content-type', 'multipart/form-data; boundary=' + boundary) + res.write(formRaw) + res.end() + }) + t.teardown(server.close.bind(server)) + + const listen = promisify(server.listen.bind(server)) + await listen(0) + + const res = await fetch(`http://localhost:${server.address().port}`) + const form = await res.formData() + t.equal(form.get('field1'), 'value1') + + const text = await form.get('field2').text() + t.equal(text, 'example\ntext file') +}) + +// TODO(@KhafraDev): re-enable this test once the issue is fixed +// See https://github.com/nodejs/node/issues/47301 +test('multipart formdata base64', { skip: nodeMajor >= 19 && nodeMinor >= 8 }, (t) => { + t.plan(1) + + // Example form data with base64 encoding + const data = randomFillSync(Buffer.alloc(256)) + const formRaw = `------formdata-undici-0.5786922755719377\r\nContent-Disposition: form-data; name="file"; filename="test.txt"\r\nContent-Type: application/octet-stream\r\nContent-Transfer-Encoding: base64\r\n\r\n${data.toString('base64')}\r\n------formdata-undici-0.5786922755719377--` + const server = createServer(async (req, res) => { + res.setHeader('content-type', 'multipart/form-data; boundary=----formdata-undici-0.5786922755719377') + + for (let offset = 0; offset < formRaw.length;) { + res.write(formRaw.slice(offset, offset += 2)) + await new Promise(resolve => setTimeout(resolve)) + } + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + fetch(`http://localhost:${server.address().port}`) + .then(res => res.formData()) + .then(form => form.get('file').arrayBuffer()) + .then(buffer => createHash('sha256').update(Buffer.from(buffer)).digest('base64')) + .then(digest => { + t.equal(createHash('sha256').update(data).digest('base64'), digest) + }) + }) +}) + +test('multipart fromdata non-ascii filed names', async (t) => { + t.plan(1) + + const request = new Request('http://localhost', { + method: 'POST', + headers: { + 'Content-Type': 'multipart/form-data; boundary=----formdata-undici-0.6204674738279623' + }, + body: + '------formdata-undici-0.6204674738279623\r\n' + + 'Content-Disposition: form-data; name="fiÅo"\r\n' + + '\r\n' + + 'value1\r\n' + + 
'------formdata-undici-0.6204674738279623--' + }) + + const form = await request.formData() + t.equal(form.get('fiÅo'), 'value1') +}) + +test('busboy emit error', async (t) => { + t.plan(1) + const formData = new FormData() + formData.append('field1', 'value1') + + const tempRes = new Response(formData) + const formRaw = await tempRes.text() + + const server = createServer((req, res) => { + res.setHeader('content-type', 'multipart/form-data; boundary=wrongboundary') + res.write(formRaw) + res.end() + }) + t.teardown(server.close.bind(server)) + + const listen = promisify(server.listen.bind(server)) + await listen(0) + + const res = await fetch(`http://localhost:${server.address().port}`) + await t.rejects(res.formData(), 'Unexpected end of multipart data') +}) + +// https://github.com/nodejs/undici/issues/2244 +test('parsing formData preserve full path on files', async (t) => { + t.plan(1) + const formData = new FormData() + formData.append('field1', new File(['foo'], 'a/b/c/foo.txt')) + + const tempRes = new Response(formData) + const form = await tempRes.formData() + + t.equal(form.get('field1').name, 'a/b/c/foo.txt') +}) + +test('urlencoded formData', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'application/x-www-form-urlencoded') + res.end('field1=value1&field2=value2') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + fetch(`http://localhost:${server.address().port}`) + .then(res => res.formData()) + .then(formData => { + t.equal(formData.get('field1'), 'value1') + t.equal(formData.get('field2'), 'value2') + }) + }) +}) + +test('text with BOM', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'application/x-www-form-urlencoded') + res.end('\uFEFFtest=\uFEFF') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + fetch(`http://localhost:${server.address().port}`) + .then(res => res.text()) + .then(text => { + t.equal(text, 'test=\uFEFF') + }) + }) +}) + +test('formData with BOM', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'application/x-www-form-urlencoded') + res.end('\uFEFFtest=\uFEFF') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + fetch(`http://localhost:${server.address().port}`) + .then(res => res.formData()) + .then(formData => { + t.equal(formData.get('\uFEFFtest'), '\uFEFF') + }) + }) +}) + +test('locked blob body', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const res = await fetch(`http://localhost:${server.address().port}`) + const reader = res.body.getReader() + res.blob().catch(err => { + t.equal(err.message, 'Body is unusable') + reader.cancel() + }) + }) +}) + +test('disturbed blob body', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const res = await fetch(`http://localhost:${server.address().port}`) + res.blob().then(() => { + t.pass(2) + }) + res.blob().catch(err => { + t.equal(err.message, 'Body is unusable') + }) + }) +}) + +test('redirect with body', (t) => { + t.plan(3) + + let count = 0 + const server = createServer(async (req, res) => { + let body = '' + for await (const chunk of req) { + body += chunk + } + t.equal(body, 'asd') + if (count++ === 0) { + res.setHeader('location', 'asd') + 
res.statusCode = 302 + res.end() + } else { + res.end(String(count)) + } + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const res = await fetch(`http://localhost:${server.address().port}`, { + method: 'PUT', + body: 'asd' + }) + t.equal(await res.text(), '2') + }) +}) + +test('redirect with stream', (t) => { + t.plan(3) + + const location = '/asd' + const body = 'hello!' + const server = createServer(async (req, res) => { + res.writeHead(302, { location }) + let count = 0 + const l = setInterval(() => { + res.write(body[count++]) + if (count === body.length) { + res.end() + clearInterval(l) + } + }, 50) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const res = await fetch(`http://localhost:${server.address().port}`, { + redirect: 'manual' + }) + t.equal(res.status, 302) + t.equal(res.headers.get('location'), location) + t.equal(await res.text(), body) + }) +}) + +test('fail to extract locked body', (t) => { + t.plan(1) + + const stream = new ReadableStream({}) + const reader = stream.getReader() + try { + // eslint-disable-next-line + new Response(stream) + } catch (err) { + t.equal(err.name, 'TypeError') + } + reader.cancel() +}) + +test('fail to extract locked body', (t) => { + t.plan(1) + + const stream = new ReadableStream({}) + const reader = stream.getReader() + try { + // eslint-disable-next-line + new Request('http://asd', { + method: 'PUT', + body: stream, + keepalive: true + }) + } catch (err) { + t.equal(err.message, 'keepalive') + } + reader.cancel() +}) + +test('post FormData with Blob', (t) => { + t.plan(1) + + const body = new FormData() + body.append('field1', new Blob(['asd1'])) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const res = await fetch(`http://localhost:${server.address().port}`, { + method: 'PUT', + body + }) + t.ok(/asd1/.test(await res.text())) + }) +}) + +test('post FormData with File', (t) => { + t.plan(2) + + const body = new FormData() + body.append('field1', new File(['asd1'], 'filename123')) + + const server = createServer((req, res) => { + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const res = await fetch(`http://localhost:${server.address().port}`, { + method: 'PUT', + body + }) + const result = await res.text() + t.ok(/asd1/.test(result)) + t.ok(/filename123/.test(result)) + }) +}) + +test('invalid url', async (t) => { + t.plan(1) + + try { + await fetch('http://invalid') + } catch (e) { + t.match(e.cause.message, 'invalid') + } +}) + +test('custom agent', (t) => { + t.plan(2) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const dispatcher = new Client('http://localhost:' + server.address().port, { + keepAliveTimeout: 1, + keepAliveMaxTimeout: 1 + }) + const oldDispatch = dispatcher.dispatch + dispatcher.dispatch = function (options, handler) { + t.pass('custom dispatcher') + return oldDispatch.call(this, options, handler) + } + t.teardown(server.close.bind(server)) + const body = await fetch(`http://localhost:${server.address().port}`, { + dispatcher + }) + t.strictSame(obj, await body.json()) + }) +}) + +test('custom agent node fetch', (t) => { + t.plan(2) + + const obj = { asd: true } + const server = createServer((req, res) => { + res.end(JSON.stringify(obj)) + }) + 
t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const dispatcher = new Client('http://localhost:' + server.address().port, { + keepAliveTimeout: 1, + keepAliveMaxTimeout: 1 + }) + const oldDispatch = dispatcher.dispatch + dispatcher.dispatch = function (options, handler) { + t.pass('custom dispatcher') + return oldDispatch.call(this, options, handler) + } + t.teardown(server.close.bind(server)) + const body = await nodeFetch.fetch(`http://localhost:${server.address().port}`, { + dispatcher + }) + t.strictSame(obj, await body.json()) + }) +}) + +test('error on redirect', async (t) => { + const server = createServer((req, res) => { + res.statusCode = 302 + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const errorCause = await fetch(`http://localhost:${server.address().port}`, { + redirect: 'error' + }).catch((e) => e.cause) + + t.equal(errorCause.message, 'unexpected redirect') + }) +}) + +// https://github.com/nodejs/undici/issues/1527 +test('fetching with Request object - issue #1527', async (t) => { + const server = createServer((req, res) => { + t.pass() + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const body = JSON.stringify({ foo: 'bar' }) + const request = new Request(`http://localhost:${server.address().port}`, { + method: 'POST', + body + }) + + await t.resolves(fetch(request)) + t.end() +}) + +test('do not decode redirect body', (t) => { + t.plan(3) + + const obj = { asd: true } + const server = createServer((req, res) => { + if (req.url === '/resource') { + t.pass('we redirect') + res.statusCode = 301 + res.setHeader('location', '/resource/') + // Some dumb http servers set the content-encoding gzip + // even if there is no response + res.setHeader('content-encoding', 'gzip') + res.end() + return + } + t.pass('actual response') + res.setHeader('content-encoding', 'gzip') + res.end(gzipSync(JSON.stringify(obj))) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const body = await fetch(`http://localhost:${server.address().port}/resource`) + t.strictSame(JSON.stringify(obj), await body.text()) + }) +}) + +test('decode non-redirect body with location header', (t) => { + t.plan(2) + + const obj = { asd: true } + const server = createServer((req, res) => { + t.pass('response') + res.statusCode = 201 + res.setHeader('location', '/resource/') + res.setHeader('content-encoding', 'gzip') + res.end(gzipSync(JSON.stringify(obj))) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const body = await fetch(`http://localhost:${server.address().port}/resource`) + t.strictSame(JSON.stringify(obj), await body.text()) + }) +}) + +test('Receiving non-Latin1 headers', async (t) => { + const ContentDisposition = [ + 'inline; filename=rock&roll.png', + 'inline; filename="rock\'n\'roll.png"', + 'inline; filename="image â\x80\x94 copy (1).png"; filename*=UTF-8\'\'image%20%E2%80%94%20copy%20(1).png', + 'inline; filename="_Ã¥\x9C\x96ç\x89\x87_ð\x9F\x96¼_image_.png"; filename*=UTF-8\'\'_%E5%9C%96%E7%89%87_%F0%9F%96%BC_image_.png', + 'inline; filename="100 % loading&perf.png"; filename*=UTF-8\'\'100%20%25%20loading%26perf.png' + ] + + const server = createServer((req, res) => { + for (let i = 0; i < ContentDisposition.length; i++) { + res.setHeader(`Content-Disposition-${i + 1}`, ContentDisposition[i]) + } + + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + 
const url = `http://localhost:${server.address().port}` + const response = await fetch(url, { method: 'HEAD' }) + const cdHeaders = [...response.headers] + .filter(([k]) => k.startsWith('content-disposition')) + .map(([, v]) => v) + const lengths = cdHeaders.map(h => h.length) + + t.same(cdHeaders, ContentDisposition) + t.same(lengths, [30, 34, 94, 104, 90]) + t.end() +}) + +teardown(() => process.exit()) diff --git a/test/fetch/client-node-max-header-size.js b/test/fetch/client-node-max-header-size.js new file mode 100644 index 0000000..737bae8 --- /dev/null +++ b/test/fetch/client-node-max-header-size.js @@ -0,0 +1,29 @@ +'use strict' + +const { execSync } = require('node:child_process') +const { test, skip } = require('tap') +const { nodeMajor } = require('../../lib/core/util') + +if (nodeMajor === 16) { + skip('esbuild uses static blocks with --keep-names which node 16.8 does not have') + process.exit() +} + +const command = 'node -e "require(\'./undici-fetch.js\').fetch(\'https://httpbin.org/get\')"' + +test("respect Node.js' --max-http-header-size", async (t) => { + t.throws( + // TODO: Drop the `--unhandled-rejections=throw` once we drop Node.js 14 + () => execSync(`${command} --max-http-header-size=1 --unhandled-rejections=throw`), + /UND_ERR_HEADERS_OVERFLOW/, + 'max-http-header-size=1 should throw' + ) + + t.doesNotThrow( + () => execSync(command), + /UND_ERR_HEADERS_OVERFLOW/, + 'default max-http-header-size should not throw' + ) + + t.end() +}) diff --git a/test/fetch/content-length.js b/test/fetch/content-length.js new file mode 100644 index 0000000..9264091 --- /dev/null +++ b/test/fetch/content-length.js @@ -0,0 +1,29 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { once } = require('events') +const { Blob } = require('buffer') +const { fetch, FormData } = require('../..') + +// https://github.com/nodejs/undici/issues/1783 +test('Content-Length is set when using a FormData body with fetch', async (t) => { + const server = createServer((req, res) => { + // TODO: check the length's value once the boundary has a fixed length + t.ok('content-length' in req.headers) // request has content-length header + t.ok(!Number.isNaN(Number(req.headers['content-length']))) + res.end() + }).listen(0) + + await once(server, 'listening') + t.teardown(server.close.bind(server)) + + const fd = new FormData() + fd.set('file', new Blob(['hello world 👋'], { type: 'text/plain' }), 'readme.md') + fd.set('string', 'some string value') + + await fetch(`http://localhost:${server.address().port}`, { + method: 'POST', + body: fd + }) +}) diff --git a/test/fetch/cookies.js b/test/fetch/cookies.js new file mode 100644 index 0000000..18b001d --- /dev/null +++ b/test/fetch/cookies.js @@ -0,0 +1,69 @@ +'use strict' + +const { once } = require('events') +const { createServer } = require('http') +const { test } = require('tap') +const { fetch, Headers } = require('../..') + +test('Can receive set-cookie headers from a server using fetch - issue #1262', async (t) => { + const server = createServer((req, res) => { + res.setHeader('set-cookie', 'name=value; Domain=example.com') + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const response = await fetch(`http://localhost:${server.address().port}`) + + t.equal(response.headers.get('set-cookie'), 'name=value; Domain=example.com') + + const response2 = await fetch(`http://localhost:${server.address().port}`, { + credentials: 'include' + }) + + 
t.equal(response2.headers.get('set-cookie'), 'name=value; Domain=example.com') + + t.end() +}) + +test('Can send cookies to a server with fetch - issue #1463', async (t) => { + const server = createServer((req, res) => { + t.equal(req.headers.cookie, 'value') + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const headersInit = [ + new Headers([['cookie', 'value']]), + { cookie: 'value' }, + [['cookie', 'value']] + ] + + for (const headers of headersInit) { + await fetch(`http://localhost:${server.address().port}`, { headers }) + } + + t.end() +}) + +test('Cookie header is delimited with a semicolon rather than a comma - issue #1905', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.equal(req.headers.cookie, 'FOO=lorem-ipsum-dolor-sit-amet; BAR=the-quick-brown-fox') + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + await fetch(`http://localhost:${server.address().port}`, { + headers: [ + ['cookie', 'FOO=lorem-ipsum-dolor-sit-amet'], + ['cookie', 'BAR=the-quick-brown-fox'] + ] + }) +}) diff --git a/test/fetch/data-uri.js b/test/fetch/data-uri.js new file mode 100644 index 0000000..6191bfe --- /dev/null +++ b/test/fetch/data-uri.js @@ -0,0 +1,214 @@ +'use strict' + +const { test } = require('tap') +const { + URLSerializer, + collectASequenceOfCodePoints, + stringPercentDecode, + parseMIMEType, + collectAnHTTPQuotedString +} = require('../../lib/fetch/dataURL') +const { fetch } = require('../..') + +test('https://url.spec.whatwg.org/#concept-url-serializer', (t) => { + t.test('url scheme gets appended', (t) => { + const url = new URL('https://www.google.com/') + const serialized = URLSerializer(url) + + t.ok(serialized.startsWith(url.protocol)) + t.end() + }) + + t.test('non-null url host with authentication', (t) => { + const url = new URL('https://username:password@google.com') + const serialized = URLSerializer(url) + + t.ok(serialized.includes(`//${url.username}:${url.password}`)) + t.ok(serialized.endsWith('@google.com/')) + t.end() + }) + + t.test('null url host', (t) => { + for (const url of ['web+demo:/.//not-a-host/', 'web+demo:/path/..//not-a-host/']) { + t.equal( + URLSerializer(new URL(url)), + 'web+demo:/.//not-a-host/' + ) + } + + t.end() + }) + + t.test('url with query works', (t) => { + t.equal( + URLSerializer(new URL('https://www.google.com/?fetch=undici')), + 'https://www.google.com/?fetch=undici' + ) + + t.end() + }) + + t.test('exclude fragment', (t) => { + t.equal( + URLSerializer(new URL('https://www.google.com/#frag')), + 'https://www.google.com/#frag' + ) + + t.equal( + URLSerializer(new URL('https://www.google.com/#frag'), true), + 'https://www.google.com/' + ) + + t.end() + }) + + t.end() +}) + +test('https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points', (t) => { + const input = 'text/plain;base64,' + const position = { position: 0 } + const result = collectASequenceOfCodePoints( + (char) => char !== ';', + input, + position + ) + + t.strictSame(result, 'text/plain') + t.strictSame(position.position, input.indexOf(';')) + t.end() +}) + +test('https://url.spec.whatwg.org/#string-percent-decode', (t) => { + t.test('encodes %{2} in range properly', (t) => { + const input = '%FF' + const percentDecoded = stringPercentDecode(input) + + t.same(percentDecoded, new Uint8Array([255])) + t.end() + }) + + t.test('encodes %{2} not in range properly', (t) => { + const input = 'Hello %XD World' + const percentDecoded = 
stringPercentDecode(input) + const expected = [...input].map(c => c.charCodeAt(0)) + + t.same(percentDecoded, expected) + t.end() + }) + + t.test('normal string works', (t) => { + const input = 'Hello world' + const percentDecoded = stringPercentDecode(input) + const expected = [...input].map(c => c.charCodeAt(0)) + + t.same(percentDecoded, Uint8Array.from(expected)) + t.end() + }) + + t.end() +}) + +test('https://mimesniff.spec.whatwg.org/#parse-a-mime-type', (t) => { + t.same(parseMIMEType('text/plain'), { + type: 'text', + subtype: 'plain', + parameters: new Map(), + essence: 'text/plain' + }) + + t.same(parseMIMEType('text/html;charset="shift_jis"iso-2022-jp'), { + type: 'text', + subtype: 'html', + parameters: new Map([['charset', 'shift_jis']]), + essence: 'text/html' + }) + + t.same(parseMIMEType('application/javascript'), { + type: 'application', + subtype: 'javascript', + parameters: new Map(), + essence: 'application/javascript' + }) + + t.end() +}) + +test('https://fetch.spec.whatwg.org/#collect-an-http-quoted-string', (t) => { + // https://fetch.spec.whatwg.org/#example-http-quoted-string + t.test('first', (t) => { + const position = { position: 0 } + + t.strictSame(collectAnHTTPQuotedString('"\\', { + position: 0 + }), '"\\') + t.strictSame(collectAnHTTPQuotedString('"\\', position, true), '\\') + t.strictSame(position.position, 2) + t.end() + }) + + t.test('second', (t) => { + const position = { position: 0 } + const input = '"Hello" World' + + t.strictSame(collectAnHTTPQuotedString(input, { + position: 0 + }), '"Hello"') + t.strictSame(collectAnHTTPQuotedString(input, position, true), 'Hello') + t.strictSame(position.position, 7) + t.end() + }) + + t.end() +}) + +// https://github.com/nodejs/undici/issues/1574 +test('too long base64 url', async (t) => { + const inputStr = 'a'.repeat(1 << 20) + const base64 = Buffer.from(inputStr).toString('base64') + const dataURIPrefix = 'data:application/octet-stream;base64,' + const dataURL = dataURIPrefix + base64 + try { + const res = await fetch(dataURL) + const buf = await res.arrayBuffer() + const outputStr = Buffer.from(buf).toString('ascii') + t.same(outputStr, inputStr) + } catch (e) { + t.fail(`failed to fetch ${dataURL}`) + } +}) + +test('https://domain.com/#', (t) => { + t.plan(1) + const domain = 'https://domain.com/#a' + const serialized = URLSerializer(new URL(domain)) + t.equal(serialized, domain) +}) + +test('https://domain.com/?', (t) => { + t.plan(1) + const domain = 'https://domain.com/?a=b' + const serialized = URLSerializer(new URL(domain)) + t.equal(serialized, domain) +}) + +// https://github.com/nodejs/undici/issues/2474 +test('hash url', (t) => { + t.plan(1) + const domain = 'https://domain.com/#a#b' + const url = new URL(domain) + const serialized = URLSerializer(url, true) + t.equal(serialized, url.href.substring(0, url.href.length - url.hash.length)) +}) + +// https://github.com/nodejs/undici/issues/2474 +test('data url that includes the hash', async (t) => { + t.plan(1) + const dataURL = 'data:,node#js#' + try { + const res = await fetch(dataURL) + t.equal(await res.text(), 'node') + } catch (error) { + t.fail(`failed to fetch ${dataURL}`) + } +}) diff --git a/test/fetch/encoding.js b/test/fetch/encoding.js new file mode 100644 index 0000000..75d8fc3 --- /dev/null +++ b/test/fetch/encoding.js @@ -0,0 +1,58 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { once } = require('events') +const { fetch } = require('../..') +const { createBrotliCompress, 
createGzip, createDeflate } = require('zlib') + +test('content-encoding header is case-iNsENsITIve', async (t) => { + const contentCodings = 'GZiP, bR' + const text = 'Hello, World!' + + const server = createServer((req, res) => { + const gzip = createGzip() + const brotli = createBrotliCompress() + + res.setHeader('Content-Encoding', contentCodings) + res.setHeader('Content-Type', 'text/plain') + + brotli.pipe(gzip).pipe(res) + + brotli.write(text) + brotli.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const response = await fetch(`http://localhost:${server.address().port}`) + + t.equal(await response.text(), text) + t.equal(response.headers.get('content-encoding'), contentCodings) +}) + +test('response decompression according to content-encoding should be handled in a correct order', async (t) => { + const contentCodings = 'deflate, gzip' + const text = 'Hello, World!' + + const server = createServer((req, res) => { + const gzip = createGzip() + const deflate = createDeflate() + + res.setHeader('Content-Encoding', contentCodings) + res.setHeader('Content-Type', 'text/plain') + + gzip.pipe(deflate).pipe(res) + + gzip.write(text) + gzip.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const response = await fetch(`http://localhost:${server.address().port}`) + + t.equal(await response.text(), text) +}) diff --git a/test/fetch/fetch-leak.js b/test/fetch/fetch-leak.js new file mode 100644 index 0000000..b8e6b16 --- /dev/null +++ b/test/fetch/fetch-leak.js @@ -0,0 +1,44 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') +const { createServer } = require('http') + +test('do not leak', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + let url + let done = false + server.listen(0, function attack () { + if (done) { + return + } + url ??= new URL(`http://127.0.0.1:${server.address().port}`) + const controller = new AbortController() + fetch(url, { signal: controller.signal }) + .then(res => res.arrayBuffer()) + .catch(() => {}) + .then(attack) + }) + + let prev = Infinity + let count = 0 + const interval = setInterval(() => { + done = true + global.gc() + const next = process.memoryUsage().heapUsed + if (next <= prev) { + t.pass() + } else if (count++ > 20) { + t.fail() + } else { + prev = next + } + }, 1e3) + t.teardown(() => clearInterval(interval)) +}) diff --git a/test/fetch/fetch-timeouts.js b/test/fetch/fetch-timeouts.js new file mode 100644 index 0000000..b659aaa --- /dev/null +++ b/test/fetch/fetch-timeouts.js @@ -0,0 +1,56 @@ +'use strict' + +const { test } = require('tap') + +const { fetch, Agent } = require('../..') +const timers = require('../../lib/timers') +const { createServer } = require('http') +const FakeTimers = require('@sinonjs/fake-timers') + +test('Fetch very long request, timeout overridden so no error', (t) => { + const minutes = 6 + const msToDelay = 1000 * 60 * minutes + + t.setTimeout(undefined) + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, msToDelay) + clock.tick(msToDelay + 1) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + 
fetch(`http://localhost:${server.address().port}`, { + path: '/', + method: 'GET', + dispatcher: new Agent({ + headersTimeout: 0, + connectTimeout: 0, + bodyTimeout: 0 + }) + }) + .then((response) => response.text()) + .then((response) => { + t.equal('hello', response) + t.end() + }) + .catch((err) => { + // This should not happen, a timeout error should not occur + t.error(err) + }) + + clock.tick(msToDelay - 1) + }) +}) diff --git a/test/fetch/file.js b/test/fetch/file.js new file mode 100644 index 0000000..5901541 --- /dev/null +++ b/test/fetch/file.js @@ -0,0 +1,190 @@ +'use strict' + +const { Blob } = require('buffer') +const { test } = require('tap') +const { File, FileLike } = require('../../lib/fetch/file') + +test('args validation', (t) => { + t.plan(14) + + t.throws(() => { + File.prototype.name.toString() + }, TypeError) + t.throws(() => { + File.prototype.lastModified.toString() + }, TypeError) + t.doesNotThrow(() => { + File.prototype[Symbol.toStringTag].charAt(0) + }, TypeError) + + t.throws(() => { + FileLike.prototype.stream.call(null) + }, TypeError) + t.throws(() => { + FileLike.prototype.arrayBuffer.call(null) + }, TypeError) + t.throws(() => { + FileLike.prototype.slice.call(null) + }, TypeError) + t.throws(() => { + FileLike.prototype.text.call(null) + }, TypeError) + t.throws(() => { + FileLike.prototype.size.toString() + }, TypeError) + t.throws(() => { + FileLike.prototype.type.toString() + }, TypeError) + t.throws(() => { + FileLike.prototype.name.toString() + }, TypeError) + t.throws(() => { + FileLike.prototype.lastModified.toString() + }, TypeError) + t.doesNotThrow(() => { + FileLike.prototype[Symbol.toStringTag].charAt(0) + }, TypeError) + + t.equal(File.prototype[Symbol.toStringTag], 'File') + t.equal(FileLike.prototype[Symbol.toStringTag], 'File') +}) + +test('return value of File.lastModified', (t) => { + t.plan(2) + + const f = new File(['asd1'], 'filename123') + const lastModified = f.lastModified + t.ok(typeof lastModified === typeof Date.now()) + t.ok(lastModified >= 0 && lastModified <= Date.now()) +}) + +test('Symbol.toStringTag', (t) => { + t.plan(2) + t.equal(new File([], '')[Symbol.toStringTag], 'File') + t.equal(new FileLike()[Symbol.toStringTag], 'File') +}) + +test('arguments', (t) => { + t.throws(() => { + new File() // eslint-disable-line no-new + }, TypeError) + + t.throws(() => { + new File([]) // eslint-disable-line no-new + }, TypeError) + + t.end() +}) + +test('lastModified', (t) => { + const file = new File([], '') + const lastModified = Date.now() - 69_000 + + t.notOk(file === 0) + + const file1 = new File([], '', { lastModified }) + t.equal(file1.lastModified, lastModified) + + t.equal(new File([], '', { lastModified: 0 }).lastModified, 0) + + t.equal( + new File([], '', { + lastModified: true + }).lastModified, + 1 + ) + + t.end() +}) + +test('File.prototype.text', async (t) => { + t.test('With Blob', async (t) => { + const blob1 = new Blob(['hello']) + const blob2 = new Blob([' ']) + const blob3 = new Blob(['world']) + + const file = new File([blob1, blob2, blob3], 'hello_world.txt') + + t.equal(await file.text(), 'hello world') + t.end() + }) + + /* eslint-disable camelcase */ + t.test('With TypedArray', async (t) => { + const uint8_1 = new Uint8Array(Buffer.from('hello')) + const uint8_2 = new Uint8Array(Buffer.from(' ')) + const uint8_3 = new Uint8Array(Buffer.from('world')) + + const file = new File([uint8_1, uint8_2, uint8_3], 'hello_world.txt') + + t.equal(await file.text(), 'hello world') + t.end() + }) + + t.test('With 
TypedArray range', async (t) => { + const uint8_1 = new Uint8Array(Buffer.from('hello world')) + const uint8_2 = new Uint8Array(uint8_1.buffer, 1, 4) + + const file = new File([uint8_2], 'hello_world.txt') + + t.equal(await file.text(), 'ello') + t.end() + }) + /* eslint-enable camelcase */ + + t.test('With ArrayBuffer', async (t) => { + const uint8 = new Uint8Array([65, 66, 67]) + const ab = uint8.buffer + + const file = new File([ab], 'file.txt') + + t.equal(await file.text(), 'ABC') + t.end() + }) + + t.test('With string', async (t) => { + const string = 'hello world' + const file = new File([string], 'hello_world.txt') + + t.equal(await file.text(), 'hello world') + t.end() + }) + + t.test('With Buffer', async (t) => { + const buffer = Buffer.from('hello world') + + const file = new File([buffer], 'hello_world.txt') + + t.equal(await file.text(), 'hello world') + t.end() + }) + + t.test('Mixed', async (t) => { + const blob = new Blob(['Hello, ']) + const uint8 = new Uint8Array(Buffer.from('world! This')) + const string = ' is a test! Hope it passes!' + + const file = new File([blob, uint8, string], 'mixed-messages.txt') + + t.equal( + await file.text(), + 'Hello, world! This is a test! Hope it passes!' + ) + t.end() + }) + + t.end() +}) + +test('endings=native', async (t) => { + const file = new File(['Hello\nWorld'], 'text.txt', { endings: 'native' }) + const text = await file.text() + + if (process.platform === 'win32') { + t.equal(text, 'Hello\r\nWorld', 'on windows, LF is replace with CRLF') + } else { + t.equal(text, 'Hello\nWorld', `on ${process.platform} LF stays LF`) + } + + t.end() +}) diff --git a/test/fetch/formdata.js b/test/fetch/formdata.js new file mode 100644 index 0000000..fed95bf --- /dev/null +++ b/test/fetch/formdata.js @@ -0,0 +1,401 @@ +'use strict' + +const { test } = require('tap') +const { FormData, File, Response } = require('../../') +const { Blob: ThirdPartyBlob } = require('formdata-node') +const { Blob } = require('buffer') +const { isFormDataLike } = require('../../lib/core/util') +const ThirdPartyFormDataInvalid = require('form-data') + +test('arg validation', (t) => { + const form = new FormData() + + // constructor + t.throws(() => { + // eslint-disable-next-line + new FormData('asd') + }, TypeError) + + // append + t.throws(() => { + FormData.prototype.append.call(null) + }, TypeError) + t.throws(() => { + form.append() + }, TypeError) + t.throws(() => { + form.append('k', 'not usv', '') + }, TypeError) + + // delete + t.throws(() => { + FormData.prototype.delete.call(null) + }, TypeError) + t.throws(() => { + form.delete() + }, TypeError) + + // get + t.throws(() => { + FormData.prototype.get.call(null) + }, TypeError) + t.throws(() => { + form.get() + }, TypeError) + + // getAll + t.throws(() => { + FormData.prototype.getAll.call(null) + }, TypeError) + t.throws(() => { + form.getAll() + }, TypeError) + + // has + t.throws(() => { + FormData.prototype.has.call(null) + }, TypeError) + t.throws(() => { + form.has() + }, TypeError) + + // set + t.throws(() => { + FormData.prototype.set.call(null) + }, TypeError) + t.throws(() => { + form.set('k') + }, TypeError) + t.throws(() => { + form.set('k', 'not usv', '') + }, TypeError) + + // iterator + t.throws(() => { + Reflect.apply(FormData.prototype[Symbol.iterator], null) + }, TypeError) + + // toStringTag + t.doesNotThrow(() => { + FormData.prototype[Symbol.toStringTag].charAt(0) + }) + + t.end() +}) + +test('append file', (t) => { + const form = new FormData() + form.set('asd', new File([], 'asd1', { 
type: 'text/plain' }), 'asd2') + form.append('asd2', new File([], 'asd1'), 'asd2') + + t.equal(form.has('asd'), true) + t.equal(form.has('asd2'), true) + t.equal(form.get('asd').name, 'asd2') + t.equal(form.get('asd2').name, 'asd2') + t.equal(form.get('asd').type, 'text/plain') + form.delete('asd') + t.equal(form.get('asd'), null) + t.equal(form.has('asd2'), true) + t.equal(form.has('asd'), false) + + t.end() +}) + +test('append blob', async (t) => { + const form = new FormData() + form.set('asd', new Blob(['asd1'], { type: 'text/plain' })) + + t.equal(form.has('asd'), true) + t.equal(form.get('asd').type, 'text/plain') + t.equal(await form.get('asd').text(), 'asd1') + form.delete('asd') + t.equal(form.get('asd'), null) + + t.end() +}) + +test('append third-party blob', async (t) => { + const form = new FormData() + form.set('asd', new ThirdPartyBlob(['asd1'], { type: 'text/plain' })) + + t.equal(form.has('asd'), true) + t.equal(form.get('asd').type, 'text/plain') + t.equal(await form.get('asd').text(), 'asd1') + form.delete('asd') + t.equal(form.get('asd'), null) + + t.end() +}) + +test('append string', (t) => { + const form = new FormData() + form.set('k1', 'v1') + form.set('k2', 'v2') + t.same([...form], [['k1', 'v1'], ['k2', 'v2']]) + t.equal(form.has('k1'), true) + t.equal(form.get('k1'), 'v1') + form.append('k1', 'v1+') + t.same(form.getAll('k1'), ['v1', 'v1+']) + form.set('k2', 'v1++') + t.equal(form.get('k2'), 'v1++') + form.delete('asd') + t.equal(form.get('asd'), null) + t.end() +}) + +test('formData.entries', (t) => { + t.plan(2) + const form = new FormData() + + t.test('with 0 entries', (t) => { + t.plan(1) + + const entries = [...form.entries()] + t.same(entries, []) + }) + + t.test('with 1+ entries', (t) => { + t.plan(2) + + form.set('k1', 'v1') + form.set('k2', 'v2') + + const entries = [...form.entries()] + const entries2 = [...form.entries()] + t.same(entries, [['k1', 'v1'], ['k2', 'v2']]) + t.same(entries, entries2) + }) +}) + +test('formData.keys', (t) => { + t.plan(2) + const form = new FormData() + + t.test('with 0 keys', (t) => { + t.plan(1) + + const keys = [...form.entries()] + t.same(keys, []) + }) + + t.test('with 1+ keys', (t) => { + t.plan(2) + + form.set('k1', 'v1') + form.set('k2', 'v2') + + const keys = [...form.keys()] + const keys2 = [...form.keys()] + t.same(keys, ['k1', 'k2']) + t.same(keys, keys2) + }) +}) + +test('formData.values', (t) => { + t.plan(2) + const form = new FormData() + + t.test('with 0 values', (t) => { + t.plan(1) + + const values = [...form.values()] + t.same(values, []) + }) + + t.test('with 1+ values', (t) => { + t.plan(2) + + form.set('k1', 'v1') + form.set('k2', 'v2') + + const values = [...form.values()] + const values2 = [...form.values()] + t.same(values, ['v1', 'v2']) + t.same(values, values2) + }) +}) + +test('formData forEach', (t) => { + t.test('invalid arguments', (t) => { + t.throws(() => { + FormData.prototype.forEach.call({}) + }, TypeError('Illegal invocation')) + + t.throws(() => { + const fd = new FormData() + + fd.forEach({}) + }, TypeError) + + t.end() + }) + + t.test('with a callback', (t) => { + const fd = new FormData() + + fd.set('a', 'b') + fd.set('c', 'd') + + let i = 0 + fd.forEach((value, key, self) => { + if (i++ === 0) { + t.equal(value, 'b') + t.equal(key, 'a') + } else { + t.equal(value, 'd') + t.equal(key, 'c') + } + + t.equal(fd, self) + }) + + t.end() + }) + + t.test('with a thisArg', (t) => { + const fd = new FormData() + fd.set('b', 'a') + + fd.forEach(function (value, key, self) { + t.equal(this, 
globalThis) + t.equal(fd, self) + t.equal(key, 'b') + t.equal(value, 'a') + }) + + const thisArg = Symbol('thisArg') + fd.forEach(function () { + t.equal(this, thisArg) + }, thisArg) + + t.end() + }) + + t.end() +}) + +test('formData toStringTag', (t) => { + const form = new FormData() + t.equal(form[Symbol.toStringTag], 'FormData') + t.equal(FormData.prototype[Symbol.toStringTag], 'FormData') + t.end() +}) + +test('formData.constructor.name', (t) => { + const form = new FormData() + t.equal(form.constructor.name, 'FormData') + t.end() +}) + +test('formData should be an instance of FormData', (t) => { + t.plan(3) + + t.test('Invalid class FormData', (t) => { + class FormData { + constructor () { + this.data = [] + } + + append (key, value) { + this.data.push([key, value]) + } + + get (key) { + return this.data.find(([k]) => k === key) + } + } + + const form = new FormData() + t.equal(isFormDataLike(form), false) + t.end() + }) + + t.test('Invalid function FormData', (t) => { + function FormData () { + const data = [] + return { + append (key, value) { + data.push([key, value]) + }, + get (key) { + return data.find(([k]) => k === key) + } + } + } + + const form = new FormData() + t.equal(isFormDataLike(form), false) + t.end() + }) + + test('Invalid third-party FormData', (t) => { + const form = new ThirdPartyFormDataInvalid() + t.equal(isFormDataLike(form), false) + t.end() + }) + + t.test('Valid FormData', (t) => { + const form = new FormData() + t.equal(isFormDataLike(form), true) + t.end() + }) +}) + +test('FormData should be compatible with third-party libraries', (t) => { + t.plan(1) + + class FormData { + constructor () { + this.data = [] + } + + get [Symbol.toStringTag] () { + return 'FormData' + } + + append () {} + delete () {} + get () {} + getAll () {} + has () {} + set () {} + entries () {} + keys () {} + values () {} + forEach () {} + } + + const form = new FormData() + t.equal(isFormDataLike(form), true) +}) + +test('arguments', (t) => { + t.equal(FormData.constructor.length, 1) + t.equal(FormData.prototype.append.length, 2) + t.equal(FormData.prototype.delete.length, 1) + t.equal(FormData.prototype.get.length, 1) + t.equal(FormData.prototype.getAll.length, 1) + t.equal(FormData.prototype.has.length, 1) + t.equal(FormData.prototype.set.length, 2) + + t.end() +}) + +// https://github.com/nodejs/undici/pull/1814 +test('FormData returned from bodyMixin.formData is not a clone', async (t) => { + const fd = new FormData() + fd.set('foo', 'bar') + + const res = new Response(fd) + fd.set('foo', 'foo') + + const fd2 = await res.formData() + + t.equal(fd2.get('foo'), 'bar') + t.equal(fd.get('foo'), 'foo') + + fd2.set('foo', 'baz') + + t.equal(fd2.get('foo'), 'baz') + t.equal(fd.get('foo'), 'foo') +}) diff --git a/test/fetch/general.js b/test/fetch/general.js new file mode 100644 index 0000000..0469875 --- /dev/null +++ b/test/fetch/general.js @@ -0,0 +1,30 @@ +'use strict' + +const { test } = require('tap') +const { + File, + FormData, + Headers, + Request, + Response +} = require('../../index') + +test('Symbol.toStringTag descriptor', (t) => { + for (const cls of [ + File, + FormData, + Headers, + Request, + Response + ]) { + const desc = Object.getOwnPropertyDescriptor(cls.prototype, Symbol.toStringTag) + t.same(desc, { + value: cls.name, + writable: false, + enumerable: false, + configurable: true + }) + } + + t.end() +}) diff --git a/test/fetch/headers.js b/test/fetch/headers.js new file mode 100644 index 0000000..4846110 --- /dev/null +++ b/test/fetch/headers.js @@ -0,0 +1,743 @@ 
+'use strict' + +const tap = require('tap') +const { Headers, fill } = require('../../lib/fetch/headers') +const { kGuard } = require('../../lib/fetch/symbols') +const { once } = require('events') +const { fetch } = require('../..') +const { createServer } = require('http') + +tap.test('Headers initialization', t => { + t.plan(8) + + t.test('allows undefined', t => { + t.plan(1) + + t.doesNotThrow(() => new Headers()) + }) + + t.test('with array of header entries', t => { + t.plan(3) + + t.test('fails on invalid array-based init', t => { + t.plan(3) + t.throws( + () => new Headers([['undici', 'fetch'], ['fetch']]), + TypeError('Headers constructor: expected name/value pair to be length 2, found 1.') + ) + t.throws(() => new Headers(['undici', 'fetch', 'fetch']), TypeError) + t.throws( + () => new Headers([0, 1, 2]), + TypeError('Sequence: Value of type Number is not an Object.') + ) + }) + + t.test('allows even length init', t => { + t.plan(1) + const init = [['undici', 'fetch'], ['fetch', 'undici']] + t.doesNotThrow(() => new Headers(init)) + }) + + t.test('fails for event flattened init', t => { + t.plan(1) + const init = ['undici', 'fetch', 'fetch', 'undici'] + t.throws( + () => new Headers(init), + TypeError('Sequence: Value of type String is not an Object.') + ) + }) + }) + + t.test('with object of header entries', t => { + t.plan(1) + const init = { + undici: 'fetch', + fetch: 'undici' + } + t.doesNotThrow(() => new Headers(init)) + }) + + t.test('fails silently if a boxed primitive object is passed', t => { + t.plan(3) + /* eslint-disable no-new-wrappers */ + t.doesNotThrow(() => new Headers(new Number())) + t.doesNotThrow(() => new Headers(new Boolean())) + t.doesNotThrow(() => new Headers(new String())) + /* eslint-enable no-new-wrappers */ + }) + + t.test('fails if primitive is passed', t => { + t.plan(2) + const expectedTypeError = TypeError + t.throws(() => new Headers(1), expectedTypeError) + t.throws(() => new Headers('1'), expectedTypeError) + }) + + t.test('allows some weird stuff (because of webidl)', t => { + t.doesNotThrow(() => { + new Headers(function () {}) // eslint-disable-line no-new + }) + + t.doesNotThrow(() => { + new Headers(Function) // eslint-disable-line no-new + }) + + t.end() + }) + + t.test('allows a myriad of header values to be passed', t => { + t.plan(4) + + // Headers constructor uses Headers.append + + t.doesNotThrow(() => new Headers([ + ['a', ['b', 'c']], + ['d', ['e', 'f']] + ]), 'allows any array values') + t.doesNotThrow(() => new Headers([ + ['key', null] + ]), 'allows null values') + t.throws(() => new Headers([ + ['key'] + ]), 'throws when 2 arguments are not passed') + t.throws(() => new Headers([ + ['key', 'value', 'value2'] + ]), 'throws when too many arguments are passed') + }) + + t.test('accepts headers as objects with array values', t => { + t.plan(1) + const headers = new Headers({ + c: '5', + b: ['3', '4'], + a: ['1', '2'] + }) + + t.same([...headers.entries()], [ + ['a', '1,2'], + ['b', '3,4'], + ['c', '5'] + ]) + }) +}) + +tap.test('Headers append', t => { + t.plan(3) + + t.test('adds valid header entry to instance', t => { + t.plan(2) + const headers = new Headers() + + const name = 'undici' + const value = 'fetch' + t.doesNotThrow(() => headers.append(name, value)) + t.equal(headers.get(name), value) + }) + + t.test('adds valid header to existing entry', t => { + t.plan(4) + const headers = new Headers() + + const name = 'undici' + const value1 = 'fetch1' + const value2 = 'fetch2' + const value3 = 'fetch3' + headers.append(name, 
value1) + t.equal(headers.get(name), value1) + t.doesNotThrow(() => headers.append(name, value2)) + t.doesNotThrow(() => headers.append(name, value3)) + t.equal(headers.get(name), [value1, value2, value3].join(', ')) + }) + + t.test('throws on invalid entry', t => { + t.plan(3) + const headers = new Headers() + + t.throws(() => headers.append(), 'throws on missing name and value') + t.throws(() => headers.append('undici'), 'throws on missing value') + t.throws(() => headers.append('invalid @ header ? name', 'valid value'), 'throws on invalid name') + }) +}) + +tap.test('Headers delete', t => { + t.plan(4) + + t.test('deletes valid header entry from instance', t => { + t.plan(3) + const headers = new Headers() + + const name = 'undici' + const value = 'fetch' + headers.append(name, value) + t.equal(headers.get(name), value) + t.doesNotThrow(() => headers.delete(name)) + t.equal(headers.get(name), null) + }) + + t.test('does not mutate internal list when no match is found', t => { + t.plan(3) + + const headers = new Headers() + const name = 'undici' + const value = 'fetch' + headers.append(name, value) + t.equal(headers.get(name), value) + t.doesNotThrow(() => headers.delete('not-undici')) + t.equal(headers.get(name), value) + }) + + t.test('throws on invalid entry', t => { + t.plan(2) + const headers = new Headers() + + t.throws(() => headers.delete(), 'throws on missing namee') + t.throws(() => headers.delete('invalid @ header ? name'), 'throws on invalid name') + }) + + // https://github.com/nodejs/undici/issues/2429 + t.test('`Headers#delete` returns undefined', t => { + t.plan(2) + const headers = new Headers({ test: 'test' }) + + t.same(headers.delete('test'), undefined) + t.same(headers.delete('test2'), undefined) + }) +}) + +tap.test('Headers get', t => { + t.plan(3) + + t.test('returns null if not found in instance', t => { + t.plan(1) + const headers = new Headers() + headers.append('undici', 'fetch') + + t.equal(headers.get('not-undici'), null) + }) + + t.test('returns header values from valid header name', t => { + t.plan(2) + const headers = new Headers() + + const name = 'undici'; const value1 = 'fetch1'; const value2 = 'fetch2' + headers.append(name, value1) + t.equal(headers.get(name), value1) + headers.append(name, value2) + t.equal(headers.get(name), [value1, value2].join(', ')) + }) + + t.test('throws on invalid entry', t => { + t.plan(2) + const headers = new Headers() + + t.throws(() => headers.get(), 'throws on missing name') + t.throws(() => headers.get('invalid @ header ? name'), 'throws on invalid name') + }) +}) + +tap.test('Headers has', t => { + t.plan(2) + + t.test('returns boolean existence for a header name', t => { + t.plan(2) + const headers = new Headers() + + const name = 'undici' + headers.append('not-undici', 'fetch') + t.equal(headers.has(name), false) + headers.append(name, 'fetch') + t.equal(headers.has(name), true) + }) + + t.test('throws on invalid entry', t => { + t.plan(2) + const headers = new Headers() + + t.throws(() => headers.has(), 'throws on missing name') + t.throws(() => headers.has('invalid @ header ? 
name'), 'throws on invalid name') + }) +}) + +tap.test('Headers set', t => { + t.plan(5) + + t.test('sets valid header entry to instance', t => { + t.plan(2) + const headers = new Headers() + + const name = 'undici' + const value = 'fetch' + headers.append('not-undici', 'fetch') + t.doesNotThrow(() => headers.set(name, value)) + t.equal(headers.get(name), value) + }) + + t.test('overwrites existing entry', t => { + t.plan(4) + const headers = new Headers() + + const name = 'undici' + const value1 = 'fetch1' + const value2 = 'fetch2' + t.doesNotThrow(() => headers.set(name, value1)) + t.equal(headers.get(name), value1) + t.doesNotThrow(() => headers.set(name, value2)) + t.equal(headers.get(name), value2) + }) + + t.test('allows setting a myriad of values', t => { + t.plan(4) + const headers = new Headers() + + t.doesNotThrow(() => headers.set('a', ['b', 'c']), 'sets array values properly') + t.doesNotThrow(() => headers.set('b', null), 'allows setting null values') + t.throws(() => headers.set('c'), 'throws when 2 arguments are not passed') + t.doesNotThrow(() => headers.set('c', 'd', 'e'), 'ignores extra arguments') + }) + + t.test('throws on invalid entry', t => { + t.plan(3) + const headers = new Headers() + + t.throws(() => headers.set(), 'throws on missing name and value') + t.throws(() => headers.set('undici'), 'throws on missing value') + t.throws(() => headers.set('invalid @ header ? name', 'valid value'), 'throws on invalid name') + }) + + // https://github.com/nodejs/undici/issues/2431 + t.test('`Headers#set` returns undefined', t => { + t.plan(2) + const headers = new Headers() + + t.same(headers.set('a', 'b'), undefined) + + t.notOk(headers.set('c', 'd') instanceof Map) + }) +}) + +tap.test('Headers forEach', t => { + const headers = new Headers([['a', 'b'], ['c', 'd']]) + + t.test('standard', t => { + t.equal(typeof headers.forEach, 'function') + + headers.forEach((value, key, headerInstance) => { + t.ok(value === 'b' || value === 'd') + t.ok(key === 'a' || key === 'c') + t.equal(headers, headerInstance) + }) + + t.end() + }) + + t.test('when no thisArg is set, it is globalThis', (t) => { + headers.forEach(function () { + t.equal(this, globalThis) + }) + + t.end() + }) + + t.test('with thisArg', t => { + const thisArg = { a: Math.random() } + headers.forEach(function () { + t.equal(this, thisArg) + }, thisArg) + + t.end() + }) + + t.end() +}) + +tap.test('Headers as Iterable', t => { + t.plan(7) + + t.test('should freeze values while iterating', t => { + t.plan(1) + const init = [ + ['foo', '123'], + ['bar', '456'] + ] + const expected = [ + ['foo', '123'], + ['x-x-bar', '456'] + ] + const headers = new Headers(init) + for (const [key, val] of headers) { + headers.delete(key) + headers.set(`x-${key}`, val) + } + t.strictSame([...headers], expected) + }) + + t.test('returns combined and sorted entries using .forEach()', t => { + t.plan(8) + const init = [ + ['a', '1'], + ['b', '2'], + ['c', '3'], + ['abc', '4'], + ['b', '5'] + ] + const expected = [ + ['a', '1'], + ['abc', '4'], + ['b', '2, 5'], + ['c', '3'] + ] + const headers = new Headers(init) + const that = {} + let i = 0 + headers.forEach(function (value, key, _headers) { + t.strictSame(expected[i++], [key, value]) + t.equal(this, that) + }, that) + }) + + t.test('returns combined and sorted entries using .entries()', t => { + t.plan(4) + const init = [ + ['a', '1'], + ['b', '2'], + ['c', '3'], + ['abc', '4'], + ['b', '5'] + ] + const expected = [ + ['a', '1'], + ['abc', '4'], + ['b', '2, 5'], + ['c', '3'] + ] + const 
headers = new Headers(init) + let i = 0 + for (const header of headers.entries()) { + t.strictSame(header, expected[i++]) + } + }) + + t.test('returns combined and sorted keys using .keys()', t => { + t.plan(4) + const init = [ + ['a', '1'], + ['b', '2'], + ['c', '3'], + ['abc', '4'], + ['b', '5'] + ] + const expected = ['a', 'abc', 'b', 'c'] + const headers = new Headers(init) + let i = 0 + for (const key of headers.keys()) { + t.strictSame(key, expected[i++]) + } + }) + + t.test('returns combined and sorted values using .values()', t => { + t.plan(4) + const init = [ + ['a', '1'], + ['b', '2'], + ['c', '3'], + ['abc', '4'], + ['b', '5'] + ] + const expected = ['1', '4', '2, 5', '3'] + const headers = new Headers(init) + let i = 0 + for (const value of headers.values()) { + t.strictSame(value, expected[i++]) + } + }) + + t.test('returns combined and sorted entries using for...of loop', t => { + t.plan(5) + const init = [ + ['a', '1'], + ['b', '2'], + ['c', '3'], + ['abc', '4'], + ['b', '5'], + ['d', ['6', '7']] + ] + const expected = [ + ['a', '1'], + ['abc', '4'], + ['b', '2, 5'], + ['c', '3'], + ['d', '6,7'] + ] + let i = 0 + for (const header of new Headers(init)) { + t.strictSame(header, expected[i++]) + } + }) + + t.test('validate append ordering', t => { + t.plan(1) + const headers = new Headers([['b', '2'], ['c', '3'], ['e', '5']]) + headers.append('d', '4') + headers.append('a', '1') + headers.append('f', '6') + headers.append('c', '7') + headers.append('abc', '8') + + const expected = [...new Map([ + ['a', '1'], + ['abc', '8'], + ['b', '2'], + ['c', '3, 7'], + ['d', '4'], + ['e', '5'], + ['f', '6'] + ])] + + t.same([...headers], expected) + }) +}) + +tap.test('arg validation', (t) => { + // fill + t.throws(() => { + fill({}, 0) + }, TypeError) + + const headers = new Headers() + + // constructor + t.throws(() => { + // eslint-disable-next-line + new Headers(0) + }, TypeError) + + // get [Symbol.toStringTag] + t.doesNotThrow(() => { + Object.prototype.toString.call(Headers.prototype) + }) + + // toString + t.doesNotThrow(() => { + Headers.prototype.toString.call(null) + }) + + // append + t.throws(() => { + Headers.prototype.append.call(null) + }, TypeError) + t.throws(() => { + headers.append() + }, TypeError) + + // delete + t.throws(() => { + Headers.prototype.delete.call(null) + }, TypeError) + t.throws(() => { + headers.delete() + }, TypeError) + + // get + t.throws(() => { + Headers.prototype.get.call(null) + }, TypeError) + t.throws(() => { + headers.get() + }, TypeError) + + // has + t.throws(() => { + Headers.prototype.has.call(null) + }, TypeError) + t.throws(() => { + headers.has() + }, TypeError) + + // set + t.throws(() => { + Headers.prototype.set.call(null) + }, TypeError) + t.throws(() => { + headers.set() + }, TypeError) + + // forEach + t.throws(() => { + Headers.prototype.forEach.call(null) + }, TypeError) + t.throws(() => { + headers.forEach() + }, TypeError) + t.throws(() => { + headers.forEach(1) + }, TypeError) + + // inspect + t.throws(() => { + Headers.prototype[Symbol.for('nodejs.util.inspect.custom')].call(null) + }, TypeError) + + t.end() +}) + +tap.test('function signature verification', (t) => { + t.test('function length', (t) => { + t.equal(Headers.prototype.append.length, 2) + t.equal(Headers.prototype.constructor.length, 0) + t.equal(Headers.prototype.delete.length, 1) + t.equal(Headers.prototype.entries.length, 0) + t.equal(Headers.prototype.forEach.length, 1) + t.equal(Headers.prototype.get.length, 1) + t.equal(Headers.prototype.has.length, 1) + 
t.equal(Headers.prototype.keys.length, 0) + t.equal(Headers.prototype.set.length, 2) + t.equal(Headers.prototype.values.length, 0) + t.equal(Headers.prototype[Symbol.iterator].length, 0) + t.equal(Headers.prototype.toString.length, 0) + + t.end() + }) + + t.test('function equality', (t) => { + t.equal(Headers.prototype.entries, Headers.prototype[Symbol.iterator]) + t.equal(Headers.prototype.toString, Object.prototype.toString) + + t.end() + }) + + t.test('toString and Symbol.toStringTag', (t) => { + t.equal(Object.prototype.toString.call(Headers.prototype), '[object Headers]') + t.equal(Headers.prototype[Symbol.toStringTag], 'Headers') + t.equal(Headers.prototype.toString.call(null), '[object Null]') + + t.end() + }) + + t.end() +}) + +tap.test('various init paths of Headers', (t) => { + const h1 = new Headers() + const h2 = new Headers({}) + const h3 = new Headers(undefined) + t.equal([...h1.entries()].length, 0) + t.equal([...h2.entries()].length, 0) + t.equal([...h3.entries()].length, 0) + + t.end() +}) + +tap.test('immutable guard', (t) => { + const headers = new Headers() + headers.set('key', 'val') + headers[kGuard] = 'immutable' + + t.throws(() => { + headers.set('asd', 'asd') + }) + t.throws(() => { + headers.append('asd', 'asd') + }) + t.throws(() => { + headers.delete('asd') + }) + t.equal(headers.get('key'), 'val') + t.equal(headers.has('key'), true) + + t.end() +}) + +tap.test('request-no-cors guard', (t) => { + const headers = new Headers() + headers[kGuard] = 'request-no-cors' + t.doesNotThrow(() => { headers.set('key', 'val') }) + t.doesNotThrow(() => { headers.append('key', 'val') }) + t.doesNotThrow(() => { headers.delete('key') }) + t.end() +}) + +tap.test('invalid headers', (t) => { + t.doesNotThrow(() => new Headers({ "abcdefghijklmnopqrstuvwxyz0123456789!#$%&'*+-.^_`|~": 'test' })) + + const chars = '"(),/:;<=>?@[\\]{}'.split('') + + for (const char of chars) { + t.throws(() => new Headers({ [char]: 'test' }), TypeError, `The string "${char}" should throw an error.`) + } + + for (const byte of ['\r', '\n', '\t', ' ', String.fromCharCode(128), '']) { + t.throws(() => { + new Headers().set(byte, 'test') + }, TypeError, 'invalid header name') + } + + for (const byte of [ + '\0', + '\r', + '\n' + ]) { + t.throws(() => { + new Headers().set('a', `a${byte}b`) + }, TypeError, 'not allowed at all in header value') + } + + t.doesNotThrow(() => { + new Headers().set('a', '\r') + }) + + t.doesNotThrow(() => { + new Headers().set('a', '\n') + }) + + t.throws(() => { + new Headers().set('a', Symbol('symbol')) + }, TypeError, 'symbols should throw') + + t.end() +}) + +tap.test('headers that might cause a ReDoS', (t) => { + t.doesNotThrow(() => { + // This test will time out if the ReDoS attack is successful. 
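+ // The value below packs ~500k tab characters between two ordinary bytes; a backtracking header-value validation regexp would stall here, which is what would make this test time out.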
+ const headers = new Headers() + const attack = 'a' + '\t'.repeat(500_000) + '\ta' + headers.append('fhqwhgads', attack) + }) + + t.end() +}) + +tap.test('Headers.prototype.getSetCookie', (t) => { + t.test('Mutating the returned list does not affect the set-cookie list', (t) => { + const h = new Headers([ + ['set-cookie', 'a=b'], + ['set-cookie', 'c=d'] + ]) + + const old = h.getSetCookie() + h.getSetCookie().push('oh=no') + const now = h.getSetCookie() + + t.same(old, now) + t.end() + }) + + // https://github.com/nodejs/undici/issues/1935 + t.test('When Headers are cloned, so are the cookies', async (t) => { + const server = createServer((req, res) => { + res.setHeader('Set-Cookie', 'test=onetwo') + res.end('Hello World!') + }).listen(0) + + await once(server, 'listening') + t.teardown(server.close.bind(server)) + + const res = await fetch(`http://localhost:${server.address().port}`) + const entries = Object.fromEntries(res.headers.entries()) + + t.same(res.headers.getSetCookie(), ['test=onetwo']) + t.ok('set-cookie' in entries) + }) + + t.end() +}) diff --git a/test/fetch/http2.js b/test/fetch/http2.js new file mode 100644 index 0000000..9f6997f --- /dev/null +++ b/test/fetch/http2.js @@ -0,0 +1,415 @@ +'use strict' + +const { createSecureServer } = require('node:http2') +const { createReadStream, readFileSync } = require('node:fs') +const { once } = require('node:events') +const { Blob } = require('node:buffer') +const { Readable } = require('node:stream') + +const { test, plan } = require('tap') +const pem = require('https-pem') + +const { Client, fetch, Headers } = require('../..') + +const nodeVersion = Number(process.version.split('v')[1].split('.')[0]) + +plan(7) + +test('[Fetch] Issue#2311', async t => { + const expectedBody = 'hello from client!' + + const server = createSecureServer(pem, async (req, res) => { + let body = '' + + req.setEncoding('utf8') + + res.writeHead(200, { + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': req.headers['x-my-header'] + }) + + for await (const chunk of req) { + body += chunk + } + + res.end(body) + }) + + t.plan(1) + + server.listen() + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + method: 'POST', + dispatcher: client, + headers: { + 'x-my-header': 'foo', + 'content-type': 'text-plain' + }, + body: expectedBody + } + ) + + const responseBody = await response.text() + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + t.equal(responseBody, expectedBody) +}) + +test('[Fetch] Simple GET with h2', async t => { + const server = createSecureServer(pem) + const expectedRequestBody = 'hello h2!' 
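+ // The handler echoes selected request headers back as x-custom-h2 / x-method response headers so the client side can assert them over a single HTTP/2 stream.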
+ + server.on('stream', async (stream, headers) => { + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + 'x-method': headers[':method'], + ':status': 200 + }) + + stream.end(expectedRequestBody) + }) + + t.plan(5) + + server.listen() + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + method: 'GET', + dispatcher: client, + headers: { + 'x-my-header': 'foo', + 'content-type': 'text-plain' + } + } + ) + + const responseBody = await response.text() + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + t.equal(responseBody, expectedRequestBody) + t.equal(response.headers.get('x-method'), 'GET') + t.equal(response.headers.get('x-custom-h2'), 'foo') + // https://github.com/nodejs/undici/issues/2415 + t.throws(() => { + response.headers.get(':status') + }, TypeError) + + // See https://fetch.spec.whatwg.org/#concept-response-status-message + t.equal(response.statusText, '') +}) + +test('[Fetch] Should handle h2 request with body (string or buffer)', async t => { + const server = createSecureServer(pem) + const expectedBody = 'hello from client!' + const expectedRequestBody = 'hello h2!' + const requestBody = [] + + server.on('stream', async (stream, headers) => { + stream.on('data', chunk => requestBody.push(chunk)) + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + stream.end(expectedRequestBody) + }) + + t.plan(2) + + server.listen() + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + method: 'POST', + dispatcher: client, + headers: { + 'x-my-header': 'foo', + 'content-type': 'text-plain' + }, + body: expectedBody + } + ) + + const responseBody = await response.text() + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + t.equal(Buffer.concat(requestBody).toString('utf-8'), expectedBody) + t.equal(responseBody, expectedRequestBody) +}) + +// Skipping for now, there is something odd in the way the body is handled +test( + '[Fetch] Should handle h2 request with body (stream)', + { skip: nodeVersion === 16 }, + async t => { + const server = createSecureServer(pem) + const expectedBody = readFileSync(__filename, 'utf-8') + const stream = createReadStream(__filename) + const requestChunks = [] + + server.on('stream', async (stream, headers) => { + t.equal(headers[':method'], 'PUT') + t.equal(headers[':path'], '/') + t.equal(headers[':scheme'], 'https') + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + for await (const chunk of stream) { + requestChunks.push(chunk) + } + + stream.end('hello h2!') + }) + + t.plan(8) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + 
t.teardown(client.close.bind(client)) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + method: 'PUT', + dispatcher: client, + headers: { + 'x-my-header': 'foo', + 'content-type': 'text-plain' + }, + body: Readable.toWeb(stream), + duplex: 'half' + } + ) + + const responseBody = await response.text() + + t.equal(response.status, 200) + t.equal(response.headers.get('content-type'), 'text/plain; charset=utf-8') + t.equal(response.headers.get('x-custom-h2'), 'foo') + t.equal(responseBody, 'hello h2!') + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) + } +) +test('Should handle h2 request with body (Blob)', { skip: !Blob }, async t => { + const server = createSecureServer(pem) + const expectedBody = 'asd' + const requestChunks = [] + const body = new Blob(['asd'], { + type: 'text/plain' + }) + + server.on('stream', async (stream, headers) => { + t.equal(headers[':method'], 'POST') + t.equal(headers[':path'], '/') + t.equal(headers[':scheme'], 'https') + + stream.on('data', chunk => requestChunks.push(chunk)) + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + stream.end('hello h2!') + }) + + t.plan(8) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + body, + method: 'POST', + dispatcher: client, + headers: { + 'x-my-header': 'foo', + 'content-type': 'text-plain' + } + } + ) + + const responseBody = await response.arrayBuffer() + + t.equal(response.status, 200) + t.equal(response.headers.get('content-type'), 'text/plain; charset=utf-8') + t.equal(response.headers.get('x-custom-h2'), 'foo') + t.same(new TextDecoder().decode(responseBody).toString(), 'hello h2!') + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) +}) + +test( + 'Should handle h2 request with body (Blob:ArrayBuffer)', + { skip: !Blob }, + async t => { + const server = createSecureServer(pem) + const expectedBody = 'hello' + const requestChunks = [] + const expectedResponseBody = { hello: 'h2' } + const buf = Buffer.from(expectedBody) + const body = new ArrayBuffer(buf.byteLength) + + buf.copy(new Uint8Array(body)) + + server.on('stream', async (stream, headers) => { + t.equal(headers[':method'], 'PUT') + t.equal(headers[':path'], '/') + t.equal(headers[':scheme'], 'https') + + stream.on('data', chunk => requestChunks.push(chunk)) + + stream.respond({ + 'content-type': 'application/json', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + stream.end(JSON.stringify(expectedResponseBody)) + }) + + t.plan(8) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + body, + method: 'PUT', + dispatcher: client, + headers: { + 'x-my-header': 'foo', + 
'content-type': 'text-plain' + } + } + ) + + const responseBody = await response.json() + + t.equal(response.status, 200) + t.equal(response.headers.get('content-type'), 'application/json') + t.equal(response.headers.get('x-custom-h2'), 'foo') + t.same(responseBody, expectedResponseBody) + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) + } +) + +test('Issue#2415', async (t) => { + t.plan(1) + const server = createSecureServer(pem) + + server.on('stream', async (stream, headers) => { + stream.respond({ + ':status': 200 + }) + stream.end('test') + }) + + server.listen() + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + const response = await fetch( + `https://localhost:${server.address().port}/`, + // Needs to be passed to disable the reject unauthorized + { + method: 'GET', + dispatcher: client + } + ) + + await response.text() + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + t.doesNotThrow(() => new Headers(response.headers)) +}) diff --git a/test/fetch/integrity.js b/test/fetch/integrity.js new file mode 100644 index 0000000..f91f693 --- /dev/null +++ b/test/fetch/integrity.js @@ -0,0 +1,150 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { createHash, getHashes } = require('crypto') +const { gzipSync } = require('zlib') +const { fetch, setGlobalDispatcher, Agent } = require('../..') +const { once } = require('events') + +const supportedHashes = getHashes() + +setGlobalDispatcher(new Agent({ + keepAliveTimeout: 1, + keepAliveMaxTimeout: 1 +})) + +test('request with correct integrity checksum', (t) => { + const body = 'Hello world!' + const hash = createHash('sha256').update(body).digest('base64') + + const server = createServer((req, res) => { + res.end(body) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const response = await fetch(`http://localhost:${server.address().port}`, { + integrity: `sha256-${hash}` + }) + t.strictSame(body, await response.text()) + t.end() + }) +}) + +test('request with wrong integrity checksum', (t) => { + const body = 'Hello world!' + const hash = 'c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51b' + + const server = createServer((req, res) => { + res.end(body) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + fetch(`http://localhost:${server.address().port}`, { + integrity: `sha256-${hash}` + }).then(response => { + t.pass('request did not fail') + }).catch((err) => { + t.equal(err.cause.message, 'integrity mismatch') + }).finally(() => { + t.end() + }) + }) +}) + +test('request with integrity checksum on encoded body', (t) => { + const body = 'Hello world!' 
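+ // The digest is computed over the plain text: integrity is verified against the decoded body, even though the server responds with a gzip content-encoding.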
+ const hash = createHash('sha256').update(body).digest('base64') + + const server = createServer((req, res) => { + res.setHeader('content-encoding', 'gzip') + res.end(gzipSync(body)) + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const response = await fetch(`http://localhost:${server.address().port}`, { + integrity: `sha256-${hash}` + }) + t.strictSame(body, await response.text()) + t.end() + }) +}) + +test('request with a totally incorrect integrity', async (t) => { + const server = createServer((req, res) => { + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + await t.resolves(fetch(`http://localhost:${server.address().port}`, { + integrity: 'what-integrityisthis' + })) +}) + +test('request with mixed in/valid integrities', async (t) => { + const body = 'Hello world!' + const hash = createHash('sha256').update(body).digest('base64') + + const server = createServer((req, res) => { + res.end(body) + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + await t.resolves(fetch(`http://localhost:${server.address().port}`, { + integrity: `invalid-integrity sha256-${hash}` + })) +}) + +test('request with sha384 hash', { skip: !supportedHashes.includes('sha384') }, async (t) => { + const body = 'Hello world!' + const hash = createHash('sha384').update(body).digest('base64') + + const server = createServer((req, res) => { + res.end(body) + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + // request should succeed + await t.resolves(fetch(`http://localhost:${server.address().port}`, { + integrity: `sha384-${hash}` + })) + + // request should fail + await t.rejects(fetch(`http://localhost:${server.address().port}`, { + integrity: 'sha384-ypeBEsobvcr6wjGzmiPcTaeG7/gUfE5yuYB3ha/uSLs=' + })) +}) + +test('request with sha512 hash', { skip: !supportedHashes.includes('sha512') }, async (t) => { + const body = 'Hello world!' 
+ const hash = createHash('sha512').update(body).digest('base64') + + const server = createServer((req, res) => { + res.end(body) + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + // request should succeed + await t.resolves(fetch(`http://localhost:${server.address().port}`, { + integrity: `sha512-${hash}` + })) + + // request should fail + await t.rejects(fetch(`http://localhost:${server.address().port}`, { + integrity: 'sha512-ypeBEsobvcr6wjGzmiPcTaeG7/gUfE5yuYB3ha/uSLs=' + })) +}) diff --git a/test/fetch/issue-1447.js b/test/fetch/issue-1447.js new file mode 100644 index 0000000..503b344 --- /dev/null +++ b/test/fetch/issue-1447.js @@ -0,0 +1,46 @@ +'use strict' + +const { test, skip } = require('tap') +const { nodeMajor } = require('../../lib/core/util') + +if (nodeMajor === 16) { + skip('esbuild uses static blocks with --keep-names which node 16.8 does not have') + process.exit() +} + +const undici = require('../..') +const { fetch: theoreticalGlobalFetch } = require('../../undici-fetch') + +test('Mocking works with both fetches', async (t) => { + const mockAgent = new undici.MockAgent() + const body = JSON.stringify({ foo: 'bar' }) + + mockAgent.disableNetConnect() + undici.setGlobalDispatcher(mockAgent) + const pool = mockAgent.get('https://example.com') + + pool.intercept({ + path: '/path', + method: 'POST', + body (bodyString) { + t.equal(bodyString, body) + return true + } + }).reply(200, { ok: 1 }).times(2) + + const url = new URL('https://example.com/path').href + + // undici fetch from node_modules + await undici.fetch(url, { + method: 'POST', + body + }) + + // the global fetch bundled with esbuild + await theoreticalGlobalFetch(url, { + method: 'POST', + body + }) + + t.end() +}) diff --git a/test/fetch/issue-2009.js b/test/fetch/issue-2009.js new file mode 100644 index 0000000..0b7b3e9 --- /dev/null +++ b/test/fetch/issue-2009.js @@ -0,0 +1,28 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') +const { createServer } = require('http') +const { once } = require('events') + +test('issue 2009', async (t) => { + const server = createServer((req, res) => { + res.setHeader('a', 'b') + res.flushHeaders() + + res.socket.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + for (let i = 0; i < 10; i++) { + await t.resolves( + fetch(`http://localhost:${server.address().port}`).then( + async (resp) => { + await resp.body.cancel('Some message') + } + ) + ) + } +}) diff --git a/test/fetch/issue-2021.js b/test/fetch/issue-2021.js new file mode 100644 index 0000000..cd28a71 --- /dev/null +++ b/test/fetch/issue-2021.js @@ -0,0 +1,32 @@ +'use strict' + +const { test } = require('tap') +const { once } = require('events') +const { createServer } = require('http') +const { fetch } = require('../..') + +// https://github.com/nodejs/undici/issues/2021 +test('content-length header is removed on redirect', async (t) => { + const server = createServer((req, res) => { + if (req.url === '/redirect') { + res.writeHead(302, { Location: '/redirect2' }) + res.end() + return + } + + res.end() + }).listen(0).unref() + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const body = 'a+b+c' + + await t.resolves(fetch(`http://localhost:${server.address().port}/redirect`, { + method: 'POST', + body, + headers: { + 'content-length': Buffer.byteLength(body) + } + })) +}) diff --git a/test/fetch/issue-2171.js b/test/fetch/issue-2171.js new file mode 100644 
index 0000000..b04ae0e --- /dev/null +++ b/test/fetch/issue-2171.js @@ -0,0 +1,25 @@ +'use strict' + +const { fetch } = require('../..') +const { DOMException } = require('../../lib/fetch/constants') +const { once } = require('events') +const { createServer } = require('http') +const { test } = require('tap') + +test('error reason is forwarded - issue #2171', { skip: !AbortSignal.timeout }, async (t) => { + const server = createServer(() => {}).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const timeout = AbortSignal.timeout(100) + await t.rejects( + fetch(`http://localhost:${server.address().port}`, { + signal: timeout + }), + { + name: 'TimeoutError', + code: DOMException.TIMEOUT_ERR + } + ) +}) diff --git a/test/fetch/issue-2242.js b/test/fetch/issue-2242.js new file mode 100644 index 0000000..fe70412 --- /dev/null +++ b/test/fetch/issue-2242.js @@ -0,0 +1,8 @@ +'use strict' + +const { test } = require('tap') +const { fetch } = require('../..') + +test('fetch with signal already aborted', async (t) => { + await t.rejects(fetch('http://localhost', { signal: AbortSignal.abort('Already aborted') }), 'Already aborted') +}) diff --git a/test/fetch/issue-2318.js b/test/fetch/issue-2318.js new file mode 100644 index 0000000..e4f610d --- /dev/null +++ b/test/fetch/issue-2318.js @@ -0,0 +1,25 @@ +'use strict' + +const { test } = require('tap') +const { once } = require('events') +const { createServer } = require('http') +const { fetch } = require('../..') + +test('Undici overrides user-provided `Host` header', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.equal(req.headers.host, `localhost:${server.address().port}`) + + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + await fetch(`http://localhost:${server.address().port}`, { + headers: { + host: 'www.idk.org' + } + }) +}) diff --git a/test/fetch/issue-node-46525.js b/test/fetch/issue-node-46525.js new file mode 100644 index 0000000..6fd9810 --- /dev/null +++ b/test/fetch/issue-node-46525.js @@ -0,0 +1,28 @@ +'use strict' + +const { once } = require('events') +const { createServer } = require('http') +const { test } = require('tap') +const { fetch } = require('../..') + +// https://github.com/nodejs/node/issues/46525 +test('No warning when reusing AbortController', async (t) => { + function onWarning (error) { + t.error(error, 'Got warning') + } + + const server = createServer((req, res) => res.end()).listen(0) + + await once(server, 'listening') + + process.on('warning', onWarning) + t.teardown(() => { + process.off('warning', onWarning) + return server.close() + }) + + const controller = new AbortController() + for (let i = 0; i < 15; i++) { + await fetch(`http://localhost:${server.address().port}`, { signal: controller.signal }) + } +}) diff --git a/test/fetch/iterators.js b/test/fetch/iterators.js new file mode 100644 index 0000000..6c6761d --- /dev/null +++ b/test/fetch/iterators.js @@ -0,0 +1,140 @@ +'use strict' + +const { test } = require('tap') +const { Headers, FormData } = require('../..') + +test('Implements " Iterator" properly', (t) => { + t.test('all Headers iterators implement Headers Iterator', (t) => { + const headers = new Headers([['a', 'b'], ['c', 'd']]) + + for (const iterable of ['keys', 'values', 'entries', Symbol.iterator]) { + const gen = headers[iterable]() + // https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object + const IteratorPrototype = 
Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + const iteratorProto = Object.getPrototypeOf(gen) + + t.ok(gen.constructor === Object) + t.ok(gen.prototype === undefined) + // eslint-disable-next-line no-proto + t.equal(gen.__proto__[Symbol.toStringTag], 'Headers Iterator') + // https://github.com/node-fetch/node-fetch/issues/1119#issuecomment-100222049 + t.notOk(Headers.prototype[iterable] instanceof function * () {}.constructor) + // eslint-disable-next-line no-proto + t.ok(gen.__proto__.next.__proto__ === Function.prototype) + // https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + // "The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%." + t.equal(gen[Symbol.iterator], IteratorPrototype[Symbol.iterator]) + t.equal(Object.getPrototypeOf(iteratorProto), IteratorPrototype) + } + + t.end() + }) + + t.test('all FormData iterators implement FormData Iterator', (t) => { + const fd = new FormData() + + for (const iterable of ['keys', 'values', 'entries', Symbol.iterator]) { + const gen = fd[iterable]() + // https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object + const IteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + const iteratorProto = Object.getPrototypeOf(gen) + + t.ok(gen.constructor === Object) + t.ok(gen.prototype === undefined) + // eslint-disable-next-line no-proto + t.equal(gen.__proto__[Symbol.toStringTag], 'FormData Iterator') + // https://github.com/node-fetch/node-fetch/issues/1119#issuecomment-100222049 + t.notOk(Headers.prototype[iterable] instanceof function * () {}.constructor) + // eslint-disable-next-line no-proto + t.ok(gen.__proto__.next.__proto__ === Function.prototype) + // https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + // "The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%." 
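+ // i.e. the iterator's own prototype is the 'FormData Iterator' prototype object, and that object's prototype must be %IteratorPrototype%.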
+ t.equal(gen[Symbol.iterator], IteratorPrototype[Symbol.iterator]) + t.equal(Object.getPrototypeOf(iteratorProto), IteratorPrototype) + } + + t.end() + }) + + t.test('Iterator symbols are properly set', (t) => { + t.test('Headers', (t) => { + const headers = new Headers([['a', 'b'], ['c', 'd']]) + const gen = headers.entries() + + t.equal(typeof gen[Symbol.toStringTag], 'string') + t.equal(typeof gen[Symbol.iterator], 'function') + t.end() + }) + + t.test('FormData', (t) => { + const fd = new FormData() + const gen = fd.entries() + + t.equal(typeof gen[Symbol.toStringTag], 'string') + t.equal(typeof gen[Symbol.iterator], 'function') + t.end() + }) + + t.end() + }) + + t.test('Iterator does not inherit Generator prototype methods', (t) => { + t.test('Headers', (t) => { + const headers = new Headers([['a', 'b'], ['c', 'd']]) + const gen = headers.entries() + + t.equal(gen.return, undefined) + t.equal(gen.throw, undefined) + t.equal(typeof gen.next, 'function') + + t.end() + }) + + t.test('FormData', (t) => { + const fd = new FormData() + const gen = fd.entries() + + t.equal(gen.return, undefined) + t.equal(gen.throw, undefined) + t.equal(typeof gen.next, 'function') + + t.end() + }) + + t.end() + }) + + t.test('Symbol.iterator', (t) => { + // Headers + const headerValues = new Headers([['a', 'b']]).entries()[Symbol.iterator]() + t.same(Array.from(headerValues), [['a', 'b']]) + + // FormData + const formdata = new FormData() + formdata.set('a', 'b') + const formdataValues = formdata.entries()[Symbol.iterator]() + t.same(Array.from(formdataValues), [['a', 'b']]) + + t.end() + }) + + t.test('brand check', (t) => { + // Headers + t.throws(() => { + const gen = new Headers().entries() + // eslint-disable-next-line no-proto + gen.__proto__.next() + }, TypeError) + + // FormData + t.throws(() => { + const gen = new FormData().entries() + // eslint-disable-next-line no-proto + gen.__proto__.next() + }, TypeError) + + t.end() + }) + + t.end() +}) diff --git a/test/fetch/jsdom-abortcontroller-1910-1464495619.js b/test/fetch/jsdom-abortcontroller-1910-1464495619.js new file mode 100644 index 0000000..e5a86ab --- /dev/null +++ b/test/fetch/jsdom-abortcontroller-1910-1464495619.js @@ -0,0 +1,26 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { once } = require('events') +const { fetch } = require('../..') +const { JSDOM } = require('jsdom') + +// https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619 +test('third party AbortControllers', async (t) => { + const server = createServer((_, res) => res.end()).listen(0) + + const { AbortController } = new JSDOM().window + let controller = new AbortController() + + t.teardown(() => { + controller.abort() + controller = null + return server.close() + }) + await once(server, 'listening') + + await t.resolves(fetch(`http://localhost:${server.address().port}`, { + signal: controller.signal + })) +}) diff --git a/test/fetch/redirect-cross-origin-header.js b/test/fetch/redirect-cross-origin-header.js new file mode 100644 index 0000000..fca48c4 --- /dev/null +++ b/test/fetch/redirect-cross-origin-header.js @@ -0,0 +1,48 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { once } = require('events') +const { fetch } = require('../..') + +test('Cross-origin redirects clear forbidden headers', async (t) => { + t.plan(5) + + const server1 = createServer((req, res) => { + t.equal(req.headers.cookie, undefined) + t.equal(req.headers.authorization, undefined) + 
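+ // server1 is a different origin (different port) from server2, so the Authorization and Cookie headers sent to server2 must not survive the redirect.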
+ res.end('redirected') + }).listen(0) + + const server2 = createServer((req, res) => { + t.equal(req.headers.authorization, 'test') + t.equal(req.headers.cookie, 'ddd=dddd') + + res.writeHead(302, { + ...req.headers, + Location: `http://localhost:${server1.address().port}` + }) + res.end() + }).listen(0) + + t.teardown(() => { + server1.close() + server2.close() + }) + + await Promise.all([ + once(server1, 'listening'), + once(server2, 'listening') + ]) + + const res = await fetch(`http://localhost:${server2.address().port}`, { + headers: { + Authorization: 'test', + Cookie: 'ddd=dddd' + } + }) + + const text = await res.text() + t.equal(text, 'redirected') +}) diff --git a/test/fetch/redirect.js b/test/fetch/redirect.js new file mode 100644 index 0000000..7e3681b --- /dev/null +++ b/test/fetch/redirect.js @@ -0,0 +1,50 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { once } = require('events') +const { fetch } = require('../..') + +// https://github.com/nodejs/undici/issues/1776 +test('Redirecting with a body does not cancel the current request - #1776', async (t) => { + const server = createServer((req, res) => { + if (req.url === '/redirect') { + res.statusCode = 301 + res.setHeader('location', '/redirect/') + res.write('Moved Permanently') + setTimeout(() => res.end(), 500) + return + } + + res.write(req.url) + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const resp = await fetch(`http://localhost:${server.address().port}/redirect`) + t.equal(await resp.text(), '/redirect/') + t.ok(resp.redirected) +}) + +test('Redirecting with an empty body does not throw an error - #2027', async (t) => { + const server = createServer((req, res) => { + if (req.url === '/redirect') { + res.statusCode = 307 + res.setHeader('location', '/redirect/') + res.write('Moved Permanently') + res.end() + return + } + res.write(req.url) + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const resp = await fetch(`http://localhost:${server.address().port}/redirect`, { method: 'PUT', body: '' }) + t.equal(await resp.text(), '/redirect/') + t.ok(resp.redirected) +}) diff --git a/test/fetch/relative-url.js b/test/fetch/relative-url.js new file mode 100644 index 0000000..1a4f819 --- /dev/null +++ b/test/fetch/relative-url.js @@ -0,0 +1,110 @@ +'use strict' + +const { test, afterEach } = require('tap') +const { createServer } = require('http') +const { once } = require('events') +const { + getGlobalOrigin, + setGlobalOrigin, + Response, + Request, + fetch +} = require('../..') + +afterEach(() => setGlobalOrigin(undefined)) + +test('setGlobalOrigin & getGlobalOrigin', (t) => { + t.equal(getGlobalOrigin(), undefined) + + setGlobalOrigin('http://localhost:3000') + t.same(getGlobalOrigin(), new URL('http://localhost:3000')) + + setGlobalOrigin(undefined) + t.equal(getGlobalOrigin(), undefined) + + setGlobalOrigin(new URL('http://localhost:3000')) + t.same(getGlobalOrigin(), new URL('http://localhost:3000')) + + t.throws(() => { + setGlobalOrigin('invalid.url') + }, TypeError) + + t.throws(() => { + setGlobalOrigin('wss://invalid.protocol') + }, TypeError) + + t.throws(() => setGlobalOrigin(true)) + + t.end() +}) + +test('Response.redirect', (t) => { + t.throws(() => { + Response.redirect('/relative/path', 302) + }, TypeError('Failed to parse URL from /relative/path')) + + t.doesNotThrow(() => { + setGlobalOrigin('http://localhost:3000') + 
Response.redirect('/relative/path', 302) + }) + + setGlobalOrigin('http://localhost:3000') + const response = Response.redirect('/relative/path', 302) + // See step #7 of https://fetch.spec.whatwg.org/#dom-response-redirect + t.equal(response.headers.get('location'), 'http://localhost:3000/relative/path') + + t.end() +}) + +test('new Request', (t) => { + t.throws( + () => new Request('/relative/path'), + TypeError('Failed to parse URL from /relative/path') + ) + + t.doesNotThrow(() => { + setGlobalOrigin('http://localhost:3000') + // eslint-disable-next-line no-new + new Request('/relative/path') + }) + + setGlobalOrigin('http://localhost:3000') + const request = new Request('/relative/path') + t.equal(request.url, 'http://localhost:3000/relative/path') + + t.end() +}) + +test('fetch', async (t) => { + await t.rejects(async () => { + await fetch('/relative/path') + }, TypeError('Failed to parse URL from /relative/path')) + + t.test('Basic fetch', async (t) => { + const server = createServer((req, res) => { + t.equal(req.url, '/relative/path') + res.end() + }).listen(0) + + setGlobalOrigin(`http://localhost:${server.address().port}`) + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + await t.resolves(fetch('/relative/path')) + }) + + t.test('fetch return', async (t) => { + const server = createServer((req, res) => { + t.equal(req.url, '/relative/path') + res.end() + }).listen(0) + + setGlobalOrigin(`http://localhost:${server.address().port}`) + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const response = await fetch('/relative/path') + + t.equal(response.url, `http://localhost:${server.address().port}/relative/path`) + }) +}) diff --git a/test/fetch/request.js b/test/fetch/request.js new file mode 100644 index 0000000..db2c8e8 --- /dev/null +++ b/test/fetch/request.js @@ -0,0 +1,514 @@ +/* globals AbortController */ + +'use strict' + +const { test, teardown } = require('tap') +const { + Request, + Headers, + fetch +} = require('../../') +const { + Blob: ThirdPartyBlob, + FormData: ThirdPartyFormData +} = require('formdata-node') + +const hasSignalReason = 'reason' in AbortSignal.prototype + +test('arg validation', async (t) => { + // constructor + t.throws(() => { + // eslint-disable-next-line + new Request() + }, TypeError) + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', 0) + }, TypeError) + t.throws(() => { + const url = new URL('http://asd') + url.password = 'asd' + // eslint-disable-next-line + new Request(url) + }, TypeError) + t.throws(() => { + const url = new URL('http://asd') + url.username = 'asd' + // eslint-disable-next-line + new Request(url) + }, TypeError) + t.doesNotThrow(() => { + // eslint-disable-next-line + new Request('http://asd', undefined) + }, TypeError) + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + window: {} + }) + }, TypeError) + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + window: 1 + }) + }, TypeError) + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + mode: 'navigate' + }) + }) + + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + referrerPolicy: 'agjhagna' + }) + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + mode: 'agjhagna' + }) + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + credentials: 'agjhagna' + }) + }, TypeError) + + t.throws(() => { + // 
eslint-disable-next-line + new Request('http://asd', { + cache: 'agjhagna' + }) + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + method: 'agjhagnaöööö' + }) + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line + new Request('http://asd', { + method: 'TRACE' + }) + }, TypeError) + + t.throws(() => { + Request.prototype.destination.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.referrer.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.referrerPolicy.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.mode.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.credentials.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.cache.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.redirect.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.integrity.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.keepalive.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.isReloadNavigation.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.isHistoryNavigation.toString() + }, TypeError) + + t.throws(() => { + Request.prototype.signal.toString() + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line no-unused-expressions + Request.prototype.body + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line no-unused-expressions + Request.prototype.bodyUsed + }, TypeError) + + t.throws(() => { + Request.prototype.clone.call(null) + }, TypeError) + + t.doesNotThrow(() => { + Request.prototype[Symbol.toStringTag].charAt(0) + }) + + for (const method of [ + 'text', + 'json', + 'arrayBuffer', + 'blob', + 'formData' + ]) { + await t.rejects(async () => { + await new Request('http://localhost')[method].call({ + blob () { + return { + text () { + return Promise.resolve('emulating this') + } + } + } + }) + }, TypeError) + } + + t.end() +}) + +test('undefined window', t => { + t.doesNotThrow(() => new Request('http://asd', { window: undefined })) + t.end() +}) + +test('undefined body', t => { + const req = new Request('http://asd', { body: undefined }) + t.equal(req.body, null) + t.end() +}) + +test('undefined method', t => { + const req = new Request('http://asd', { method: undefined }) + t.equal(req.method, 'GET') + t.end() +}) + +test('undefined headers', t => { + const req = new Request('http://asd', { headers: undefined }) + t.equal([...req.headers.entries()].length, 0) + t.end() +}) + +test('undefined referrer', t => { + const req = new Request('http://asd', { referrer: undefined }) + t.equal(req.referrer, 'about:client') + t.end() +}) + +test('undefined referrerPolicy', t => { + const req = new Request('http://asd', { referrerPolicy: undefined }) + t.equal(req.referrerPolicy, '') + t.end() +}) + +test('undefined mode', t => { + const req = new Request('http://asd', { mode: undefined }) + t.equal(req.mode, 'cors') + t.end() +}) + +test('undefined credentials', t => { + const req = new Request('http://asd', { credentials: undefined }) + t.equal(req.credentials, 'same-origin') + t.end() +}) + +test('undefined cache', t => { + const req = new Request('http://asd', { cache: undefined }) + t.equal(req.cache, 'default') + t.end() +}) + +test('undefined redirect', t => { + const req = new Request('http://asd', { redirect: undefined }) + t.equal(req.redirect, 'follow') + t.end() +}) + +test('undefined keepalive', t => { + const req = new Request('http://asd', { keepalive: 
undefined }) + t.equal(req.keepalive, false) + t.end() +}) + +test('undefined integrity', t => { + const req = new Request('http://asd', { integrity: undefined }) + t.equal(req.integrity, '') + t.end() +}) + +test('null integrity', t => { + const req = new Request('http://asd', { integrity: null }) + t.equal(req.integrity, 'null') + t.end() +}) + +test('undefined signal', t => { + const req = new Request('http://asd', { signal: undefined }) + t.equal(req.signal.aborted, false) + t.end() +}) + +test('pre aborted signal', t => { + const ac = new AbortController() + ac.abort('gwak') + const req = new Request('http://asd', { signal: ac.signal }) + t.equal(req.signal.aborted, true) + if (hasSignalReason) { + t.equal(req.signal.reason, 'gwak') + } + t.end() +}) + +test('post aborted signal', t => { + t.plan(2) + + const ac = new AbortController() + const req = new Request('http://asd', { signal: ac.signal }) + t.equal(req.signal.aborted, false) + ac.signal.addEventListener('abort', () => { + if (hasSignalReason) { + t.equal(req.signal.reason, 'gwak') + } else { + t.pass() + } + }, { once: true }) + ac.abort('gwak') +}) + +test('pre aborted signal cloned', t => { + const ac = new AbortController() + ac.abort('gwak') + const req = new Request('http://asd', { signal: ac.signal }).clone() + t.equal(req.signal.aborted, true) + if (hasSignalReason) { + t.equal(req.signal.reason, 'gwak') + } + t.end() +}) + +test('URLSearchParams body with Headers object - issue #1407', async (t) => { + const body = new URLSearchParams({ + abc: 123 + }) + + const request = new Request( + 'http://localhost', + { + method: 'POST', + body, + headers: { + Authorization: 'test' + } + } + ) + + t.equal(request.headers.get('content-type'), 'application/x-www-form-urlencoded;charset=UTF-8') + t.equal(request.headers.get('authorization'), 'test') + t.equal(await request.text(), 'abc=123') +}) + +test('post aborted signal cloned', t => { + t.plan(2) + + const ac = new AbortController() + const req = new Request('http://asd', { signal: ac.signal }).clone() + t.equal(req.signal.aborted, false) + ac.signal.addEventListener('abort', () => { + if (hasSignalReason) { + t.equal(req.signal.reason, 'gwak') + } else { + t.pass() + } + }, { once: true }) + ac.abort('gwak') +}) + +test('Passing headers in init', (t) => { + // https://github.com/nodejs/undici/issues/1400 + t.test('Headers instance', (t) => { + const req = new Request('http://localhost', { + headers: new Headers({ key: 'value' }) + }) + + t.equal(req.headers.get('key'), 'value') + t.end() + }) + + t.test('key:value object', (t) => { + const req = new Request('http://localhost', { + headers: { key: 'value' } + }) + + t.equal(req.headers.get('key'), 'value') + t.end() + }) + + t.test('[key, value][]', (t) => { + const req = new Request('http://localhost', { + headers: [['key', 'value']] + }) + + t.equal(req.headers.get('key'), 'value') + t.end() + }) + + t.end() +}) + +test('Symbol.toStringTag', (t) => { + const req = new Request('http://localhost') + + t.equal(req[Symbol.toStringTag], 'Request') + t.equal(Request.prototype[Symbol.toStringTag], 'Request') + t.end() +}) + +test('invalid RequestInit values', (t) => { + /* eslint-disable no-new */ + t.throws(() => { + new Request('http://l', { mode: 'CoRs' }) + }, TypeError, 'not exact case = error') + + t.throws(() => { + new Request('http://l', { mode: 'random' }) + }, TypeError) + + t.throws(() => { + new Request('http://l', { credentials: 'OMIt' }) + }, TypeError, 'not exact case = error') + + t.throws(() => { + new 
Request('http://l', { credentials: 'random' }) + }, TypeError) + + t.throws(() => { + new Request('http://l', { cache: 'DeFaULt' }) + }, TypeError, 'not exact case = error') + + t.throws(() => { + new Request('http://l', { cache: 'random' }) + }, TypeError) + + t.throws(() => { + new Request('http://l', { redirect: 'FOllOW' }) + }, TypeError, 'not exact case = error') + + t.throws(() => { + new Request('http://l', { redirect: 'random' }) + }, TypeError) + /* eslint-enable no-new */ + + t.end() +}) + +test('RequestInit.signal option', async (t) => { + t.throws(() => { + // eslint-disable-next-line no-new + new Request('http://asd', { + signal: true + }) + }, TypeError) + + await t.rejects(fetch('http://asd', { + signal: false + }), TypeError) +}) + +test('constructing Request with third party Blob body', async (t) => { + const blob = new ThirdPartyBlob(['text']) + const req = new Request('http://asd', { + method: 'POST', + body: blob + }) + t.equal(await req.text(), 'text') +}) +test('constructing Request with third party FormData body', async (t) => { + const form = new ThirdPartyFormData() + form.set('key', 'value') + const req = new Request('http://asd', { + method: 'POST', + body: form + }) + const contentType = req.headers.get('content-type').split('=') + t.equal(contentType[0], 'multipart/form-data; boundary') + t.ok((await req.text()).startsWith(`--${contentType[1]}`)) +}) + +// https://github.com/nodejs/undici/issues/2050 +test('set-cookie headers get cleared when passing a Request as first param', (t) => { + const req1 = new Request('http://localhost', { + headers: { + 'set-cookie': 'a=1' + } + }) + + t.same([...req1.headers], [['set-cookie', 'a=1']]) + const req2 = new Request(req1, { headers: {} }) + + t.same([...req2.headers], []) + t.same(req2.headers.getSetCookie(), []) + t.end() +}) + +// https://github.com/nodejs/undici/issues/2124 +test('request.referrer', (t) => { + for (const referrer of ['about://client', 'about://client:1234']) { + const request = new Request('http://a', { referrer }) + + t.equal(request.referrer, 'about:client') + } + + t.end() +}) + +// https://github.com/nodejs/undici/issues/2445 +test('Clone the set-cookie header when Request is passed as the first parameter and no header is passed.', (t) => { + t.plan(2) + const request = new Request('http://localhost', { headers: { 'set-cookie': 'A' } }) + const request2 = new Request(request) + request2.headers.append('set-cookie', 'B') + t.equal(request.headers.getSetCookie().join(', '), request.headers.get('set-cookie')) + t.equal(request2.headers.getSetCookie().join(', '), request2.headers.get('set-cookie')) +}) + +// Tests for optimization introduced in https://github.com/nodejs/undici/pull/2456 +test('keys to object prototypes method', (t) => { + t.plan(1) + const request = new Request('http://localhost', { method: 'hasOwnProperty' }) + t.ok(typeof request.method === 'string') +}) + +// https://github.com/nodejs/undici/issues/2465 +test('Issue#2465', async (t) => { + t.plan(1) + const request = new Request('http://localhost', { body: new SharedArrayBuffer(0), method: 'POST' }) + t.equal(await request.text(), '[object SharedArrayBuffer]') +}) + +teardown(() => process.exit()) diff --git a/test/fetch/resource-timing.js b/test/fetch/resource-timing.js new file mode 100644 index 0000000..d266f28 --- /dev/null +++ b/test/fetch/resource-timing.js @@ -0,0 +1,72 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { nodeMajor, nodeMinor } = 
require('../../lib/core/util') +const { fetch } = require('../..') + +const { + PerformanceObserver, + performance +} = require('perf_hooks') + +const skip = nodeMajor < 18 || (nodeMajor === 18 && nodeMinor < 2) + +test('should create a PerformanceResourceTiming after each fetch request', { skip }, (t) => { + t.plan(8) + + const obs = new PerformanceObserver(list => { + const expectedResourceEntryName = `http://localhost:${server.address().port}/` + + const entries = list.getEntries() + t.equal(entries.length, 1) + const [entry] = entries + t.same(entry.name, expectedResourceEntryName) + t.strictSame(entry.entryType, 'resource') + + t.ok(entry.duration >= 0) + t.ok(entry.startTime >= 0) + + const entriesByName = list.getEntriesByName(expectedResourceEntryName) + t.equal(entriesByName.length, 1) + t.strictSame(entriesByName[0], entry) + + obs.disconnect() + performance.clearResourceTimings() + }) + + obs.observe({ entryTypes: ['resource'] }) + + const server = createServer((req, res) => { + res.end('ok') + }).listen(0, async () => { + const body = await fetch(`http://localhost:${server.address().port}`) + t.strictSame('ok', await body.text()) + }) + + t.teardown(server.close.bind(server)) +}) + +test('should include encodedBodySize in performance entry', { skip }, (t) => { + t.plan(4) + const obs = new PerformanceObserver(list => { + const [entry] = list.getEntries() + t.equal(entry.encodedBodySize, 2) + t.equal(entry.decodedBodySize, 2) + t.equal(entry.transferSize, 2 + 300) + + obs.disconnect() + performance.clearResourceTimings() + }) + + obs.observe({ entryTypes: ['resource'] }) + + const server = createServer((req, res) => { + res.end('ok') + }).listen(0, async () => { + const body = await fetch(`http://localhost:${server.address().port}`) + t.strictSame('ok', await body.text()) + }) + + t.teardown(server.close.bind(server)) +}) diff --git a/test/fetch/response-json.js b/test/fetch/response-json.js new file mode 100644 index 0000000..6244fbf --- /dev/null +++ b/test/fetch/response-json.js @@ -0,0 +1,113 @@ +'use strict' + +const { test } = require('tap') +const { Response } = require('../../') + +// https://github.com/web-platform-tests/wpt/pull/32825/ + +const APPLICATION_JSON = 'application/json' +const FOO_BAR = 'foo/bar' + +const INIT_TESTS = [ + [undefined, 200, '', APPLICATION_JSON, {}], + [{ status: 400 }, 400, '', APPLICATION_JSON, {}], + [{ statusText: 'foo' }, 200, 'foo', APPLICATION_JSON, {}], + [{ headers: {} }, 200, '', APPLICATION_JSON, {}], + [{ headers: { 'content-type': FOO_BAR } }, 200, '', FOO_BAR, {}], + [{ headers: { 'x-foo': 'bar' } }, 200, '', APPLICATION_JSON, { 'x-foo': 'bar' }] +] + +test('Check response returned by static json() with init', async (t) => { + for (const [init, expectedStatus, expectedStatusText, expectedContentType, expectedHeaders] of INIT_TESTS) { + const response = Response.json('hello world', init) + t.equal(response.type, 'default', "Response's type is default") + t.equal(response.status, expectedStatus, "Response's status is " + expectedStatus) + t.equal(response.statusText, expectedStatusText, "Response's statusText is " + JSON.stringify(expectedStatusText)) + t.equal(response.headers.get('content-type'), expectedContentType, "Response's content-type is " + expectedContentType) + for (const key in expectedHeaders) { + t.equal(response.headers.get(key), expectedHeaders[key], "Response's header " + key + ' is ' + JSON.stringify(expectedHeaders[key])) + } + + const data = await response.json() + t.equal(data, 'hello world', "Response's body 
is 'hello world'") + } + + t.end() +}) + +test('Throws TypeError when calling static json() with an invalid status', (t) => { + const nullBodyStatus = [204, 205, 304] + + for (const status of nullBodyStatus) { + t.throws(() => { + Response.json('hello world', { status }) + }, TypeError, `Throws TypeError when calling static json() with a status of ${status}`) + } + + t.end() +}) + +test('Check static json() encodes JSON objects correctly', async (t) => { + const response = Response.json({ foo: 'bar' }) + const data = await response.json() + t.equal(typeof data, 'object', "Response's json body is an object") + t.equal(data.foo, 'bar', "Response's json body is { foo: 'bar' }") + + t.end() +}) + +test('Check static json() throws when data is not encodable', (t) => { + t.throws(() => { + Response.json(Symbol('foo')) + }, TypeError) + + t.end() +}) + +test('Check static json() throws when data is circular', (t) => { + const a = { b: 1 } + a.a = a + + t.throws(() => { + Response.json(a) + }, TypeError) + + t.end() +}) + +test('Check static json() propagates JSON serializer errors', (t) => { + class CustomError extends Error { + name = 'CustomError' + } + + t.throws(() => { + Response.json({ get foo () { throw new CustomError('bar') } }) + }, CustomError) + + t.end() +}) + +// note: these tests are not part of any WPTs +test('unserializable values', (t) => { + t.throws(() => { + Response.json(Symbol('symbol')) + }, TypeError) + + t.throws(() => { + Response.json(undefined) + }, TypeError) + + t.throws(() => { + Response.json() + }, TypeError) + + t.end() +}) + +test('invalid init', (t) => { + t.throws(() => { + Response.json(null, 3) + }, TypeError) + + t.end() +}) diff --git a/test/fetch/response.js b/test/fetch/response.js new file mode 100644 index 0000000..422c7ef --- /dev/null +++ b/test/fetch/response.js @@ -0,0 +1,257 @@ +'use strict' + +const { test } = require('tap') +const { + Response +} = require('../../') +const { ReadableStream } = require('stream/web') +const { + Blob: ThirdPartyBlob, + FormData: ThirdPartyFormData +} = require('formdata-node') + +test('arg validation', async (t) => { + // constructor + t.throws(() => { + // eslint-disable-next-line + new Response(null, 0) + }, TypeError) + t.throws(() => { + // eslint-disable-next-line + new Response(null, { + status: 99 + }) + }, RangeError) + t.throws(() => { + // eslint-disable-next-line + new Response(null, { + status: 600 + }) + }, RangeError) + t.throws(() => { + // eslint-disable-next-line + new Response(null, { + status: '600' + }) + }, RangeError) + t.throws(() => { + // eslint-disable-next-line + new Response(null, { + statusText: '\u0000' + }) + }, TypeError) + + for (const nullStatus of [204, 205, 304]) { + t.throws(() => { + // eslint-disable-next-line + new Response(new ArrayBuffer(16), { + status: nullStatus + }) + }, TypeError) + } + + t.doesNotThrow(() => { + Response.prototype[Symbol.toStringTag].charAt(0) + }, TypeError) + + t.throws(() => { + Response.prototype.type.toString() + }, TypeError) + + t.throws(() => { + Response.prototype.url.toString() + }, TypeError) + + t.throws(() => { + Response.prototype.redirected.toString() + }, TypeError) + + t.throws(() => { + Response.prototype.status.toString() + }, TypeError) + + t.throws(() => { + Response.prototype.ok.toString() + }, TypeError) + + t.throws(() => { + Response.prototype.statusText.toString() + }, TypeError) + + t.throws(() => { + Response.prototype.headers.toString() + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line 
no-unused-expressions + Response.prototype.body + }, TypeError) + + t.throws(() => { + // eslint-disable-next-line no-unused-expressions + Response.prototype.bodyUsed + }, TypeError) + + t.throws(() => { + Response.prototype.clone.call(null) + }, TypeError) + + await t.rejects(async () => { + await new Response('http://localhost').text.call({ + blob () { + return { + text () { + return Promise.resolve('emulating response.blob()') + } + } + } + }) + }, TypeError) + + t.end() +}) + +test('response clone', (t) => { + // https://github.com/nodejs/undici/issues/1122 + const response1 = new Response(null, { status: 201 }) + const response2 = new Response(undefined, { status: 201 }) + + t.equal(response1.body, response1.clone().body) + t.equal(response2.body, response2.clone().body) + t.equal(response2.body, null) + t.end() +}) + +test('Symbol.toStringTag', (t) => { + const resp = new Response() + + t.equal(resp[Symbol.toStringTag], 'Response') + t.equal(Response.prototype[Symbol.toStringTag], 'Response') + t.end() +}) + +test('async iterable body', async (t) => { + const asyncIterable = { + async * [Symbol.asyncIterator] () { + yield 'a' + yield 'b' + yield 'c' + } + } + + const response = new Response(asyncIterable) + t.equal(await response.text(), 'abc') + t.end() +}) + +// https://github.com/nodejs/node/pull/43752#issuecomment-1179678544 +test('Modifying headers using Headers.prototype.set', (t) => { + const response = new Response('body', { + headers: { + 'content-type': 'test/test', + 'Content-Encoding': 'hello/world' + } + }) + + const response2 = response.clone() + + response.headers.set('content-type', 'application/wasm') + response.headers.set('Content-Encoding', 'world/hello') + + t.equal(response.headers.get('content-type'), 'application/wasm') + t.equal(response.headers.get('Content-Encoding'), 'world/hello') + + response2.headers.delete('content-type') + response2.headers.delete('Content-Encoding') + + t.equal(response2.headers.get('content-type'), null) + t.equal(response2.headers.get('Content-Encoding'), null) + + t.end() +}) + +// https://github.com/nodejs/node/issues/43838 +test('constructing a Response with a ReadableStream body', { skip: process.version.startsWith('v16.') }, async (t) => { + const text = '{"foo":"bar"}' + const uint8 = new TextEncoder().encode(text) + + t.test('Readable stream with Uint8Array chunks', async (t) => { + const readable = new ReadableStream({ + start (controller) { + controller.enqueue(uint8) + controller.close() + } + }) + + const response1 = new Response(readable) + const response2 = response1.clone() + const response3 = response1.clone() + + t.equal(await response1.text(), text) + t.same(await response2.arrayBuffer(), uint8.buffer) + t.same(await response3.json(), JSON.parse(text)) + + t.end() + }) + + t.test('Readable stream with non-Uint8Array chunks', async (t) => { + const readable = new ReadableStream({ + start (controller) { + controller.enqueue(text) // string + controller.close() + } + }) + + const response = new Response(readable) + + await t.rejects(response.text(), TypeError) + + t.end() + }) + + t.test('Readable with ArrayBuffer chunk still throws', { skip: process.version.startsWith('v16.') }, async (t) => { + const readable = new ReadableStream({ + start (controller) { + controller.enqueue(uint8.buffer) + controller.close() + } + }) + + const response1 = new Response(readable) + const response2 = response1.clone() + const response3 = response1.clone() + // const response4 = response1.clone() + + await 
t.rejects(response1.arrayBuffer(), TypeError) + await t.rejects(response2.text(), TypeError) + await t.rejects(response3.json(), TypeError) + // TODO: on Node v16.8.0, this throws a TypeError + // because the body is detected as disturbed. + // await t.rejects(response4.blob(), TypeError) + + t.end() + }) + + t.end() +}) + +test('constructing Response with third party Blob body', async (t) => { + const blob = new ThirdPartyBlob(['text']) + const res = new Response(blob) + t.equal(await res.text(), 'text') +}) +test('constructing Response with third party FormData body', async (t) => { + const form = new ThirdPartyFormData() + form.set('key', 'value') + const res = new Response(form) + const contentType = res.headers.get('content-type').split('=') + t.equal(contentType[0], 'multipart/form-data; boundary') + t.ok((await res.text()).startsWith(`--${contentType[1]}`)) +}) + +// https://github.com/nodejs/undici/issues/2465 +test('Issue#2465', async (t) => { + t.plan(1) + const response = new Response(new SharedArrayBuffer(0)) + t.equal(await response.text(), '[object SharedArrayBuffer]') +}) diff --git a/test/fetch/user-agent.js b/test/fetch/user-agent.js new file mode 100644 index 0000000..2e37ea5 --- /dev/null +++ b/test/fetch/user-agent.js @@ -0,0 +1,32 @@ +'use strict' + +const { test, skip } = require('tap') +const events = require('events') +const http = require('http') +const undici = require('../../') +const { nodeMajor } = require('../../lib/core/util') + +if (nodeMajor === 16) { + skip('esbuild uses static blocks with --keep-names which node 16.8 does not have') + process.exit() +} + +const nodeBuild = require('../../undici-fetch.js') + +test('user-agent defaults correctly', async (t) => { + const server = http.createServer((req, res) => { + res.end(JSON.stringify({ userAgentHeader: req.headers['user-agent'] })) + }) + t.teardown(server.close.bind(server)) + + server.listen(0) + await events.once(server, 'listening') + const url = `http://localhost:${server.address().port}` + const [nodeBuildJSON, undiciJSON] = await Promise.all([ + nodeBuild.fetch(url).then((body) => body.json()), + undici.fetch(url).then((body) => body.json()) + ]) + + t.same(nodeBuildJSON.userAgentHeader, 'node') + t.same(undiciJSON.userAgentHeader, 'undici') +}) diff --git a/test/fetch/util.js b/test/fetch/util.js new file mode 100644 index 0000000..02b75bc --- /dev/null +++ b/test/fetch/util.js @@ -0,0 +1,281 @@ +'use strict' + +const t = require('tap') +const { test } = t + +const util = require('../../lib/fetch/util') +const { HeadersList } = require('../../lib/fetch/headers') + +test('responseURL', (t) => { + t.plan(2) + + t.ok(util.responseURL({ + urlList: [ + new URL('http://asd'), + new URL('http://fgh') + ] + })) + t.notOk(util.responseURL({ + urlList: [] + })) +}) + +test('responseLocationURL', (t) => { + t.plan(3) + + const acceptHeaderList = new HeadersList() + acceptHeaderList.append('Accept', '*/*') + + const locationHeaderList = new HeadersList() + locationHeaderList.append('Location', 'http://asd') + + t.notOk(util.responseLocationURL({ + status: 200 + })) + t.notOk(util.responseLocationURL({ + status: 301, + headersList: acceptHeaderList + })) + t.ok(util.responseLocationURL({ + status: 301, + headersList: locationHeaderList, + urlList: [ + new URL('http://asd'), + new URL('http://fgh') + ] + })) +}) + +test('requestBadPort', (t) => { + t.plan(3) + + t.equal('allowed', util.requestBadPort({ + urlList: [new URL('https://asd')] + })) + t.equal('blocked', util.requestBadPort({ + urlList: [new 
URL('http://asd:7')] + })) + t.equal('blocked', util.requestBadPort({ + urlList: [new URL('https://asd:7')] + })) +}) + +// https://html.spec.whatwg.org/multipage/origin.html#same-origin +// look at examples +test('sameOrigin', (t) => { + t.test('first test', (t) => { + const A = { + protocol: 'https:', + hostname: 'example.org', + port: '' + } + + const B = { + protocol: 'https:', + hostname: 'example.org', + port: '' + } + + t.ok(util.sameOrigin(A, B)) + t.end() + }) + + t.test('second test', (t) => { + const A = { + protocol: 'https:', + hostname: 'example.org', + port: '314' + } + + const B = { + protocol: 'https:', + hostname: 'example.org', + port: '420' + } + + t.notOk(util.sameOrigin(A, B)) + t.end() + }) + + t.test('obviously shouldn\'t be equal', (t) => { + t.notOk(util.sameOrigin( + { protocol: 'http:', hostname: 'example.org' }, + { protocol: 'https:', hostname: 'example.org' } + )) + + t.notOk(util.sameOrigin( + { protocol: 'https:', hostname: 'example.org' }, + { protocol: 'https:', hostname: 'example.com' } + )) + + t.end() + }) + + t.test('file:// urls', (t) => { + // urls with opaque origins should return true + + const a = new URL('file:///C:/undici') + const b = new URL('file:///var/undici') + + t.ok(util.sameOrigin(a, b)) + t.end() + }) + + t.end() +}) + +test('isURLPotentiallyTrustworthy', (t) => { + const valid = ['http://127.0.0.1', 'http://localhost.localhost', + 'http://[::1]', 'http://adb.localhost', 'https://something.com', 'wss://hello.com', + 'file:///link/to/file.txt', 'data:text/plain;base64,randomstring', 'about:blank', 'about:srcdoc'] + const invalid = ['http://121.3.4.5:55', 'null:8080', 'something:8080'] + + t.plan(valid.length + invalid.length + 1) + t.notOk(util.isURLPotentiallyTrustworthy('string')) + + for (const url of valid) { + const instance = new URL(url) + t.ok(util.isURLPotentiallyTrustworthy(instance)) + } + + for (const url of invalid) { + const instance = new URL(url) + t.notOk(util.isURLPotentiallyTrustworthy(instance)) + } +}) + +test('setRequestReferrerPolicyOnRedirect', nested => { + nested.plan(7) + + nested.test('should set referrer policy from response headers on redirect', t => { + const request = { + referrerPolicy: 'no-referrer, strict-origin-when-cross-origin' + } + + const actualResponse = { + headersList: new HeadersList() + } + + t.plan(1) + + actualResponse.headersList.append('Connection', 'close') + actualResponse.headersList.append('Location', 'https://some-location.com/redirect') + actualResponse.headersList.append('Referrer-Policy', 'origin') + util.setRequestReferrerPolicyOnRedirect(request, actualResponse) + + t.equal(request.referrerPolicy, 'origin') + }) + + nested.test('should select the first valid policy from a response', t => { + const request = { + referrerPolicy: 'no-referrer, strict-origin-when-cross-origin' + } + + const actualResponse = { + headersList: new HeadersList() + } + + t.plan(1) + + actualResponse.headersList.append('Connection', 'close') + actualResponse.headersList.append('Location', 'https://some-location.com/redirect') + actualResponse.headersList.append('Referrer-Policy', 'asdas, origin') + util.setRequestReferrerPolicyOnRedirect(request, actualResponse) + + t.equal(request.referrerPolicy, 'origin') + }) + + nested.test('should select the first valid policy from a response#2', t => { + const request = { + referrerPolicy: 'no-referrer, strict-origin-when-cross-origin' + } + + const actualResponse = { + headersList: new HeadersList() + } + + t.plan(1) + + 
actualResponse.headersList.append('Connection', 'close') + actualResponse.headersList.append('Location', 'https://some-location.com/redirect') + actualResponse.headersList.append('Referrer-Policy', 'no-referrer, asdas, origin, 0943sd') + util.setRequestReferrerPolicyOnRedirect(request, actualResponse) + + t.equal(request.referrerPolicy, 'origin') + }) + + nested.test('should pick the last fallback over invalid policy tokens', t => { + const request = { + referrerPolicy: 'no-referrer, strict-origin-when-cross-origin' + } + + const actualResponse = { + headersList: new HeadersList() + } + + t.plan(1) + + actualResponse.headersList.append('Connection', 'close') + actualResponse.headersList.append('Location', 'https://some-location.com/redirect') + actualResponse.headersList.append('Referrer-Policy', 'origin, asdas, asdaw34') + util.setRequestReferrerPolicyOnRedirect(request, actualResponse) + + t.equal(request.referrerPolicy, 'origin') + }) + + nested.test('should set not change request referrer policy if no Referrer-Policy from initial redirect response', t => { + const request = { + referrerPolicy: 'no-referrer, strict-origin-when-cross-origin' + } + + const actualResponse = { + headersList: new HeadersList() + } + + t.plan(1) + + actualResponse.headersList.append('Connection', 'close') + actualResponse.headersList.append('Location', 'https://some-location.com/redirect') + util.setRequestReferrerPolicyOnRedirect(request, actualResponse) + + t.equal(request.referrerPolicy, 'no-referrer, strict-origin-when-cross-origin') + }) + + nested.test('should set not change request referrer policy if the policy is a non-valid Referrer Policy', t => { + const initial = 'no-referrer, strict-origin-when-cross-origin' + const request = { + referrerPolicy: initial + } + const actualResponse = { + headersList: new HeadersList() + } + + t.plan(1) + + actualResponse.headersList.append('Connection', 'close') + actualResponse.headersList.append('Location', 'https://some-location.com/redirect') + actualResponse.headersList.append('Referrer-Policy', 'asdasd') + util.setRequestReferrerPolicyOnRedirect(request, actualResponse) + + t.equal(request.referrerPolicy, initial) + }) + + nested.test('should set not change request referrer policy if the policy is a non-valid Referrer Policy', t => { + const initial = 'no-referrer, strict-origin-when-cross-origin' + const request = { + referrerPolicy: initial + } + const actualResponse = { + headersList: new HeadersList() + } + + t.plan(1) + + actualResponse.headersList.append('Connection', 'close') + actualResponse.headersList.append('Location', 'https://some-location.com/redirect') + actualResponse.headersList.append('Referrer-Policy', 'asdasd, asdasa, 12daw,') + util.setRequestReferrerPolicyOnRedirect(request, actualResponse) + + t.equal(request.referrerPolicy, initial) + }) +}) diff --git a/test/fixed-queue.js b/test/fixed-queue.js new file mode 100644 index 0000000..812f421 --- /dev/null +++ b/test/fixed-queue.js @@ -0,0 +1,38 @@ +'use strict' + +const { test } = require('tap') + +const FixedQueue = require('../lib/node/fixed-queue') + +test('fixed queue 1', (t) => { + t.plan(5) + + const queue = new FixedQueue() + t.equal(queue.head, queue.tail) + t.ok(queue.isEmpty()) + queue.push('a') + t.ok(!queue.isEmpty()) + t.equal(queue.shift(), 'a') + t.equal(queue.shift(), null) +}) + +test('fixed queue 2', (t) => { + t.plan(7 + 2047) + + const queue = new FixedQueue() + for (let i = 0; i < 2047; i++) { + queue.push('a') + } + t.ok(queue.head.isFull()) + queue.push('a') + 
t.ok(!queue.head.isFull()) + + t.not(queue.head, queue.tail) + for (let i = 0; i < 2047; i++) { + t.equal(queue.shift(), 'a') + } + t.equal(queue.head, queue.tail) + t.ok(!queue.isEmpty()) + t.equal(queue.shift(), 'a') + t.ok(queue.isEmpty()) +}) diff --git a/test/fixtures/ca.pem b/test/fixtures/ca.pem new file mode 100644 index 0000000..c126543 --- /dev/null +++ b/test/fixtures/ca.pem @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIIChDCCAe2gAwIBAgIJAMsVOuISYJ/GMA0GCSqGSIb3DQEBCwUAMHoxCzAJBgNV +BAYTAlVTMQswCQYDVQQIDAJDQTELMAkGA1UEBwwCU0YxDzANBgNVBAoMBkpveWVu +dDEQMA4GA1UECwwHTm9kZS5qczEMMAoGA1UEAwwDY2ExMSAwHgYJKoZIhvcNAQkB +FhFyeUB0aW55Y2xvdWRzLm9yZzAgFw0xODExMTYxODQyMjBaGA8yMjkyMDgzMDE4 +NDIyMFowejELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjEP +MA0GA1UECgwGSm95ZW50MRAwDgYDVQQLDAdOb2RlLmpzMQwwCgYDVQQDDANjYTEx +IDAeBgkqhkiG9w0BCQEWEXJ5QHRpbnljbG91ZHMub3JnMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDrNdKjVKhbxKbrDRLdy45u9vsU3IH8C3qFcLF5wqf+g7OC +vMOOrFDM6mL5iYwuYaLRvAtsC0mtGPzBGyFflxGhiBYaOhi7nCKEsUkFuNYlCzX+ +FflT04JYT3qWPLL7rT32GXpABND/8DEnj5D5liYYNR05PjV1fUnGg1gPqXVxbwID +AQABoxAwDjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4GBAHhsWFy6m6VO +AjK14n0XCSM66ltk9qMKpOryXneLhmmkOQbJd7oavueUWzMdszWLMKhrBoXjmvuW +QceutP9IUq1Kzw7a/B+lLPD90xfLMr7tNLAxZoJmq/NAUI63M3nJGpX0HkjnYwoU +ekzNkKt5TggwcqqzK+cCSG1wDvJ+wjiD +-----END CERTIFICATE----- diff --git a/test/fixtures/cert.pem b/test/fixtures/cert.pem new file mode 100644 index 0000000..664d00c --- /dev/null +++ b/test/fixtures/cert.pem @@ -0,0 +1,18 @@ +-----BEGIN CERTIFICATE----- +MIIC2DCCAkGgAwIBAgIJAOzJuFYnDamoMA0GCSqGSIb3DQEBCwUAMHoxCzAJBgNV +BAYTAlVTMQswCQYDVQQIDAJDQTELMAkGA1UEBwwCU0YxDzANBgNVBAoMBkpveWVu +dDEQMA4GA1UECwwHTm9kZS5qczEMMAoGA1UEAwwDY2ExMSAwHgYJKoZIhvcNAQkB +FhFyeUB0aW55Y2xvdWRzLm9yZzAgFw0xODExMTYxODQyMjFaGA8yMjkyMDgzMDE4 +NDIyMVowfTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQswCQYDVQQHDAJTRjEP +MA0GA1UECgwGSm95ZW50MRAwDgYDVQQLDAdOb2RlLmpzMQ8wDQYDVQQDDAZhZ2Vu +dDExIDAeBgkqhkiG9w0BCQEWEXJ5QHRpbnljbG91ZHMub3JnMIGfMA0GCSqGSIb3 +DQEBAQUAA4GNADCBiQKBgQDvVEBwFjfiirsDjlZB+CjYNMNCqdJe27hqK/b72AnL +jgN6mLcXCOABJC5N61TGFkiF9Zndh6IyFXRZVb4gQX4zxNDRuAydo95BmiYHGV0v +t1ZXsLv7XrfQu6USLRtpZMe1cNULjsAB7raN+1hEN1CPMSmSjWc7MKPgv09QYJ5j +cQIDAQABo2EwXzBdBggrBgEFBQcBAQRRME8wIwYIKwYBBQUHMAGGF2h0dHA6Ly9v +Y3NwLm5vZGVqcy5vcmcvMCgGCCsGAQUFBzAChhxodHRwOi8vY2Eubm9kZWpzLm9y +Zy9jYS5jZXJ0MA0GCSqGSIb3DQEBCwUAA4GBAHrKvx2Z4fsF7b3VRgiIbdbFCfxY +ICvoJ0+BObYPjqIZZm9+/5c36SpzKzGO9CN9qUEj3KxPmijnb+Zjsm1CSCrG1m04 +C73+AjAIPnQ+eWZnF1K4L2kuEDTpv8nQzYKYiGxsmW58PSMeAq1TmaFwtSW3TxHX +7ROnqBX0uXQlOo1m +-----END CERTIFICATE----- diff --git a/test/fixtures/client-ca-crt.pem b/test/fixtures/client-ca-crt.pem new file mode 100644 index 0000000..3abfd04 --- /dev/null +++ b/test/fixtures/client-ca-crt.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICqDCCAZACCQC0Hman8CosTDANBgkqhkiG9w0BAQsFADAVMRMwEQYDVQQDDApu +b2RlanMub3JnMCAXDTIyMDcxOTE2MzQwMloYDzIxMjIwNzIwMTYzNDAyWjAVMRMw +EQYDVQQDDApub2RlanMub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAyrmvIOhsVJAinUZ0Np4o5cPz09arWAZnnDsMnU0d+NtI0lWOFCnpzJbER9eB +gJpRkOdkcsQFr0OcalExG4lQrj+yGdtLGSXVcE0aNsVSBNbNgaLbOFWfpA4c7pTF +SBLJdJ7pZ2LDrM2mXaQA30di3INsZOvuTnDSAEE8bwxnM7jDnTCOGD4asgzgknHa +NqYWJqrfEPoMcEtThX9XjBLlRq5X3YFAR8SRbMQDt2xbDLWO8mGo/y4Ezp+ol9dP +OdkX3f728EIgfk8fM7rpvHzJb8E6NPdKK/kqCjQxRJ4RMsRqKwiTgPcEqut0L6Kg +jGoDvOnc3dZ2QBrxGTYPrgZF2QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQA2DC4n +GNqQIABC82e3CovVH/LYB8M/PaqMwmXDI8kAKwk3j3lTHYD0WIyaFtCL4z/2GyDs +sgRmMlx5xVgXNv+8e793TMOqJ/0zixijguatR8r9GWdPAPhqCyCNrmUA26eyHEUV 
+Hx9mU7RNjv+qVe7fNXBkDorsyecclnDcxUd9k2C+RbjitnSKvhP64XqxAGk49HUH +3gw5uZw9uVlmD/dPSeKeSO4TX1HECH+WmPBKrBrcFGXNwGNzst8pFe3YVLLuseIq +4d5ngaOThGzVDJdsGIxhDfDBfH5FzDTMgEJxQQ3yXYwPR3zF4Ntn13oDkIu/vgbH +4n1eYIau6/1Y9OLX +-----END CERTIFICATE----- diff --git a/test/fixtures/client-crt-2048.pem b/test/fixtures/client-crt-2048.pem new file mode 100644 index 0000000..6d07ec1 --- /dev/null +++ b/test/fixtures/client-crt-2048.pem @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDkzCCAnugAwIBAgIUF2CLbUCxPnxARRlO7pANiXtZoLIwDQYJKoZIhvcNAQEL +BQAwWTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MB4X +DTIyMDYwOTE0Mzc0N1oXDTI1MDMwNDE0Mzc0N1owWTELMAkGA1UEBhMCQVUxEzAR +BgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5 +IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEA4PbcFnMY0FC1wzsyMf04GhOx/KNcOalHu4Wy76Wys+WoJ6hO5z87 +ZIcmsg0hbys1l6DGxloTXeZwcBDoOndUg3FBZvAXRKimhXA7Qf31a9efq9GXic2W +7Kyn1jPa724Vkr/zzlWb5I/Qkk6xcQmEFCDhilbMtpnPz/BwOwn/2vbcbiHNirUk +Dn+s0pUcQlin1f2AR4Jq7/K1xsqjjB6cU0chuzrwzwrglQS7jpXQxCsRaAAIZQJB +DTVQBEo/skqWwv8xABlVQgolxABIX3Wc3RUk7xRItdWCMe92/BJCGhWVXb2hUCBu +y/yz5hX9p353JlxmXEKQlhfPzhcdDv2sdwIDAQABo1MwUTAdBgNVHQ4EFgQUQ0di +dFnBDLhSDgHpM+/KBn+WmI4wHwYDVR0jBBgwFoAUQ0didFnBDLhSDgHpM+/KBn+W +mI4wDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAoCQJci8G+cUF +n030frY/OgnLJXUGC2ed9Tu+dYhEE7XQkX9WO3IK8As+jGY0kKzX7ZsvWAHHbSa3 +8qmHh1vWflU9HEc0MO0Toy6Ale2/BCjs0Oy3q2vd6t9pl3Pq2JTHyJNYu44h45we +ufQ+ttylHGZSmAqeHz4yGp1xVvjbfriDYuc0kW9UTwMpdpzR9RmqQEVD4ySxpuYV +FTj/ZiY89GdIJvsz1pmAhTUcUfuMgSlWS1nt0YR4yMkFS8KqQ1iKEApjrdDCU48W +eABaPeTCUlBCFEDuKxFVPduYVVvOHtkX/8LPH3CO7EDMoSZ1iCDZ7b2+AZbwh9j+ +dXqw+WFi7w== +-----END CERTIFICATE----- diff --git a/test/fixtures/client-crt.pem b/test/fixtures/client-crt.pem new file mode 100644 index 0000000..2bd94df --- /dev/null +++ b/test/fixtures/client-crt.pem @@ -0,0 +1,17 @@ +-----BEGIN CERTIFICATE----- +MIICpDCCAYwCCQCWvC2NnLEpZjANBgkqhkiG9w0BAQUFADAVMRMwEQYDVQQDDApu +b2RlanMub3JnMCAXDTIyMDcxOTE2NDE1OFoYDzIxMjIwNzIwMTY0MTU4WjARMQ8w +DQYDVQQLDAZVbmRpY2kwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDR +SJvCSXTHrmnGz/CN94nxgmnUD17jYzfJH+lbcJkw4RDHpb6KZ85LEijeKoYoGw+c +Z7a4LfmpIR4rcN3sJWGvafJyFx4DtLYPZiNrCaMsdMWiHbbMwrpvSsf5Fq3vVeUz +Py7wxzSRiM4VOwZ7fhCJdj2YIeQJgeIZh+NN/4mpyWehS4hQSHG+cbS4c44vkET0 +Hv48G7m+4ULFCZzmG2AIW8Drh73Wymmm3kymD3kDCAY4SDSJDArxNt6lJ3sGJGO6 +jobefLFyqvLj5544Lvk4C8hD3O+e9M3OHcdyqRXf55dZ8SIWgpoGVGXb5V5g3WL/ +ncXF87jm05pMZXqOz0wdAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBAK2YxxGEDgqG +tp8uX/n0nFAj1p8sfkuD+FqYg7+PN/HYqCq6Ibrz/vVABL5Khb4qQzZN/ckJhY3k +bfwEjRTOoXMhPv+IkShMDdbTunwSQUXqeLe+qmPbLt5ZccxcYVIzEhJMlnjeJ4nk +NHg3BXt8y6mIIfY0Sv4znTkV995GHLK3Ax/Fd/2aio6aRCzkBCdaXY8j0SOzFHVy ++AvgRj04K2yBEEHd4bQTdLCJQR/gFQnGj37gXQp9I4qq+/1qj4sTs8BufnGKTDVT +/jYeycIY3l4A8/72NmDSIohaJTPwFUoXNBYywOnW71+Y05PXT45lJuaOJUf2s9iH +p/eTiEsfHsk= +-----END CERTIFICATE----- diff --git a/test/fixtures/client-key-2048.pem b/test/fixtures/client-key-2048.pem new file mode 100644 index 0000000..b7dffa6 --- /dev/null +++ b/test/fixtures/client-key-2048.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEA4PbcFnMY0FC1wzsyMf04GhOx/KNcOalHu4Wy76Wys+WoJ6hO +5z87ZIcmsg0hbys1l6DGxloTXeZwcBDoOndUg3FBZvAXRKimhXA7Qf31a9efq9GX +ic2W7Kyn1jPa724Vkr/zzlWb5I/Qkk6xcQmEFCDhilbMtpnPz/BwOwn/2vbcbiHN +irUkDn+s0pUcQlin1f2AR4Jq7/K1xsqjjB6cU0chuzrwzwrglQS7jpXQxCsRaAAI +ZQJBDTVQBEo/skqWwv8xABlVQgolxABIX3Wc3RUk7xRItdWCMe92/BJCGhWVXb2h 
+UCBuy/yz5hX9p353JlxmXEKQlhfPzhcdDv2sdwIDAQABAoIBAFVfeaCPZ2BO8Nu5 +UFBGP48t4EL3H93GDzHsCD8IC+xXgFwkdGUvyvNYkufJMeIFbN4xJp5JusXM2Oi+ +kdL2TD1hsqdFAB+PPTqwn9xoa0XU24SSEsc6HUeOMleI8FIi3c8GR5kLRhEUPtv3 +P0GdkeEtpUohrKizcHkCTyUoo09N35MFoH3Nb1iyMd10uq0iQlusljkTuukcHstK +MZQAYYcslqzyz9468O/cvsk23Ynd5FfjLgYKmdJ09qaxm4ptnF9NNJ2cLqwElbUF +xI3H5L/t1zxdwI0xZFFgDA4Ccpeq9QsRhRJGAOV94tN+4PxWXEPeQk4PM1EFDrNU +yysi/XkCgYEA+ElKG6cWQZydsb5Tk1vdJ/k18gZa5sv+WUGXkfm9EVecftGjtKQO +c7GwHO1IsLoZkhKfPpa/oifBR97DZRzw1ManEQPS980TZYei3Y9/8uPEpvgvRmm9 +MCHA5wp6YMlkZ5VN0SBRWnPhLtZ8L2/cqHOUCQf6YsIJU9/fewufrbUCgYEA5/QU +/tDBDl/f4A2R1HlIkGd1jS//CJLCc3riy0SQxcWIq6/cqflyfvRWiax5DwcO7qfh +3WbJldu9H0IWZjBCqX0v/jHvWBzaKNQCKbFFcL76Lr8bJCwlUMTH9MOhHf3uCOHD +J7YSTVJdvgzLN8K6yFhc0gI4VYQtnQTWJENObPsCgYEAlawAq6jO5uCVw3dbhGKF +cDpwBaVFGQpyGrZKu6nUCudIpL6VtCiNubqs0tNL1ZVqIr9tFdrkTMkwX7XvDj4j +A/F49u3aOJ18iuD4Eh4WYIJjos/MF+NYM/K1CdIsMbpV94dusJmN0Tw3y/dqR2Jk +n3uFCuivTOdxngk//DnmmV0CgYEA1CXNUiZSfLg5xe4DVEc9lD3cKS8d3pSEXySk +6+8hTpHV59moRJpPG0iVIcRq0NDO2n8YOOy7MWJSPpWucPZw8h362E6Jr5hr/G20 +MLffYDh8EGdgBpyN4Kqqi/allQ3cOalrWhXP9YKBFMMU10I2nekbtESti6GiKnvy +9CXPRCMCgYBZ2w+VVdhUUBA/elbuEdfbPwIYVDAk31PYg0c9jvQVusmfD1CuY/51 +JVsF5oJSosiN7WdDIETkklth0q3lAsQBKoYYMUw54RBf6FawoumB6MVdc3u4y9Ko +l9JC9czdEqb/e0LBqFiWsrtPk9WQf2gyN1mIXQPbyTT1O1J+DvUIbQ== +-----END RSA PRIVATE KEY----- diff --git a/test/fixtures/client-key.pem b/test/fixtures/client-key.pem new file mode 100644 index 0000000..6b47524 --- /dev/null +++ b/test/fixtures/client-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA0Uibwkl0x65pxs/wjfeJ8YJp1A9e42M3yR/pW3CZMOEQx6W+ +imfOSxIo3iqGKBsPnGe2uC35qSEeK3Dd7CVhr2nychceA7S2D2YjawmjLHTFoh22 +zMK6b0rH+Rat71XlMz8u8Mc0kYjOFTsGe34QiXY9mCHkCYHiGYfjTf+JqclnoUuI +UEhxvnG0uHOOL5BE9B7+PBu5vuFCxQmc5htgCFvA64e91spppt5Mpg95AwgGOEg0 +iQwK8TbepSd7BiRjuo6G3nyxcqry4+eeOC75OAvIQ9zvnvTNzh3HcqkV3+eXWfEi +FoKaBlRl2+VeYN1i/53FxfO45tOaTGV6js9MHQIDAQABAoIBACOp2+Ef42ajsiLP +DI8kv70IHECm3eSh47/CUGHkrjZGJDXhaLbtOZpRXeV+GZ57/g0JH3oDW6gWnK2K +bkbvl9XsmAQZLGQ1R1EYdrCm08efno4hwiTiiiKs+6bW1o0Sdhxlh/o/+BVU2smD +ZXdl5CuImrZyEAoOuBjhrzp7cVodSOYYK2RIAL35oAtKLR6NE40XGcxQSCdm+1eU +PzRo8TimQxujyIHrd1QV2FirmLfDFGg3LN8DS72n26bhvDg3PF6PVMF20BKTDqiu +xAyKg3weBsee2QoyegDRdgTD1PvjwWqqnsntPbvY5V8PR1DDmssfotYToNPVuJd2 +6usmBAECgYEA/21NZPZJdxRKwCiWXoqBUIY0VFajxihVxZ9pIZPXOFhpGmyj/jf6 +jBiHAqtucRdABtNxqsztGbEzJsMyNv7MqEVTAWUPH804OwW/C6Z2011GZ1AUN05n +zTxPR4eCYlxvSM+wwC8q+4mSo7hAZj5HltUI0kfEahZnGXqG4FRC1TUCgYEA0cDO +DuTrytk6EoYYCsS7ps87MYUlU97RHFrRGwf+V1Rz2RCz+XAkYCI1/tOpb0VeF1de +fX1mlM3edkLX2ooylYxv5HKPpICzPXeGK/u/HaJBRyZEq6Ms0HK8XyJOdG/UyuiZ +p9nc8eaZYvco24bT4dWe5oZ43mnydAwyK2tOgEkCgYEA/blJg9zSJSNXDYJDvC3B +PofRO2XE0XYHnYM4H06IH0RTQxhf3oskqj1C/3fjARujUiR/aLafX0ISGZMUMmTw +TsZuKZiFaYWlMZwHpj75EgQ5hy6YpkeP/OLHrboB3ksLkDweywkPnUWPEGpaLjX3 +TvDXDmqTxP3z8+8uQ2/v43ECgYB5/3BaTV+vviT+vSuip8aVQRcmuFB7ta9elJvm +4wFV/fLbn9FuFYGywHMzYhy8cVZGsTRuPM+7YPoxQrOVkqfVP7ec4d0WSxz1dV1+ +m5APRl49ac6rHd9k5jcWBjgnlRvpYNxuOlM+B2fTnfoPpR37zmn7nt8STgEM6kML +6f/gsQKBgFJH95hEgqhfEHmP23+ZWH0Dl7zD5sJJe4CYTgYriNeKKzpz2G6OVv+U +xNc8eGbnr4raPTxCCLKz6XJhuQuPQDpkoHvkhjOqZ5Tbb4fCaLdcVE0vwqBE1gGk +ryKSvahgHIykq3+RYpL4u2xypx81IBOMk7EM++Z6gdYMq0ZTN/fL +-----END RSA PRIVATE KEY----- diff --git a/test/fixtures/key.pem b/test/fixtures/key.pem new file mode 100644 index 0000000..fe750de --- /dev/null +++ b/test/fixtures/key.pem @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQDvVEBwFjfiirsDjlZB+CjYNMNCqdJe27hqK/b72AnLjgN6mLcX +COABJC5N61TGFkiF9Zndh6IyFXRZVb4gQX4zxNDRuAydo95BmiYHGV0vt1ZXsLv7 
+XrfQu6USLRtpZMe1cNULjsAB7raN+1hEN1CPMSmSjWc7MKPgv09QYJ5jcQIDAQAB +AoGAbqk3TlyHpKFfDarf6Yr0X9wtuQJK+n+ACt+fSR3AkbVtmF9KsUTyRrTTEEZT +IXCmQgKpDYysi5nt/WyvB70gu6xGYbT6PzZaf1RmcpWd1pLcdyBOppY6y7nTMZA3 +BVFfmIPSmAvtCuzZwQFFnNoKH3d6cqna+ZQJ0zvCLCSLcw0CQQD6tswNlhCIfguh +tvhw7hJB5vZPWWEzyTQl8nVdY6SbxAT8FTx0UjxsKgOiJFzAGAVoCi40oRKIHhrw +pKwHsEqTAkEA9GABbi2xqAmhPn66e0AiU8t2uv69PISBSt2tXbUAburJFj+4rYZW +71QIbSKEYceveb7wm0NP+adgZqJlxn7oawJBAOjfK4+fCIJPWWx+8Cqs5yZxae1w +HrokNBzfJSZ2bCoGm36uFvYQgHETYUaUsdX3OeZWNm7KAdWO6QUGX4fQtqMCQGXv +OgmEY+utAKZ55D2PFgKQB1me8r6wouHgr/U7kA+0Peba86TmOZMhIVaspD3JNqf4 +/pI1NMH1kF+fdAalXzsCQQCelwr9I3FWhx336CWrfAY20xbiMOWMyAhrjVrexgUD +53Y6AhSaRC725pZTgO2PQ4AjkGLIP61sZKgTrXS85KmJ +-----END RSA PRIVATE KEY----- diff --git a/test/fuzzing/client/client-fuzz-body.js b/test/fuzzing/client/client-fuzz-body.js new file mode 100644 index 0000000..6643dda --- /dev/null +++ b/test/fuzzing/client/client-fuzz-body.js @@ -0,0 +1,28 @@ +'use strict' + +const { request, errors } = require('../../..') + +const acceptableCodes = [ + 'ERR_INVALID_ARG_TYPE' +] + +// TODO: could make this a class with some inbuilt functionality that we can inherit +async function fuzz (netServer, results, buf) { + const body = buf + results.body = body + try { + const data = await request(`http://localhost:${netServer.address().port}`, { body }) + data.body.destroy().on('error', () => {}) + } catch (err) { + results.err = err + // Handle any undici errors + if (Object.values(errors).some(undiciError => err instanceof undiciError)) { + // Okay error + } else if (!acceptableCodes.includes(err.code)) { + console.log(`=== Headers: ${JSON.stringify(body)} ===`) + throw err + } + } +} + +module.exports = fuzz diff --git a/test/fuzzing/client/client-fuzz-headers.js b/test/fuzzing/client/client-fuzz-headers.js new file mode 100644 index 0000000..84f3390 --- /dev/null +++ b/test/fuzzing/client/client-fuzz-headers.js @@ -0,0 +1,27 @@ +'use strict' + +const { request, errors } = require('../../..') + +const acceptableCodes = [ + 'ERR_INVALID_ARG_TYPE' +] + +async function fuzz (netServer, results, buf) { + const headers = { buf: buf.toString() } + results.body = headers + try { + const data = await request(`http://localhost:${netServer.address().port}`, { headers }) + data.body.destroy().on('error', () => {}) + } catch (err) { + results.err = err + // Handle any undici errors + if (Object.values(errors).some(undiciError => err instanceof undiciError)) { + // Okay error + } else if (!acceptableCodes.includes(err.code)) { + console.log(`=== Headers: ${JSON.stringify(headers)} ===`) + throw err + } + } +} + +module.exports = fuzz diff --git a/test/fuzzing/client/client-fuzz-options.js b/test/fuzzing/client/client-fuzz-options.js new file mode 100644 index 0000000..5be81b6 --- /dev/null +++ b/test/fuzzing/client/client-fuzz-options.js @@ -0,0 +1,38 @@ +'use strict' + +const { request, errors } = require('../../..') + +const acceptableCodes = [ + 'ERR_INVALID_URL', + // These are included because '\\ABC' is interpreted as a Windows UNC path and can cause these errors. 
+ 'ENOTFOUND', + 'EAI_AGAIN', + 'ECONNREFUSED' + // ---- +] + +async function fuzz (netServer, results, buf) { + const optionKeys = ['body', 'path', 'method', 'opaque', 'upgrade', buf] + const options = {} + for (const optionKey of optionKeys) { + if (Math.random() < 0.5) { + options[optionKey] = buf.toString() + } + } + results.options = options + try { + const data = await request(`http://localhost:${netServer.address().port}`, options) + data.body.destroy().on('error', () => {}) + } catch (err) { + results.err = err + // Handle any undici errors + if (Object.values(errors).some(undiciError => err instanceof undiciError)) { + // Okay error + } else if (!acceptableCodes.includes(err.code)) { + console.log(`=== Options: ${JSON.stringify(options)} ===`) + throw err + } + } +} + +module.exports = fuzz diff --git a/test/fuzzing/client/index.js b/test/fuzzing/client/index.js new file mode 100644 index 0000000..dac3d98 --- /dev/null +++ b/test/fuzzing/client/index.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = { + clientFuzzBody: require('./client-fuzz-body'), + clientFuzzHeaders: require('./client-fuzz-headers'), + clientFuzzOptions: require('./client-fuzz-options') +} diff --git a/test/fuzzing/fuzz.js b/test/fuzzing/fuzz.js new file mode 100644 index 0000000..c268178 --- /dev/null +++ b/test/fuzzing/fuzz.js @@ -0,0 +1,66 @@ +'use strict' + +const net = require('net') +const fs = require('fs/promises') +const path = require('path') +const serverFuzzFnMap = require('./server') +const clientFuzzFnMap = require('./client') + +const port = process.env.PORT || 0 +const timeout = parseInt(process.env.TIMEOUT, 10) || 300_000 // 5 minutes by default + +const netServer = net.createServer((socket) => { + socket.on('data', (data) => { + // Select server fuzz fn + const serverFuzzFns = Object.values(serverFuzzFnMap) + const serverFuzzFn = serverFuzzFns[Math.floor(Math.random() * serverFuzzFns.length)] + + serverFuzzFn(socket, data) + }) +}) +const waitForNetServer = netServer.listen(port) + +// Set script to exit gracefully after a set period of time. 
+const timer = setTimeout(() => { + process.kill(process.pid, 'SIGINT') +}, timeout) + +async function writeResults (resultsPath, data) { + try { + await fs.writeFile(resultsPath, JSON.stringify(data, null, 2)) + console.log(`=== Written results to ${resultsPath} ===`) + } catch (err) { + console.log(`=== Unable to write results to ${resultsPath}`, err, '===') + } +} + +async function fuzz (buf) { + // Wait for net server to be ready + await waitForNetServer + + // Select client fuzz fn based on the buf input + await Promise.all( + Object.entries(clientFuzzFnMap).map(async ([clientFuzzFnName, clientFuzzFn]) => { + const results = {} + try { + await clientFuzzFn(netServer, results, buf) + } catch (err) { + clearTimeout(timer) + const output = { clientFuzzFnName, buf: { raw: buf, string: buf.toString() }, raw: JSON.stringify({ clientFuzzFnName, buf: { raw: buf, string: buf.toString() }, err, ...results }), err, ...results } + + console.log(`=== Failed fuzz ${clientFuzzFnName} with input '${buf}' ===`) + console.log('=== Fuzz results start ===') + console.log(output) + console.log('=== Fuzz results end ===') + + await writeResults(path.resolve(`fuzz-results-${Date.now()}.json`), output) + + throw err + } + }) + ) +} + +module.exports = { + fuzz +} diff --git a/test/fuzzing/server/index.js b/test/fuzzing/server/index.js new file mode 100644 index 0000000..4bef554 --- /dev/null +++ b/test/fuzzing/server/index.js @@ -0,0 +1,6 @@ +'use strict' + +module.exports = { + splitData: require('./server-fuzz-split-data'), + appendData: require('./server-fuzz-append-data') +} diff --git a/test/fuzzing/server/server-fuzz-append-data.js b/test/fuzzing/server/server-fuzz-append-data.js new file mode 100644 index 0000000..8ef6c45 --- /dev/null +++ b/test/fuzzing/server/server-fuzz-append-data.js @@ -0,0 +1,7 @@ +'use strict' + +function appendData (socket, data) { + socket.end('HTTP/1.1 200 OK' + data) +} + +module.exports = appendData diff --git a/test/fuzzing/server/server-fuzz-split-data.js b/test/fuzzing/server/server-fuzz-split-data.js new file mode 100644 index 0000000..5e057dc --- /dev/null +++ b/test/fuzzing/server/server-fuzz-split-data.js @@ -0,0 +1,17 @@ +'use strict' + +function splitData (socket, data) { + const lines = [ + 'HTTP/1.1 200 OK', + 'Date: Sat, 09 Oct 2010 14:28:02 GMT', + 'Connection: close', + '', + data + ] + for (const line of lines.join('\r\n').split(data)) { + socket.write(line) + } + socket.end() +} + +module.exports = splitData diff --git a/test/gc.js b/test/gc.js new file mode 100644 index 0000000..c1ceecf --- /dev/null +++ b/test/gc.js @@ -0,0 +1,98 @@ +'use strict' +/* global WeakRef, FinalizationRegistry */ + +const { test } = require('tap') +const { createServer } = require('net') +const { Client, Pool } = require('..') + +const SKIP = typeof WeakRef === 'undefined' || typeof FinalizationRegistry === 'undefined' + +setInterval(() => { + global.gc() +}, 100).unref() + +test('gc should collect the client if, and only if, there are no active sockets', { skip: SKIP }, t => { + t.plan(4) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=1s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + let weakRef + let disconnected = false + + const registry = new FinalizationRegistry((data) => { + t.equal(data, 'test') + t.equal(disconnected, true) + t.equal(weakRef.deref(), undefined) + }) + + 
server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + keepAliveTimeoutThreshold: 100 + }) + client.once('disconnect', () => { + disconnected = true + }) + + weakRef = new WeakRef(client) + registry.register(client, 'test') + + client.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.resume() + }) + }) +}) + +test('gc should collect the pool if, and only if, there are no active sockets', { skip: SKIP }, t => { + t.plan(4) + + const server = createServer((socket) => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n') + socket.write('Keep-Alive: timeout=1s\r\n') + socket.write('Connection: keep-alive\r\n') + socket.write('\r\n\r\n') + }) + t.teardown(server.close.bind(server)) + + let weakRef + let disconnected = false + + const registry = new FinalizationRegistry((data) => { + t.equal(data, 'test') + t.equal(disconnected, true) + t.equal(weakRef.deref(), undefined) + }) + + server.listen(0, () => { + const pool = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + keepAliveTimeoutThreshold: 500 + }) + + pool.once('disconnect', () => { + disconnected = true + }) + + weakRef = new WeakRef(pool) + registry.register(pool, 'test') + + pool.request({ + path: '/', + method: 'GET' + }, (err, { body }) => { + t.error(err) + body.resume() + }) + }) +}) diff --git a/test/get-head-body.js b/test/get-head-body.js new file mode 100644 index 0000000..3e86b13 --- /dev/null +++ b/test/get-head-body.js @@ -0,0 +1,184 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') +const { kConnect } = require('../lib/core/symbols') +const { kBusy } = require('../lib/core/symbols') +const { wrapWithAsyncIterable } = require('./utils/async-iterators') + +test('GET and HEAD with body should reset connection', (t) => { + t.plan(8 + 2) + + const server = createServer((req, res) => { + res.end('asd') + }) + + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.on('disconnect', () => { + t.pass() + }) + + client.request({ + path: '/', + body: 'asd', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + const emptyBody = new Readable({ + read () {} + }) + emptyBody.push(null) + client.request({ + path: '/', + body: emptyBody, + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + body: new Readable({ + read () { + this.push(null) + } + }), + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + body: new Readable({ + read () { + this.push('asd') + this.push(null) + } + }), + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + body: [], + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + body: wrapWithAsyncIterable(new Readable({ + read () { + this.push(null) + } + })), + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + + client.request({ + path: '/', + body: wrapWithAsyncIterable(new Readable({ + read () { + this.push('asd') + this.push(null) + } + })), + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + }) +}) + +// 
TODO: Avoid external dependency. +// test('GET with body should work when target parses body as request', (t) => { +// t.plan(4) + +// // This URL will send double responses when receiving a +// // GET request with body. +// const client = new Client('http://feeds.bbci.co.uk') +// t.teardown(client.close.bind(client)) + +// client.request({ method: 'GET', path: '/news/rss.xml', body: 'asd' }, (err, data) => { +// t.error(err) +// t.equal(data.statusCode, 200) +// data.body.resume() +// }) +// client.request({ method: 'GET', path: '/news/rss.xml', body: 'asd' }, (err, data) => { +// t.error(err) +// t.equal(data.statusCode, 200) +// data.body.resume() +// }) +// }) + +test('HEAD should reset connection', (t) => { + t.plan(8) + + const server = createServer((req, res) => { + res.end('asd') + }) + + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.once('disconnect', () => { + t.pass() + }) + + client.request({ + path: '/', + method: 'HEAD' + }, (err, data) => { + t.error(err) + data.body.resume() + }) + t.equal(client[kBusy], true) + + client.request({ + path: '/', + method: 'HEAD' + }, (err, data) => { + t.error(err) + data.body.resume() + client.once('disconnect', () => { + client[kConnect](() => { + client.request({ + path: '/', + method: 'HEAD' + }, (err, data) => { + t.error(err) + data.body.resume() + data.body.on('end', () => { + t.pass() + }) + }) + t.equal(client[kBusy], true) + }) + }) + }) + t.equal(client[kBusy], true) + }) +}) diff --git a/test/headers-as-array.js b/test/headers-as-array.js new file mode 100644 index 0000000..fd8bb5d --- /dev/null +++ b/test/headers-as-array.js @@ -0,0 +1,131 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') + +test('handle headers as array', (t) => { + t.plan(1) + const headers = ['a', '1', 'b', '2', 'c', '3'] + + const server = createServer((req, res) => { + t.match(req.headers, { a: '1', b: '2', c: '3' }) + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers + }, () => {}) + }) +}) + +test('handle multi-valued headers as array', (t) => { + t.plan(1) + const headers = ['a', '1', 'b', '2', 'c', '3', 'd', '4', 'd', '5'] + + const server = createServer((req, res) => { + t.match(req.headers, { a: '1', b: '2', c: '3', d: '4, 5' }) + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers + }, () => {}) + }) +}) + +test('handle headers with array', (t) => { + t.plan(1) + const headers = { a: '1', b: '2', c: '3', d: ['4'] } + + const server = createServer((req, res) => { + t.match(req.headers, { a: '1', b: '2', c: '3', d: '4' }) + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers + }, () => {}) + }) +}) + +test('handle multi-valued headers', (t) => { + t.plan(1) + const headers = { a: '1', b: '2', c: '3', d: ['4', '5'] } + 
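+ // The array value for 'd' is expected to reach the server folded into a single header: '4, 5'.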
+ const server = createServer((req, res) => { + t.match(req.headers, { a: '1', b: '2', c: '3', d: '4, 5' }) + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers + }, () => {}) + }) +}) + +test('fail if headers array is odd', (t) => { + t.plan(2) + const headers = ['a', '1', 'b', '2', 'c', '3', 'd'] + + const server = createServer((req, res) => { res.end() }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers + }, (err) => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'headers array must be even') + }) + }) +}) + +test('fail if headers is not an object or an array', (t) => { + t.plan(2) + const headers = 'not an object or an array' + + const server = createServer((req, res) => { res.end() }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers + }, (err) => { + t.ok(err instanceof errors.InvalidArgumentError) + t.equal(err.message, 'headers must be an object or an array') + }) + }) +}) diff --git a/test/headers-crlf.js b/test/headers-crlf.js new file mode 100644 index 0000000..b24fd39 --- /dev/null +++ b/test/headers-crlf.js @@ -0,0 +1,36 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') + +test('CRLF Injection in Nodejs ‘undici’ via host', (t) => { + t.plan(1) + + const server = createServer(async (req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const unsanitizedContentTypeInput = '12 \r\n\r\naaa:aaa' + + try { + const { body } = await client.request({ + path: '/', + method: 'POST', + headers: { + 'content-type': 'application/json', + host: unsanitizedContentTypeInput + }, + body: 'asd' + }) + await body.dump() + } catch (err) { + t.same(err.code, 'UND_ERR_INVALID_ARG') + } + }) +}) diff --git a/test/http-100.js b/test/http-100.js new file mode 100644 index 0000000..1662a8d --- /dev/null +++ b/test/http-100.js @@ -0,0 +1,141 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const net = require('net') + +test('ignore informational response', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.writeProcessing() + req.pipe(res) + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'POST', + body: 'hello' + }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('error 103 body', (t) => { + t.plan(2) + + const server = net.createServer((socket) => { + socket.write('HTTP/1.1 103 Early Hints\r\n') 
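+ // A 1xx interim response must not carry a body; the Content-Length and payload written below should make the HTTP parser error out.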
+ socket.write('Content-Length: 1\r\n') + socket.write('\r\n') + socket.write('a\r\n') + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.equal(err.code, 'HPE_INVALID_CONSTANT') + }) + client.on('disconnect', () => { + t.pass() + }) + }) +}) + +test('error 100 body', (t) => { + t.plan(2) + + const server = net.createServer((socket) => { + socket.write('HTTP/1.1 100 Early Hints\r\n') + socket.write('\r\n') + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.equal(err.message, 'bad response') + }) + client.on('disconnect', () => { + t.pass() + }) + }) +}) + +test('error 101 upgrade', (t) => { + t.plan(2) + + const server = net.createServer((socket) => { + socket.write('HTTP/1.1 101 Switching Protocols\r\nUpgrade: example/1\r\nConnection: Upgrade\r\n') + socket.write('\r\n') + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.equal(err.message, 'bad upgrade') + }) + client.on('disconnect', () => { + t.pass() + }) + }) +}) + +test('1xx response without timeouts', t => { + t.plan(2) + + const server = createServer((req, res) => { + res.writeProcessing() + setTimeout(() => req.pipe(res), 2000) + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0, + headersTimeout: 0 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'POST', + body: 'hello' + }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) diff --git a/test/http-req-destroy.js b/test/http-req-destroy.js new file mode 100644 index 0000000..29ec98e --- /dev/null +++ b/test/http-req-destroy.js @@ -0,0 +1,69 @@ +'use strict' + +const { test } = require('tap') +const undici = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') +const { maybeWrapStream, consts } = require('./utils/async-iterators') + +function doNotKillReqSocket (bodyType) { + test(`do not kill req socket ${bodyType}`, (t) => { + t.plan(3) + + const server1 = createServer((req, res) => { + const client = new undici.Client(`http://localhost:${server2.address().port}`) + t.teardown(client.close.bind(client)) + client.request({ + path: '/', + method: 'POST', + body: req + }, (err, response) => { + t.error(err) + setTimeout(() => { + response.body.on('data', buf => { + res.write(buf) + setTimeout(() => { + res.end() + }, 100) + }) + }, 100) + }) + }) + t.teardown(server1.close.bind(server1)) + + const server2 = createServer((req, res) => { + setTimeout(() => { + req.pipe(res) + }, 100) + }) + t.teardown(server2.close.bind(server2)) + + server1.listen(0, () => { + const client = new undici.Client(`http://localhost:${server1.address().port}`) + t.teardown(client.close.bind(client)) + + const r = new Readable({ read () {} 
}) + r.push('hello') + client.request({ + path: '/', + method: 'POST', + body: maybeWrapStream(r, bodyType) + }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + r.push(null) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + + server2.listen(0) + }) +} + +doNotKillReqSocket(consts.STREAM) +doNotKillReqSocket(consts.ASYNC_ITERATOR) diff --git a/test/http2-alpn.js b/test/http2-alpn.js new file mode 100644 index 0000000..04b8cb6 --- /dev/null +++ b/test/http2-alpn.js @@ -0,0 +1,277 @@ +'use strict' + +const https = require('node:https') +const { once } = require('node:events') +const { createSecureServer } = require('node:http2') +const { readFileSync } = require('node:fs') +const { join } = require('node:path') +const { test } = require('tap') + +const { Client } = require('..') + +// get the crypto fixtures +const key = readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8') +const cert = readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8') +const ca = readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + +test('Should upgrade to HTTP/2 when HTTPS/1 is available for GET', async (t) => { + t.plan(10) + + const body = [] + const httpsBody = [] + + // create the server and server stream handler + const server = createSecureServer( + { + key, + cert, + allowHTTP1: true + }, + (req, res) => { + const { socket: { alpnProtocol } } = req.httpVersion === '2.0' ? req.stream.session : req + + // handle http/1 requests + res.writeHead(200, { + 'content-type': 'application/json; charset=utf-8', + 'x-custom-request-header': req.headers['x-custom-request-header'] || '', + 'x-custom-response-header': `using ${req.httpVersion}` + }) + res.end(JSON.stringify({ + alpnProtocol, + httpVersion: req.httpVersion + })) + } + ) + + server.listen(0) + await once(server, 'listening') + + // close the server on teardown + t.teardown(server.close.bind(server)) + + // set the port + const port = server.address().port + + // test undici against http/2 + const client = new Client(`https://localhost:${port}`, { + connect: { + ca, + servername: 'agent1' + }, + allowH2: true + }) + + // close the client on teardown + t.teardown(client.close.bind(client)) + + // make an undici request using where it wants http/2 + const response = await client.request({ + path: '/', + method: 'GET', + headers: { + 'x-custom-request-header': 'want 2.0' + } + }) + + response.body.on('data', chunk => { + body.push(chunk) + }) + + await once(response.body, 'end') + + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'application/json; charset=utf-8') + t.equal(response.headers['x-custom-request-header'], 'want 2.0') + t.equal(response.headers['x-custom-response-header'], 'using 2.0') + t.equal(Buffer.concat(body).toString('utf8'), JSON.stringify({ + alpnProtocol: 'h2', + httpVersion: '2.0' + })) + + // make an https request for http/1 to confirm undici is using http/2 + const httpsOptions = { + ca, + servername: 'agent1', + headers: { + 'x-custom-request-header': 'want 1.1' + } + } + + const httpsResponse = await new Promise((resolve, reject) => { + const httpsRequest = https.get(`https://localhost:${port}/`, httpsOptions, (res) => { + res.on('data', (chunk) => { + httpsBody.push(chunk) + }) + + res.on('end', () => { + resolve(res) + }) + }).on('error', (err) => { + reject(err) + }) + + t.teardown(httpsRequest.destroy.bind(httpsRequest)) + }) + + 
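+ // The plain https/1.1 request should report no negotiated ALPN protocol (false), confirming that only the undici client upgraded to h2.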
t.equal(httpsResponse.statusCode, 200) + t.equal(httpsResponse.headers['content-type'], 'application/json; charset=utf-8') + t.equal(httpsResponse.headers['x-custom-request-header'], 'want 1.1') + t.equal(httpsResponse.headers['x-custom-response-header'], 'using 1.1') + t.equal(Buffer.concat(httpsBody).toString('utf8'), JSON.stringify({ + alpnProtocol: false, + httpVersion: '1.1' + })) +}) + +test('Should upgrade to HTTP/2 when HTTPS/1 is available for POST', async (t) => { + t.plan(15) + + const requestChunks = [] + const responseBody = [] + + const httpsRequestChunks = [] + const httpsResponseBody = [] + + const expectedBody = 'hello' + const buf = Buffer.from(expectedBody) + const body = new ArrayBuffer(buf.byteLength) + + buf.copy(new Uint8Array(body)) + + // create the server and server stream handler + const server = createSecureServer( + { + key, + cert, + allowHTTP1: true + }, + (req, res) => { + // use the stream handler for http2 + if (req.httpVersion === '2.0') { + return + } + + const { socket: { alpnProtocol } } = req + + req.on('data', (chunk) => { + httpsRequestChunks.push(chunk) + }) + + req.on('end', () => { + // handle http/1 requests + res.writeHead(201, { + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-request-header': req.headers['x-custom-request-header'] || '', + 'x-custom-alpn-protocol': alpnProtocol + }) + res.end('hello http/1!') + }) + } + ) + + server.on('stream', (stream, headers) => { + t.equal(headers[':method'], 'POST') + t.equal(headers[':path'], '/') + t.equal(headers[':scheme'], 'https') + + const { socket: { alpnProtocol } } = stream.session + + stream.on('data', (chunk) => { + requestChunks.push(chunk) + }) + + stream.respond({ + ':status': 201, + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-request-header': headers['x-custom-request-header'] || '', + 'x-custom-alpn-protocol': alpnProtocol + }) + + stream.end('hello h2!') + }) + + server.listen(0) + await once(server, 'listening') + + // close the server on teardown + t.teardown(server.close.bind(server)) + + // set the port + const port = server.address().port + + // test undici against http/2 + const client = new Client(`https://localhost:${port}`, { + connect: { + ca, + servername: 'agent1' + }, + allowH2: true + }) + + // close the client on teardown + t.teardown(client.close.bind(client)) + + // make an undici request using where it wants http/2 + const response = await client.request({ + path: '/', + method: 'POST', + headers: { + 'x-custom-request-header': 'want 2.0' + }, + body + }) + + response.body.on('data', (chunk) => { + responseBody.push(chunk) + }) + + await once(response.body, 'end') + + t.equal(response.statusCode, 201) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-request-header'], 'want 2.0') + t.equal(response.headers['x-custom-alpn-protocol'], 'h2') + t.equal(Buffer.concat(responseBody).toString('utf-8'), 'hello h2!') + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) + + // make an https request for http/1 to confirm undici is using http/2 + const httpsOptions = { + ca, + servername: 'agent1', + method: 'POST', + headers: { + 'content-type': 'text/plain; charset=utf-8', + 'content-length': Buffer.byteLength(body), + 'x-custom-request-header': 'want 1.1' + } + } + + const httpsResponse = await new Promise((resolve, reject) => { + const httpsRequest = https.request(`https://localhost:${port}/`, httpsOptions, (res) => { + res.on('data', (chunk) => { + 
httpsResponseBody.push(chunk) + }) + + res.on('end', () => { + resolve(res) + }) + }).on('error', (err) => { + reject(err) + }) + + httpsRequest.on('error', (err) => { + reject(err) + }) + + httpsRequest.write(Buffer.from(body)) + + t.teardown(httpsRequest.destroy.bind(httpsRequest)) + }) + + t.equal(httpsResponse.statusCode, 201) + t.equal(httpsResponse.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(httpsResponse.headers['x-custom-request-header'], 'want 1.1') + t.equal(httpsResponse.headers['x-custom-alpn-protocol'], 'false') + t.equal(Buffer.concat(httpsResponseBody).toString('utf-8'), 'hello http/1!') + t.equal(Buffer.concat(httpsRequestChunks).toString('utf-8'), expectedBody) +}) diff --git a/test/http2.js b/test/http2.js new file mode 100644 index 0000000..71b7749 --- /dev/null +++ b/test/http2.js @@ -0,0 +1,1191 @@ +'use strict' + +const { createSecureServer } = require('node:http2') +const { createReadStream, readFileSync } = require('node:fs') +const { once } = require('node:events') +const { Blob } = require('node:buffer') +const { Writable, pipeline, PassThrough, Readable } = require('node:stream') + +const { test, plan } = require('tap') +const { gte } = require('semver') +const pem = require('https-pem') + +const { Client, Agent } = require('..') + +const isGreaterThanv20 = gte(process.version.slice(1), '20.0.0') +// NOTE: node versions <16.14.1 have a bug which changes the order of pseudo-headers +// https://github.com/nodejs/node/pull/41735 +const hasPseudoHeadersOrderFix = gte(process.version.slice(1), '16.14.1') + +plan(23) + +test('Should support H2 connection', async t => { + const body = [] + const server = createSecureServer(pem) + + server.on('stream', (stream, headers, _flags, rawHeaders) => { + t.equal(headers['x-my-header'], 'foo') + t.equal(headers[':method'], 'GET') + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'hello', + ':status': 200 + }) + stream.end('hello h2!') + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(6) + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + + response.body.on('data', chunk => { + body.push(chunk) + }) + + await once(response.body, 'end') + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'hello') + t.equal(Buffer.concat(body).toString('utf8'), 'hello h2!') +}) + +test('Should support H2 connection(multiple requests)', async t => { + const server = createSecureServer(pem) + + server.on('stream', async (stream, headers, _flags, rawHeaders) => { + t.equal(headers['x-my-header'], 'foo') + t.equal(headers[':method'], 'POST') + const reqData = [] + stream.on('data', chunk => reqData.push(chunk.toString())) + await once(stream, 'end') + const reqBody = reqData.join('') + t.equal(reqBody.length > 0, true) + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'hello', + ':status': 200 + }) + stream.end(`hello h2! 
${reqBody}`) + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(21) + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + for (let i = 0; i < 3; i++) { + const sendBody = `seq ${i}` + const body = [] + const response = await client.request({ + path: '/', + method: 'POST', + headers: { + 'content-type': 'text/plain; charset=utf-8', + 'x-my-header': 'foo' + }, + body: Readable.from(sendBody) + }) + + response.body.on('data', chunk => { + body.push(chunk) + }) + + await once(response.body, 'end') + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'hello') + t.equal(Buffer.concat(body).toString('utf8'), `hello h2! ${sendBody}`) + } +}) + +test('Should support H2 connection (headers as array)', async t => { + const body = [] + const server = createSecureServer(pem) + + server.on('stream', (stream, headers) => { + t.equal(headers['x-my-header'], 'foo') + t.equal(headers['x-my-drink'], 'coffee,tea') + t.equal(headers[':method'], 'GET') + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'hello', + ':status': 200 + }) + stream.end('hello h2!') + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(7) + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'GET', + headers: ['x-my-header', 'foo', 'x-my-drink', ['coffee', 'tea']] + }) + + response.body.on('data', chunk => { + body.push(chunk) + }) + + await once(response.body, 'end') + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'hello') + t.equal(Buffer.concat(body).toString('utf8'), 'hello h2!') +}) + +test('Should support H2 connection(POST Buffer)', async t => { + const server = createSecureServer({ ...pem, allowHTTP1: false }) + + server.on('stream', async (stream, headers, _flags, rawHeaders) => { + t.equal(headers[':method'], 'POST') + const reqData = [] + stream.on('data', chunk => reqData.push(chunk.toString())) + await once(stream, 'end') + t.equal(reqData.join(''), 'hello!') + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'hello', + ':status': 200 + }) + stream.end('hello h2!') + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(6) + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const sendBody = 'hello!' 
+ const body = [] + const response = await client.request({ + path: '/', + method: 'POST', + body: sendBody + }) + + response.body.on('data', chunk => { + body.push(chunk) + }) + + await once(response.body, 'end') + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'hello') + t.equal(Buffer.concat(body).toString('utf8'), 'hello h2!') +}) + +test('Should support H2 GOAWAY (server-side)', async t => { + const body = [] + const server = createSecureServer(pem) + + server.on('stream', (stream, headers) => { + t.equal(headers['x-my-header'], 'foo') + t.equal(headers[':method'], 'GET') + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'hello', + ':status': 200 + }) + stream.end('hello h2!') + }) + + server.on('session', session => { + setTimeout(() => { + session.goaway(204) + }, 1000) + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(9) + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + + response.body.on('data', chunk => { + body.push(chunk) + }) + + await once(response.body, 'end') + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'hello') + t.equal(Buffer.concat(body).toString('utf8'), 'hello h2!') + + const [url, disconnectClient, err] = await once(client, 'disconnect') + + t.type(url, URL) + t.same(disconnectClient, [client]) + t.equal(err.message, 'HTTP/2: "GOAWAY" frame received with code 204') +}) + +test('Should throw if bad allowH2 has been pased', async t => { + try { + // eslint-disable-next-line + new Client('https://localhost:1000', { + allowH2: 'true' + }) + t.fail() + } catch (error) { + t.equal(error.message, 'allowH2 must be a valid boolean value') + } +}) + +test('Should throw if bad maxConcurrentStreams has been pased', async t => { + try { + // eslint-disable-next-line + new Client('https://localhost:1000', { + allowH2: true, + maxConcurrentStreams: {} + }) + t.fail() + } catch (error) { + t.equal( + error.message, + 'maxConcurrentStreams must be a possitive integer, greater than 0' + ) + } + + try { + // eslint-disable-next-line + new Client('https://localhost:1000', { + allowH2: true, + maxConcurrentStreams: -1 + }) + t.fail() + } catch (error) { + t.equal( + error.message, + 'maxConcurrentStreams must be a possitive integer, greater than 0' + ) + } +}) + +test( + 'Request should fail if allowH2 is false and server advertises h1 only', + { skip: isGreaterThanv20 }, + async t => { + const server = createSecureServer( + { + ...pem, + allowHTTP1: false, + ALPNProtocols: ['http/1.1'] + }, + (req, res) => { + t.fail('Should not create a valid h2 stream') + } + ) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + allowH2: false, + connect: { + rejectUnauthorized: false + } + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + + t.equal(response.statusCode, 403) + } +) + +test( + '[v20] Request should 
fail if allowH2 is false and server advertises h1 only', + { skip: !isGreaterThanv20 }, + async t => { + const server = createSecureServer( + { + ...pem, + allowHTTP1: false, + ALPNProtocols: ['http/1.1'] + }, + (req, res) => { + t.fail('Should not create a valid h2 stream') + } + ) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + allowH2: false, + connect: { + rejectUnauthorized: false + } + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + t.plan(2) + + try { + await client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + } catch (error) { + t.equal( + error.message, + 'Client network socket disconnected before secure TLS connection was established' + ) + t.equal(error.code, 'ECONNRESET') + } + } +) + +test('Should handle h2 continue', async t => { + const requestBody = [] + const server = createSecureServer(pem, () => {}) + const responseBody = [] + + server.on('checkContinue', (request, response) => { + t.equal(request.headers.expect, '100-continue') + t.equal(request.headers['x-my-header'], 'foo') + t.equal(request.headers[':method'], 'POST') + response.writeContinue() + + request.on('data', chunk => requestBody.push(chunk)) + + response.writeHead(200, { + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'foo' + }) + response.end('hello h2!') + }) + + t.plan(7) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + expectContinue: true, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'POST', + headers: { + 'x-my-header': 'foo' + }, + expectContinue: true + }) + + response.body.on('data', chunk => { + responseBody.push(chunk) + }) + + await once(response.body, 'end') + + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'foo') + t.equal(Buffer.concat(responseBody).toString('utf-8'), 'hello h2!') +}) + +test('Dispatcher#Stream', t => { + const server = createSecureServer(pem) + const expectedBody = 'hello from client!' + const bufs = [] + let requestBody = '' + + server.on('stream', async (stream, headers) => { + stream.setEncoding('utf-8') + stream.on('data', chunk => { + requestBody += chunk + }) + + stream.respond({ ':status': 200, 'x-custom': 'custom-header' }) + stream.end('hello h2!') + }) + + t.plan(4) + + server.listen(0, async () => { + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + await client.stream( + { path: '/', opaque: { bufs }, method: 'POST', body: expectedBody }, + ({ statusCode, headers, opaque: { bufs } }) => { + t.equal(statusCode, 200) + t.equal(headers['x-custom'], 'custom-header') + + return new Writable({ + write (chunk, _encoding, cb) { + bufs.push(chunk) + cb() + } + }) + } + ) + + t.equal(Buffer.concat(bufs).toString('utf-8'), 'hello h2!') + t.equal(requestBody, expectedBody) + }) +}) + +test('Dispatcher#Pipeline', t => { + const server = createSecureServer(pem) + const expectedBody = 'hello from client!' 
+ const bufs = [] + let requestBody = '' + + server.on('stream', async (stream, headers) => { + stream.setEncoding('utf-8') + stream.on('data', chunk => { + requestBody += chunk + }) + + stream.respond({ ':status': 200, 'x-custom': 'custom-header' }) + stream.end('hello h2!') + }) + + t.plan(5) + + server.listen(0, () => { + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + pipeline( + new Readable({ + read () { + this.push(Buffer.from(expectedBody)) + this.push(null) + } + }), + client.pipeline( + { path: '/', method: 'POST', body: expectedBody }, + ({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['x-custom'], 'custom-header') + + return pipeline(body, new PassThrough(), () => {}) + } + ), + new Writable({ + write (chunk, _, cb) { + bufs.push(chunk) + cb() + } + }), + err => { + t.error(err) + t.equal(Buffer.concat(bufs).toString('utf-8'), 'hello h2!') + t.equal(requestBody, expectedBody) + } + ) + }) +}) + +test('Dispatcher#Connect', t => { + const server = createSecureServer(pem) + const expectedBody = 'hello from client!' + let requestBody = '' + + server.on('stream', async (stream, headers) => { + stream.setEncoding('utf-8') + stream.on('data', chunk => { + requestBody += chunk + }) + + stream.respond({ ':status': 200, 'x-custom': 'custom-header' }) + stream.end('hello h2!') + }) + + t.plan(6) + + server.listen(0, () => { + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + let result = '' + client.connect({ path: '/' }, (err, { socket }) => { + t.error(err) + socket.on('data', chunk => { + result += chunk + }) + socket.on('response', headers => { + t.equal(headers[':status'], 200) + t.equal(headers['x-custom'], 'custom-header') + t.notOk(socket.closed) + }) + + // We need to handle the error event although + // is not controlled by Undici, the fact that a session + // is destroyed and destroys subsequent streams, causes + // unhandled errors to surface if not handling this event. 
+ socket.on('error', () => {}) + + socket.once('end', () => { + t.equal(requestBody, expectedBody) + t.equal(result, 'hello h2!') + }) + socket.end(expectedBody) + }) + }) +}) + +test('Dispatcher#Upgrade', t => { + const server = createSecureServer(pem) + + server.on('stream', async (stream, headers) => { + stream.end() + }) + + t.plan(1) + + server.listen(0, async () => { + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + try { + await client.upgrade({ path: '/' }) + } catch (error) { + t.equal(error.message, 'Upgrade not supported for H2') + } + }) +}) + +test('Dispatcher#destroy', async t => { + const promises = [] + const server = createSecureServer(pem) + + server.on('stream', (stream, headers) => { + setTimeout(stream.end.bind(stream), 1500) + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(4) + t.teardown(server.close.bind(server)) + + promises.push( + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + ) + + promises.push( + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + ) + + promises.push( + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + ) + + promises.push( + client.request({ + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + ) + + await client.destroy() + + const results = await Promise.allSettled(promises) + + t.equal(results[0].status, 'rejected') + t.equal(results[1].status, 'rejected') + t.equal(results[2].status, 'rejected') + t.equal(results[3].status, 'rejected') +}) + +test('Should handle h2 request with body (string or buffer) - dispatch', t => { + const server = createSecureServer(pem) + const expectedBody = 'hello from client!' + const response = [] + const requestBody = [] + + server.on('stream', async (stream, headers) => { + stream.on('data', chunk => requestBody.push(chunk)) + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + stream.end('hello h2!') + }) + + t.plan(7) + + server.listen(0, () => { + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + client.dispatch( + { + path: '/', + method: 'POST', + headers: { + 'x-my-header': 'foo', + 'content-type': 'text/plain' + }, + body: expectedBody + }, + { + onConnect () { + t.ok(true) + }, + onError (err) { + t.error(err) + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain; charset=utf-8') + t.equal(headers['x-custom-h2'], 'foo') + }, + onData (chunk) { + response.push(chunk) + }, + onBodySent (body) { + t.equal(body.toString('utf-8'), expectedBody) + }, + onComplete () { + t.equal(Buffer.concat(response).toString('utf-8'), 'hello h2!') + t.equal( + Buffer.concat(requestBody).toString('utf-8'), + 'hello from client!' 
+ ) + } + } + ) + }) +}) + +test('Should handle h2 request with body (stream)', async t => { + const server = createSecureServer(pem) + const expectedBody = readFileSync(__filename, 'utf-8') + const stream = createReadStream(__filename) + const requestChunks = [] + const responseBody = [] + + server.on('stream', async (stream, headers) => { + t.equal(headers[':method'], 'PUT') + t.equal(headers[':path'], '/') + t.equal(headers[':scheme'], 'https') + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + for await (const chunk of stream) { + requestChunks.push(chunk) + } + + stream.end('hello h2!') + }) + + t.plan(8) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'PUT', + headers: { + 'x-my-header': 'foo' + }, + body: stream + }) + + for await (const chunk of response.body) { + responseBody.push(chunk) + } + + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'foo') + t.equal(Buffer.concat(responseBody).toString('utf-8'), 'hello h2!') + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) +}) + +test('Should handle h2 request with body (iterable)', async t => { + const server = createSecureServer(pem) + const expectedBody = 'hello' + const requestChunks = [] + const responseBody = [] + const iterableBody = { + [Symbol.iterator]: function * () { + const end = expectedBody.length - 1 + for (let i = 0; i < end + 1; i++) { + yield expectedBody[i] + } + + return expectedBody[end] + } + } + + server.on('stream', async (stream, headers) => { + t.equal(headers[':method'], 'POST') + t.equal(headers[':path'], '/') + t.equal(headers[':scheme'], 'https') + + stream.on('data', chunk => requestChunks.push(chunk)) + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + stream.end('hello h2!') + }) + + t.plan(8) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'POST', + headers: { + 'x-my-header': 'foo' + }, + body: iterableBody + }) + + response.body.on('data', chunk => { + responseBody.push(chunk) + }) + + await once(response.body, 'end') + + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'foo') + t.equal(Buffer.concat(responseBody).toString('utf-8'), 'hello h2!') + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) +}) + +test('Should handle h2 request with body (Blob)', { skip: !Blob }, async t => { + const server = createSecureServer(pem) + const expectedBody = 'asd' + const requestChunks = [] + const responseBody = [] + const body = new Blob(['asd'], { + type: 'application/json' + }) + + server.on('stream', async (stream, headers) => { + t.equal(headers[':method'], 'POST') + t.equal(headers[':path'], '/') + 
t.equal(headers[':scheme'], 'https') + + stream.on('data', chunk => requestChunks.push(chunk)) + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + stream.end('hello h2!') + }) + + t.plan(8) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'POST', + headers: { + 'x-my-header': 'foo' + }, + body + }) + + response.body.on('data', chunk => { + responseBody.push(chunk) + }) + + await once(response.body, 'end') + + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'foo') + t.equal(Buffer.concat(responseBody).toString('utf-8'), 'hello h2!') + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) +}) + +test( + 'Should handle h2 request with body (Blob:ArrayBuffer)', + { skip: !Blob }, + async t => { + const server = createSecureServer(pem) + const expectedBody = 'hello' + const requestChunks = [] + const responseBody = [] + const buf = Buffer.from(expectedBody) + const body = new ArrayBuffer(buf.byteLength) + + buf.copy(new Uint8Array(body)) + + server.on('stream', async (stream, headers) => { + t.equal(headers[':method'], 'POST') + t.equal(headers[':path'], '/') + t.equal(headers[':scheme'], 'https') + + stream.on('data', chunk => requestChunks.push(chunk)) + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': headers['x-my-header'], + ':status': 200 + }) + + stream.end('hello h2!') + }) + + t.plan(8) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'POST', + headers: { + 'x-my-header': 'foo' + }, + body + }) + + response.body.on('data', chunk => { + responseBody.push(chunk) + }) + + await once(response.body, 'end') + + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'foo') + t.equal(Buffer.concat(responseBody).toString('utf-8'), 'hello h2!') + t.equal(Buffer.concat(requestChunks).toString('utf-8'), expectedBody) + } +) + +test('Agent should support H2 connection', async t => { + const body = [] + const server = createSecureServer(pem) + + server.on('stream', (stream, headers) => { + t.equal(headers['x-my-header'], 'foo') + t.equal(headers[':method'], 'GET') + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + 'x-custom-h2': 'hello', + ':status': 200 + }) + stream.end('hello h2!') + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Agent({ + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(6) + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + origin: `https://localhost:${server.address().port}`, + path: '/', + method: 'GET', + headers: { + 'x-my-header': 'foo' + } + }) + + response.body.on('data', chunk => { + body.push(chunk) + }) + + 
await once(response.body, 'end') + t.equal(response.statusCode, 200) + t.equal(response.headers['content-type'], 'text/plain; charset=utf-8') + t.equal(response.headers['x-custom-h2'], 'hello') + t.equal(Buffer.concat(body).toString('utf8'), 'hello h2!') +}) + +test( + 'Should provide pseudo-headers in proper order', + { skip: !hasPseudoHeadersOrderFix }, + async t => { + const server = createSecureServer(pem) + server.on('stream', (stream, _headers, _flags, rawHeaders) => { + t.same(rawHeaders, [ + ':authority', + `localhost:${server.address().port}`, + ':method', + 'GET', + ':path', + '/', + ':scheme', + 'https' + ]) + + stream.respond({ + 'content-type': 'text/plain; charset=utf-8', + ':status': 200 + }) + stream.end() + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'GET' + }) + + t.equal(response.statusCode, 200) + } +) + +test('The h2 pseudo-headers is not included in the headers', async t => { + const server = createSecureServer(pem) + + server.on('stream', (stream, headers) => { + stream.respond({ + ':status': 200 + }) + stream.end('hello h2!') + }) + + server.listen(0) + await once(server, 'listening') + + const client = new Client(`https://localhost:${server.address().port}`, { + connect: { + rejectUnauthorized: false + }, + allowH2: true + }) + + t.plan(2) + t.teardown(server.close.bind(server)) + t.teardown(client.close.bind(client)) + + const response = await client.request({ + path: '/', + method: 'GET' + }) + + await response.body.text() + + t.equal(response.statusCode, 200) + t.equal(response.headers[':status'], undefined) +}) diff --git a/test/https.js b/test/https.js new file mode 100644 index 0000000..1ba492c --- /dev/null +++ b/test/https.js @@ -0,0 +1,74 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('https') +const pem = require('https-pem') + +test('https get with tls opts', (t) => { + t.plan(6) + + const server = createServer(pem, (req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`https://localhost:${server.address().port}`, { + tls: { + rejectUnauthorized: false + } + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('https get with tls opts ip', (t) => { + t.plan(6) + + const server = createServer(pem, (req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`https://127.0.0.1:${server.address().port}`, { + tls: { + rejectUnauthorized: false + } + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + 
t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) diff --git a/test/imports/undici-import.ts b/test/imports/undici-import.ts new file mode 100644 index 0000000..fb7344e --- /dev/null +++ b/test/imports/undici-import.ts @@ -0,0 +1,5 @@ +import { request } from '../../' + +async function exampleCode() { + await request('http://localhost:3000/foo') +} diff --git a/test/inflight-and-close.js b/test/inflight-and-close.js new file mode 100644 index 0000000..49fbb10 --- /dev/null +++ b/test/inflight-and-close.js @@ -0,0 +1,31 @@ +'use strict' + +const t = require('tap') +const { request } = require('..') +const http = require('http') + +const server = http.createServer((req, res) => { + res.writeHead(200) + res.end('Response body') + res.socket.end() // Close the connection immediately with every response +}).listen(0, '127.0.0.1', function () { + const url = `http://127.0.0.1:${this.address().port}` + request(url) + .then(({ statusCode, headers, body }) => { + t.pass('first response') + body.resume() + body.on('close', function () { + t.pass('first body closed') + }) + return request(url) + .then(({ statusCode, headers, body }) => { + t.pass('second response') + body.resume() + body.on('close', function () { + server.close() + }) + }) + }).catch((err) => { + t.error(err) + }) +}) diff --git a/test/invalid-headers.js b/test/invalid-headers.js new file mode 100644 index 0000000..caf9e0a --- /dev/null +++ b/test/invalid-headers.js @@ -0,0 +1,108 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') + +test('invalid headers', (t) => { + t.plan(10) + + const client = new Client('http://localhost:3000') + t.teardown(client.destroy.bind(client)) + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 'asd' + } + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: 1 + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'transfer-encoding': 'chunked' + } + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + upgrade: 'asd' + } + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + connection: 'asd' + } + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'keep-alive': 'timeout=5' + } + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + foo: {} + } + }, (err, data) => { + t.type(err, errors.InvalidArgumentError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + expect: '100-continue' + } + }, (err, data) => { + t.type(err, errors.NotSupportedError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + Expect: '100-continue' + } + }, (err, data) => { + t.type(err, errors.NotSupportedError) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + expect: 'asd' + } + }, (err, data) => { + t.type(err, errors.NotSupportedError) + }) +}) diff --git a/test/issue-1670.js b/test/issue-1670.js new file mode 100644 
index 0000000..c27bdb2 --- /dev/null +++ b/test/issue-1670.js @@ -0,0 +1,12 @@ +'use strict' + +const { test } = require('tap') +const { request } = require('..') + +test('https://github.com/mcollina/undici/issues/810', async (t) => { + const { body } = await request('https://api.github.com/user/emails') + + await body.text() + + t.end() +}) diff --git a/test/issue-1903.js b/test/issue-1903.js new file mode 100644 index 0000000..76ac81e --- /dev/null +++ b/test/issue-1903.js @@ -0,0 +1,78 @@ +'use strict' + +const { createServer } = require('http') +const { test } = require('tap') +const { request } = require('..') +const { nodeMajor } = require('../lib/core/util') + +function createPromise () { + const result = {} + result.promise = new Promise((resolve) => { + result.resolve = resolve + }) + return result +} + +test('should parse content-disposition consistently', { skip: nodeMajor >= 18 }, async (t) => { + t.plan(5) + + // create promise to allow server spinup in parallel + const spinup1 = createPromise() + const spinup2 = createPromise() + const spinup3 = createPromise() + + // variables to store content-disposition header + const header = [] + + const server = createServer((req, res) => { + res.writeHead(200, { + 'content-length': 2, + 'content-disposition': "attachment; filename='år.pdf'" + }) + header.push("attachment; filename='år.pdf'") + res.end('OK', spinup1.resolve) + }) + t.teardown(server.close.bind(server)) + server.listen(0, spinup1.resolve) + + const proxy1 = createServer(async (req, res) => { + const { statusCode, headers, body } = await request(`http://localhost:${server.address().port}`, { + method: 'GET' + }) + header.push(headers['content-disposition']) + delete headers['transfer-encoding'] + res.writeHead(statusCode, headers) + body.pipe(res) + }) + t.teardown(proxy1.close.bind(proxy1)) + proxy1.listen(0, spinup2.resolve) + + const proxy2 = createServer(async (req, res) => { + const { statusCode, headers, body } = await request(`http://localhost:${proxy1.address().port}`, { + method: 'GET' + }) + header.push(headers['content-disposition']) + delete headers['transfer-encoding'] + res.writeHead(statusCode, headers) + body.pipe(res) + }) + t.teardown(proxy2.close.bind(proxy2)) + proxy2.listen(0, spinup3.resolve) + + // wait until all server spinup + await Promise.all([spinup1.promise, spinup2.promise, spinup3.promise]) + + const { statusCode, headers, body } = await request(`http://localhost:${proxy2.address().port}`, { + method: 'GET' + }) + header.push(headers['content-disposition']) + t.equal(statusCode, 200) + t.equal(await body.text(), 'OK') + + // we check header + // must not be the same in first proxy + t.notSame(header[0], header[1]) + // chaining always the same value + t.equal(header[1], header[2]) + t.equal(header[2], header[3]) +}) diff --git a/test/issue-2065.js b/test/issue-2065.js new file mode 100644 index 0000000..cc288c4 --- /dev/null +++ b/test/issue-2065.js @@ -0,0 +1,71 @@ +'use strict' + +const { test, skip } = require('tap') +const { nodeMajor, nodeMinor } = require('../lib/core/util') +const { createServer } = require('http') +const { once } = require('events') +const { createReadStream } = require('fs') +const { File, FormData, request } = require('..') + +if (nodeMajor < 16 || (nodeMajor === 16 && nodeMinor < 8)) { + skip('FormData is not available in node < v16.8.0') + process.exit() +} + +test('undici.request with a FormData body should set content-length header', async (t) => { + const server = createServer((req, res) => {
t.ok(req.headers['content-length']) + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + const body = new FormData() + body.set('file', new File(['abc'], 'abc.txt')) + + await request(`http://localhost:${server.address().port}`, { + method: 'POST', + body + }) +}) + +test('undici.request with a FormData stream value should set transfer-encoding header', async (t) => { + const server = createServer((req, res) => { + t.equal(req.headers['transfer-encoding'], 'chunked') + res.end() + }).listen(0) + + t.teardown(server.close.bind(server)) + await once(server, 'listening') + + class BlobFromStream { + #stream + #type + constructor (stream, type) { + this.#stream = stream + this.#type = type + } + + stream () { + return this.#stream + } + + get type () { + return this.#type + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + } + + const body = new FormData() + const fileReadable = createReadStream(__filename) + body.set('file', new BlobFromStream(fileReadable, '.js'), 'streamfile') + + await request(`http://localhost:${server.address().port}`, { + method: 'POST', + body + }) +}) diff --git a/test/issue-2078.js b/test/issue-2078.js new file mode 100644 index 0000000..d3aa868 --- /dev/null +++ b/test/issue-2078.js @@ -0,0 +1,30 @@ +'use strict' + +const { test, skip } = require('tap') +const { nodeMajor, nodeMinor } = require('../lib/core/util') +const { MockAgent, getGlobalDispatcher, setGlobalDispatcher, fetch } = require('..') + +if (nodeMajor < 16 || (nodeMajor === 16 && nodeMinor < 8)) { + skip('fetch is not supported in node < v16.8.0') + process.exit() +} + +test('MockPool.reply headers are an object, not an array - issue #2078', async (t) => { + const global = getGlobalDispatcher() + const mockAgent = new MockAgent() + const mockPool = mockAgent.get('http://localhost') + + t.teardown(() => setGlobalDispatcher(global)) + setGlobalDispatcher(mockAgent) + + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply((options) => { + t.ok(!Array.isArray(options.headers)) + + return { statusCode: 200 } + }) + + await t.resolves(fetch('http://localhost/foo')) +}) diff --git a/test/issue-2349.js b/test/issue-2349.js new file mode 100644 index 0000000..a82bb74 --- /dev/null +++ b/test/issue-2349.js @@ -0,0 +1,53 @@ +'use strict' + +const { test, skip } = require('tap') +const { nodeMajor } = require('../lib/core/util') +const { Writable } = require('stream') +const { MockAgent, errors, stream } = require('..') + +if (nodeMajor < 16) { + skip('only for node 16') + process.exit(0) +} + +test('stream() does not fail after request has been aborted', async (t) => { + t.plan(1) + + const mockAgent = new MockAgent() + + mockAgent.disableNetConnect() + mockAgent + .get('http://localhost:3333') + .intercept({ + path: '/' + }) + .reply(200, 'ok') + .delay(10) + + const parts = [] + const ac = new AbortController() + + setTimeout(() => ac.abort('nevermind'), 5) + + try { + await stream( + 'http://localhost:3333/', + { + opaque: { parts }, + signal: ac.signal, + dispatcher: mockAgent + }, + ({ opaque: { parts } }) => { + return new Writable({ + write (chunk, _encoding, callback) { + parts.push(chunk) + callback() + } + }) + } + ) + } catch (error) { + console.log(error) + t.equal(error instanceof errors.RequestAbortedError, true) + } +}) diff --git a/test/issue-803.js b/test/issue-803.js new file mode 100644 index 0000000..70f64cc --- /dev/null +++ b/test/issue-803.js @@ -0,0 +1,47 @@ +'use strict' + +const { test } = require('tap') +const { 
Client } = require('..') +const { createServer } = require('http') +const EE = require('events') + +test('https://github.com/nodejs/undici/issues/803', (t) => { + t.plan(2) + + const SIZE = 5900373096 + + const server = createServer(async (req, res) => { + res.setHeader('content-length', SIZE) + let pos = 0 + while (pos < SIZE) { + const len = Math.min(SIZE - pos, 65536) + if (!res.write(Buffer.allocUnsafe(len))) { + await EE.once(res, 'drain') + } + pos += len + } + + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + + let pos = 0 + data.body.on('data', (buf) => { + pos += buf.length + }) + data.body.on('end', () => { + t.equal(pos, SIZE) + }) + }) + }) +}) diff --git a/test/issue-810.js b/test/issue-810.js new file mode 100644 index 0000000..226a5aa --- /dev/null +++ b/test/issue-810.js @@ -0,0 +1,135 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const net = require('net') + +test('https://github.com/mcollina/undici/issues/810', (t) => { + t.plan(3) + + let x = 0 + const server = net.createServer(socket => { + if (x++ === 0) { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 1\r\n\r\n') + socket.write('11111\r\n') + } else { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 0\r\n\r\n') + socket.write('\r\n') + } + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { pipelining: 2 }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume().on('end', () => { + // t.fail() FIX: Should fail. + t.pass() + }).on('error', err => ( + t.type(err, errors.HTTPParserError) + )) + }) + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.type(err, errors.HTTPParserError) + }) + }) +}) + +test('https://github.com/mcollina/undici/issues/810 no pipelining', (t) => { + t.plan(2) + + const server = net.createServer(socket => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 1\r\n\r\n') + socket.write('11111\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume().on('end', () => { + // t.fail() FIX: Should fail. + t.pass() + }) + }) + }) +}) + +test('https://github.com/mcollina/undici/issues/810 pipelining', (t) => { + t.plan(2) + + const server = net.createServer(socket => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 1\r\n\r\n') + socket.write('11111\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { pipelining: true }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume().on('end', () => { + // t.fail() FIX: Should fail. 
+ t.pass() + }) + }) + }) +}) + +test('https://github.com/mcollina/undici/issues/810 pipelining 2', (t) => { + t.plan(4) + + const server = net.createServer(socket => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Content-Length: 1\r\n\r\n') + socket.write('11111\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { pipelining: true }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.resume().on('end', () => { + // t.fail() FIX: Should fail. + t.pass() + }) + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.equal(err.code, 'HPE_INVALID_CONSTANT') + t.type(err, errors.HTTPParserError) + }) + }) +}) diff --git a/test/jest/instanceof-error.test.js b/test/jest/instanceof-error.test.js new file mode 100644 index 0000000..8bb36d2 --- /dev/null +++ b/test/jest/instanceof-error.test.js @@ -0,0 +1,44 @@ +'use strict' + +const { createServer } = require('http') +const { once } = require('events') + +/* global expect, it, jest, AbortController */ + +// https://github.com/facebook/jest/issues/11607#issuecomment-899068995 +jest.useRealTimers() + +const runIf = (condition) => condition ? it : it.skip +const nodeMajor = Number(process.versions.node.split('.', 1)[0]) + +runIf(nodeMajor >= 16)('isErrorLike sanity check', () => { + const { isErrorLike } = require('../../lib/fetch/util') + const { DOMException } = require('../../lib/fetch/constants') + const error = new DOMException('') + + // https://github.com/facebook/jest/issues/2549 + expect(error instanceof Error).toBeFalsy() + expect(isErrorLike(error)).toBeTruthy() +}) + +runIf(nodeMajor >= 16)('Real use-case', async () => { + const { fetch } = require('../..') + + const ac = new AbortController() + ac.abort() + + const server = createServer((req, res) => { + res.end() + }).listen(0) + + await once(server, 'listening') + + const promise = fetch(`https://localhost:${server.address().port}`, { + signal: ac.signal + }) + + await expect(promise).rejects.toThrowError(/^Th(e|is) operation was aborted\.?$/) + + server.close() + await once(server, 'close') +}) diff --git a/test/jest/interceptor.test.js b/test/jest/interceptor.test.js new file mode 100644 index 0000000..73d70b7 --- /dev/null +++ b/test/jest/interceptor.test.js @@ -0,0 +1,197 @@ +'use strict' + +const { createServer } = require('http') +const { Agent, request } = require('../../index') +const DecoratorHandler = require('../../lib/handler/DecoratorHandler') +/* global expect */ + +const defaultOpts = { keepAliveTimeout: 10, keepAliveMaxTimeout: 10 } + +describe('interceptors', () => { + let server + beforeEach(async () => { + server = createServer((req, res) => { + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + await new Promise((resolve) => { server.listen(0, resolve) }) + }) + afterEach(async () => { + await new Promise((resolve) => server.close(resolve)) + }) + + test('interceptors are applied on client from an agent', async () => { + const interceptors = [] + const buildInterceptor = dispatch => { + const interceptorContext = { requestCount: 0 } + interceptors.push(interceptorContext) + return (opts, handler) => { + interceptorContext.requestCount++ + return dispatch(opts, handler) + } + } + + const opts = { interceptors: { Client: [buildInterceptor] }, ...defaultOpts } + const agent = new Agent(opts) + const origin = new 
URL(`http://localhost:${server.address().port}`) + await Promise.all([ + request(origin, { dispatcher: agent }), + request(origin, { dispatcher: agent }) + ]) + + // Assert that the requests are run on different interceptors (different Clients) + const requestCounts = interceptors.map(x => x.requestCount) + expect(requestCounts).toEqual([1, 1]) + }) + + test('interceptors are applied in the correct order', async () => { + const setHeaderInterceptor = (dispatch) => { + return (opts, handler) => { + opts.headers.push('foo', 'bar') + return dispatch(opts, handler) + } + } + + const assertHeaderInterceptor = (dispatch) => { + return (opts, handler) => { + expect(opts.headers).toEqual(['foo', 'bar']) + return dispatch(opts, handler) + } + } + + const opts = { interceptors: { Pool: [setHeaderInterceptor, assertHeaderInterceptor] }, ...defaultOpts } + const agent = new Agent(opts) + const origin = new URL(`http://localhost:${server.address().port}`) + await request(origin, { dispatcher: agent, headers: [] }) + }) + + test('interceptors handlers are called in reverse order', async () => { + const clearResponseHeadersInterceptor = (dispatch) => { + return (opts, handler) => { + class ResultInterceptor extends DecoratorHandler { + onHeaders (statusCode, headers, resume) { + return super.onHeaders(statusCode, [], resume) + } + } + + return dispatch(opts, new ResultInterceptor(handler)) + } + } + + const assertHeaderInterceptor = (dispatch) => { + return (opts, handler) => { + class ResultInterceptor extends DecoratorHandler { + onHeaders (statusCode, headers, resume) { + expect(headers).toEqual([]) + return super.onHeaders(statusCode, headers, resume) + } + } + + return dispatch(opts, new ResultInterceptor(handler)) + } + } + + const opts = { interceptors: { Agent: [assertHeaderInterceptor, clearResponseHeadersInterceptor] }, ...defaultOpts } + const agent = new Agent(opts) + const origin = new URL(`http://localhost:${server.address().port}`) + await request(origin, { dispatcher: agent, headers: [] }) + }) +}) + +describe('interceptors with NtlmRequestHandler', () => { + class FakeNtlmRequestHandler { + constructor (dispatch, opts, handler) { + this.dispatch = dispatch + this.opts = opts + this.handler = handler + this.requestCount = 0 + } + + onConnect (...args) { + return this.handler.onConnect(...args) + } + + onError (...args) { + return this.handler.onError(...args) + } + + onUpgrade (...args) { + return this.handler.onUpgrade(...args) + } + + onHeaders (statusCode, headers, resume, statusText) { + this.requestCount++ + if (this.requestCount < 2) { + // Do nothing + } else { + return this.handler.onHeaders(statusCode, headers, resume, statusText) + } + } + + onData (...args) { + if (this.requestCount < 2) { + // Do nothing + } else { + return this.handler.onData(...args) + } + } + + onComplete (...args) { + if (this.requestCount < 2) { + this.dispatch(this.opts, this) + } else { + return this.handler.onComplete(...args) + } + } + + onBodySent (...args) { + if (this.requestCount < 2) { + // Do nothing + } else { + return this.handler.onBodySent(...args) + } + } + } + let server + + beforeEach(async () => { + // This Test is important because NTLM and Negotiate require several + // http requests in sequence to run on the same keepAlive socket + + const socketRequestCountSymbol = Symbol('Socket Request Count') + server = createServer((req, res) => { + if (req.socket[socketRequestCountSymbol] === undefined) { + req.socket[socketRequestCountSymbol] = 0 + } + req.socket[socketRequestCountSymbol]++ + 
res.setHeader('Content-Type', 'text/plain') + + // Simulate NTLM/Negotiate logic, by returning 200 + // on the second request of each socket + if (req.socket[socketRequestCountSymbol] >= 2) { + res.statusCode = 200 + res.end() + } else { + res.statusCode = 401 + res.end() + } + }) + await new Promise((resolve) => { server.listen(0, resolve) }) + }) + afterEach(async () => { + await new Promise((resolve) => server.close(resolve)) + }) + + test('Retry interceptor on Client will use the same socket', async () => { + const interceptor = dispatch => { + return (opts, handler) => { + return dispatch(opts, new FakeNtlmRequestHandler(dispatch, opts, handler)) + } + } + const opts = { interceptors: { Client: [interceptor] }, ...defaultOpts } + const agent = new Agent(opts) + const origin = new URL(`http://localhost:${server.address().port}`) + const { statusCode } = await request(origin, { dispatcher: agent, headers: [] }) + expect(statusCode).toEqual(200) + }) +}) diff --git a/test/jest/issue-1757.test.js b/test/jest/issue-1757.test.js new file mode 100644 index 0000000..b6519d9 --- /dev/null +++ b/test/jest/issue-1757.test.js @@ -0,0 +1,61 @@ +'use strict' + +const { Dispatcher, setGlobalDispatcher, MockAgent } = require('../..') + +/* global expect, it */ + +class MiniflareDispatcher extends Dispatcher { + constructor (inner, options) { + super(options) + this.inner = inner + } + + dispatch (options, handler) { + return this.inner.dispatch(options, handler) + } + + close (...args) { + return this.inner.close(...args) + } + + destroy (...args) { + return this.inner.destroy(...args) + } +} + +const runIf = (condition) => condition ? it : it.skip +const nodeMajor = Number(process.versions.node.split('.', 1)[0]) + +runIf(nodeMajor >= 16)('https://github.com/nodejs/undici/issues/1757', async () => { + // fetch isn't exported in <16.8 + const { fetch } = require('../..') + + const mockAgent = new MockAgent() + const mockClient = mockAgent.get('http://localhost:3000') + mockAgent.disableNetConnect() + setGlobalDispatcher(new MiniflareDispatcher(mockAgent)) + + mockClient.intercept({ + path: () => true, + method: () => true + }).reply(200, async (opts) => { + if (opts.body?.[Symbol.asyncIterator]) { + const chunks = [] + for await (const chunk of opts.body) { + chunks.push(chunk) + } + + return Buffer.concat(chunks) + } + + return opts.body + }) + + const response = await fetch('http://localhost:3000', { + method: 'POST', + body: JSON.stringify({ foo: 'bar' }) + }) + + expect(response.json()).resolves.toMatchObject({ foo: 'bar' }) + expect(response.status).toBe(200) +}) diff --git a/test/jest/mock-agent.test.js b/test/jest/mock-agent.test.js new file mode 100644 index 0000000..6f6bac2 --- /dev/null +++ b/test/jest/mock-agent.test.js @@ -0,0 +1,46 @@ +'use strict' + +const { request, setGlobalDispatcher, MockAgent } = require('../..') +const { getResponse } = require('../../lib/mock/mock-utils') + +/* global describe, it, expect */ + +describe('MockAgent', () => { + let mockAgent + + afterEach(() => { + mockAgent.close() + }) + + it('should work in jest', async () => { + expect.assertions(4) + + const baseUrl = 'http://localhost:9999' + + mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + const mockClient = mockAgent.get(baseUrl) + + mockClient.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { + 'content-type': 'application/json' + }, + trailers: { 'Content-MD5': 'test' } + }) + + const { 
statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2' + }) + expect(statusCode).toBe(200) + expect(headers).toEqual({ 'content-type': 'application/json' }) + expect(trailers).toEqual({ 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + expect(jsonResponse).toEqual({ foo: 'bar' }) + }) +}) diff --git a/test/jest/mock-scope.test.js b/test/jest/mock-scope.test.js new file mode 100644 index 0000000..cab77f6 --- /dev/null +++ b/test/jest/mock-scope.test.js @@ -0,0 +1,32 @@ +const { MockAgent, setGlobalDispatcher, request } = require('../../index') + +/* global afterAll, expect, it, AbortController */ + +const runIf = (condition) => condition ? it : it.skip + +const nodeMajor = Number(process.versions.node.split('.', 1)[0]) +const mockAgent = new MockAgent() + +afterAll(async () => { + await mockAgent.close() +}) + +runIf(nodeMajor >= 16)('Jest works with MockScope.delay - issue #1327', async () => { + mockAgent.disableNetConnect() + setGlobalDispatcher(mockAgent) + + const mockPool = mockAgent.get('http://localhost:3333') + + mockPool.intercept({ + path: '/jest-bugs', + method: 'GET' + }).reply(200, 'Hello').delay(100) + + const ac = new AbortController() + setTimeout(() => ac.abort(), 5) + const promise = request('http://localhost:3333/jest-bugs', { + signal: ac.signal + }) + + await expect(promise).rejects.toThrowError('Request aborted') +}, 1000) diff --git a/test/jest/test.js b/test/jest/test.js new file mode 100644 index 0000000..079a41f --- /dev/null +++ b/test/jest/test.js @@ -0,0 +1,36 @@ +'use strict' + +const { Client } = require('../..') +const { createServer } = require('http') +/* global test, expect */ + +test('should work in jest', async () => { + const server = createServer((req, res) => { + expect(req.url).toBe('/') + expect(req.method).toBe('POST') + expect(req.headers.host).toBe(`localhost:${server.address().port}`) + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + await expect(new Promise((resolve, reject) => { + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + client.request({ + path: '/', + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: '{}' + }, (err, result) => { + server.close() + client.close() + if (err) { + reject(err) + } else { + resolve(result.body.text()) + } + }) + }) + })).resolves.toBe('hello') +}) diff --git a/test/max-headers.js b/test/max-headers.js new file mode 100644 index 0000000..a08b931 --- /dev/null +++ b/test/max-headers.js @@ -0,0 +1,41 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') + +test('handle a lot of headers', (t) => { + t.plan(3) + + const headers = {} + for (let n = 0; n < 64; ++n) { + headers[n] = String(n) + } + + const server = createServer((req, res) => { + res.writeHead(200, headers) + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + const headers2 = {} + for (let n = 0; n < 64; ++n) { + headers2[n] = data.headers[n] + } + t.strictSame(headers2, headers) + data.body + .resume() + .on('end', () => { + t.pass() + }) + }) + }) +}) diff --git a/test/max-response-size.js b/test/max-response-size.js 
new file mode 100644 index 0000000..75bfade --- /dev/null +++ b/test/max-response-size.js @@ -0,0 +1,105 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const { createServer } = require('http') + +test('max response size', (t) => { + t.plan(4) + + t.test('default max default size should allow all responses', (t) => { + t.plan(3) + + const server = createServer() + t.teardown(server.close.bind(server)) + + server.on('request', (req, res) => { + res.end('hello') + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { maxResponseSize: -1 }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + }) + + t.test('max response size set to zero should allow only empty responses', (t) => { + t.plan(3) + + const server = createServer() + t.teardown(server.close.bind(server)) + + server.on('request', (req, res) => { + res.end() + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { maxResponseSize: 0 }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + }) + + t.test('should throw an error if the response is too big', (t) => { + t.plan(3) + + const server = createServer() + t.teardown(server.close.bind(server)) + + server.on('request', (req, res) => { + res.end('hello') + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + maxResponseSize: 1 + }) + + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { body }) => { + t.error(err) + body.on('error', (err) => { + t.ok(err) + t.type(err, errors.ResponseExceededMaxSizeError) + }) + }) + }) + }) + + t.test('invalid max response size should throw an error', (t) => { + t.plan(2) + + t.throws(() => { + // eslint-disable-next-line no-new + new Client('http://localhost:3000', { maxResponseSize: 'hello' }) + }, 'maxResponseSize must be a number') + t.throws(() => { + // eslint-disable-next-line no-new + new Client('http://localhost:3000', { maxResponseSize: -2 }) + }, 'maxResponseSize must be greater than or equal to -1') + }) +}) diff --git a/test/mock-agent.js b/test/mock-agent.js new file mode 100644 index 0000000..c9ffda4 --- /dev/null +++ b/test/mock-agent.js @@ -0,0 +1,2637 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { promisify } = require('util') +const { request, setGlobalDispatcher, MockAgent, Agent } = require('..') +const { getResponse } = require('../lib/mock/mock-utils') +const { kClients, kConnected } = require('../lib/core/symbols') +const { InvalidArgumentError, ClientDestroyedError } = require('../lib/core/errors') +const { nodeMajor } = require('../lib/core/util') +const MockClient = require('../lib/mock/mock-client') +const MockPool = require('../lib/mock/mock-pool') +const { kAgent } = require('../lib/mock/mock-symbols') +const Dispatcher = require('../lib/dispatcher') +const { 
MockNotMatchedError } = require('../lib/mock/mock-errors') + +test('MockAgent - constructor', t => { + t.plan(5) + + t.test('sets up mock agent', t => { + t.plan(1) + t.doesNotThrow(() => new MockAgent()) + }) + + t.test('should implement the Dispatcher API', t => { + t.plan(1) + + const mockAgent = new MockAgent() + t.type(mockAgent, Dispatcher) + }) + + t.test('sets up mock agent with single connection', t => { + t.plan(1) + t.doesNotThrow(() => new MockAgent({ connections: 1 })) + }) + + t.test('should error passed agent is not valid', t => { + t.plan(2) + t.throws(() => new MockAgent({ agent: {} }), new InvalidArgumentError('Argument opts.agent must implement Agent')) + t.throws(() => new MockAgent({ agent: { dispatch: '' } }), new InvalidArgumentError('Argument opts.agent must implement Agent')) + }) + + t.test('should be able to specify the agent to mock', t => { + t.plan(1) + const agent = new Agent() + t.teardown(agent.close.bind(agent)) + const mockAgent = new MockAgent({ agent }) + + t.equal(mockAgent[kAgent], agent) + }) +}) + +test('MockAgent - get', t => { + t.plan(3) + + t.test('should return MockClient', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + }) + + t.test('should return MockPool', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + t.type(mockPool, MockPool) + }) + + t.test('should return the same instance if already created', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool1 = mockAgent.get(baseUrl) + const mockPool2 = mockAgent.get(baseUrl) + t.equal(mockPool1, mockPool2) + }) +}) + +test('MockAgent - dispatch', t => { + t.plan(3) + + t.test('should call the dispatch method of the MockPool', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'hello') + + t.doesNotThrow(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onHeaders: (_statusCode, _headers, resume) => resume(), + onData: () => {}, + onComplete: () => {}, + onError: () => {} + })) + }) + + t.test('should call the dispatch method of the MockClient', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + + mockClient.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'hello') + + t.doesNotThrow(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onHeaders: (_statusCode, _headers, resume) => resume(), + onData: () => {}, + onComplete: () => {}, + onError: () => {} + })) + }) + + t.test('should throw if handler is not valid on redirect', (t) => { + t.plan(7) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + t.throws(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onError: 'INVALID' + }), 
new InvalidArgumentError('invalid onError method')) + + t.throws(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onError: (err) => { throw err }, + onConnect: 'INVALID' + }), new InvalidArgumentError('invalid onConnect method')) + + t.throws(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onError: (err) => { throw err }, + onConnect: () => {}, + onBodySent: 'INVALID' + }), new InvalidArgumentError('invalid onBodySent method')) + + t.throws(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'CONNECT' + }, { + onError: (err) => { throw err }, + onConnect: () => {}, + onBodySent: () => {}, + onUpgrade: 'INVALID' + }), new InvalidArgumentError('invalid onUpgrade method')) + + t.throws(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onError: (err) => { throw err }, + onConnect: () => {}, + onBodySent: () => {}, + onHeaders: 'INVALID' + }), new InvalidArgumentError('invalid onHeaders method')) + + t.throws(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onError: (err) => { throw err }, + onConnect: () => {}, + onBodySent: () => {}, + onHeaders: () => {}, + onData: 'INVALID' + }), new InvalidArgumentError('invalid onData method')) + + t.throws(() => mockAgent.dispatch({ + origin: baseUrl, + path: '/foo', + method: 'GET' + }, { + onError: (err) => { throw err }, + onConnect: () => {}, + onBodySent: () => {}, + onHeaders: () => {}, + onData: () => {}, + onComplete: 'INVALID' + }), new InvalidArgumentError('invalid onComplete method')) + }) +}) + +test('MockAgent - .close should clean up registered pools', async (t) => { + t.plan(5) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + + // Register a pool + const mockPool = mockAgent.get(baseUrl) + t.type(mockPool, MockPool) + + t.equal(mockPool[kConnected], 1) + t.equal(mockAgent[kClients].size, 1) + await mockAgent.close() + t.equal(mockPool[kConnected], 0) + t.equal(mockAgent[kClients].size, 0) +}) + +test('MockAgent - .close should clean up registered clients', async (t) => { + t.plan(5) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent({ connections: 1 }) + + // Register a pool + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + + t.equal(mockClient[kConnected], 1) + t.equal(mockAgent[kClients].size, 1) + await mockAgent.close() + t.equal(mockClient[kConnected], 0) + t.equal(mockAgent[kClients].size, 0) +}) + +test('MockAgent - [kClients] should match encapsulated agent', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const agent = new Agent() + t.teardown(agent.close.bind(agent)) + + const mockAgent = new MockAgent({ agent }) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'hello') + + // The MockAgent should encapsulate the input agent clients + t.equal(mockAgent[kClients].size, agent[kClients].size) +}) + +test('MockAgent - basic intercept with MockAgent.request', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be 
called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + const mockPool = mockAgent.get(baseUrl) + + mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } + }) + + const { statusCode, headers, trailers, body } = await mockAgent.request({ + origin: baseUrl, + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) +}) + +test('MockAgent - basic intercept with request', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + const mockPool = mockAgent.get(baseUrl) + + mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } + }) + + const { statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) +}) + +test('MockAgent - should support local agents', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + + t.teardown(mockAgent.close.bind(mockAgent)) + const mockPool = mockAgent.get(baseUrl) + + mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { + 'content-type': 'application/json' + }, + trailers: { 'Content-MD5': 'test' } + }) + + const { statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2', + dispatcher: mockAgent + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) +}) + +test('MockAgent - should support specifying custom agents to mock', async (t) => { + t.plan(4) + + const server = createServer((req, res) 
=> { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const agent = new Agent() + t.teardown(agent.close.bind(agent)) + + const mockAgent = new MockAgent({ agent }) + setGlobalDispatcher(mockAgent) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { + 'content-type': 'application/json' + }, + trailers: { 'Content-MD5': 'test' } + }) + + const { statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) +}) + +test('MockAgent - basic Client intercept with request', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + mockClient.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { + 'content-type': 'application/json' + }, + trailers: { 'Content-MD5': 'test' } + }) + + const { statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) +}) + +test('MockAgent - basic intercept with multiple pools', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + const mockPool1 = mockAgent.get(baseUrl) + const mockPool2 = mockAgent.get('http://localhost:9999') + + mockPool1.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar-1' }, { + headers: { + 'content-type': 'application/json' + }, + trailers: { 'Content-MD5': 'test' } + }) + + mockPool2.intercept({ + path: '/foo?hello=there&see=ya', + method: 'GET', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar-2' }) + + const { statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2' + }) + 
t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar-1' + }) +}) + +test('MockAgent - should handle multiple responses for an interceptor', async (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + const interceptor = mockPool.intercept({ + path: '/foo', + method: 'POST' + }) + interceptor.reply(200, { foo: 'bar' }, { + headers: { + 'content-type': 'application/json' + } + }) + interceptor.reply(200, { hello: 'there' }, { + headers: { + 'content-type': 'application/json' + } + }) + + { + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'POST' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) + } + + { + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'POST' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + hello: 'there' + }) + } +}) + +test('MockAgent - should call original Pool dispatch if request not found', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - should call original Client dispatch if request not found', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - should handle string responses', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + 
t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'POST' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'POST' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - should handle basic concurrency for requests', { jobs: 5 }, async (t) => { + t.plan(5) + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + await Promise.all([...Array(5).keys()].map(idx => + t.test(`concurrent job (${idx})`, async (innerTest) => { + innerTest.plan(2) + + const baseUrl = 'http://localhost:9999' + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'POST' + }).reply(200, { foo: `bar ${idx}` }) + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'POST' + }) + innerTest.equal(statusCode, 200) + + const jsonResponse = JSON.parse(await getResponse(body)) + innerTest.same(jsonResponse, { + foo: `bar ${idx}` + }) + }) + )) +}) + +test('MockAgent - handle delays to simulate work', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'POST' + }).reply(200, 'hello').delay(50) + + const start = process.hrtime() + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'POST' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'hello') + const elapsedInMs = process.hrtime(start)[1] / 1e6 + t.ok(elapsedInMs >= 50, `Elapsed time is not greater than 50ms: ${elapsedInMs}`) +}) + +test('MockAgent - should persist requests', async (t) => { + t.plan(8) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { + 'content-type': 'application/json' + }, + trailers: { 'Content-MD5': 'test' } + }).persist() + + { + const { statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 
'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) + } + + { + const { statusCode, headers, trailers, body } = await request(`${baseUrl}/foo?hello=there&see=ya`, { + method: 'POST', + body: 'form1=data1&form2=data2' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) + } +}) + +test('MockAgent - handle persists with delayed requests', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'POST' + }).reply(200, 'hello').delay(1).persist() + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'POST' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'hello') + } + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'POST' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'hello') + } +}) + +test('MockAgent - calling close on a mock pool should not affect other mock pools', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPoolToClose = mockAgent.get('http://localhost:9999') + mockPoolToClose.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'should-not-be-returned') + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo') + mockPool.intercept({ + path: '/bar', + method: 'POST' + }).reply(200, 'bar') + + await mockPoolToClose.close() + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } + + { + const { statusCode, body } = await request(`${baseUrl}/bar`, { + method: 'POST' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'bar') + } +}) + +test('MockAgent - close removes all registered mock clients', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + setGlobalDispatcher(mockAgent) + + const mockClient = mockAgent.get(baseUrl) + 
mockClient.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo') + + await mockAgent.close() + t.equal(mockAgent[kClients].size, 0) + + try { + await request(`${baseUrl}/foo`, { method: 'GET' }) + } catch (err) { + t.type(err, ClientDestroyedError) + } +}) + +test('MockAgent - close removes all registered mock pools', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo') + + await mockAgent.close() + t.equal(mockAgent[kClients].size, 0) + + try { + await request(`${baseUrl}/foo`, { method: 'GET' }) + } catch (err) { + t.type(err, ClientDestroyedError) + } +}) + +test('MockAgent - should handle replyWithError', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).replyWithError(new Error('kaboom')) + + await t.rejects(request(`${baseUrl}/foo`, { method: 'GET' }), new Error('kaboom')) +}) + +test('MockAgent - should support setting a reply to respond a set amount of times', async (t) => { + t.plan(9) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo').times(2) + + { + const { statusCode, body } = await request(`${baseUrl}/foo`) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } + + { + const { statusCode, body } = await request(`${baseUrl}/foo`) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } + + { + const { statusCode, headers, body } = await request(`${baseUrl}/foo`) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') + } +}) + +test('MockAgent - persist overrides times', async (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + 
setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo').times(2).persist() + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } +}) + +test('MockAgent - matcher should not find mock dispatch if path is of unsupported type', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: {}, + method: 'GET' + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - should match path with regex', async (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: /foo/, + method: 'GET' + }).reply(200, 'foo').persist() + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } + + { + const { statusCode, body } = await request(`${baseUrl}/hello/foobar`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } +}) + +test('MockAgent - should match path with function', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: (value) => value === '/foo', + method: 'GET' + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 
'foo') +}) + +test('MockAgent - should match method with regex', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: /^GET$/ + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match method with function', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: (value) => value === 'GET' + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match body with regex', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET', + body: /hello/ + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + body: 'hello=there' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match body with function', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET', + body: (value) => value.startsWith('hello') + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + body: 'hello=there' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match headers 
with string', async (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET', + headers: { + 'User-Agent': 'undici', + Host: 'example.com' + } + }).reply(200, 'foo') + + // Disable net connect so we can make sure it matches properly + mockAgent.disableNetConnect() + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET' + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'wrong' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'example.com' + } + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match headers with regex', async (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET', + headers: { + 'User-Agent': /^undici$/, + Host: /^example.com$/ + } + }).reply(200, 'foo') + + // Disable net connect so we can make sure it matches properly + mockAgent.disableNetConnect() + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET' + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'wrong' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'example.com' + } + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match headers with function', async (t) => { + t.plan(6) + + const server = 
createServer((req, res) => { + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET', + headers: { + 'User-Agent': (value) => value === 'undici', + Host: (value) => value === 'example.com' + } + }).reply(200, 'foo') + + // Disable net connect so we can make sure it matches properly + mockAgent.disableNetConnect() + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET' + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'wrong' + } + }), MockNotMatchedError, 'should reject with MockNotMatchedError') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar', + 'User-Agent': 'undici', + Host: 'example.com' + } + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match url with regex', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(new RegExp(baseUrl)) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - should match url with function', async (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get((value) => baseUrl === value) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - handle default reply headers', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 
'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).defaultReplyHeaders({ foo: 'bar' }).reply(200, 'foo', { headers: { hello: 'there' } }) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.same(headers, { + foo: 'bar', + hello: 'there' + }) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - handle default reply trailers', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).defaultReplyTrailers({ foo: 'bar' }).reply(200, 'foo', { trailers: { hello: 'there' } }) + + const { statusCode, trailers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.same(trailers, { + foo: 'bar', + hello: 'there' + }) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - return calculated content-length if specified', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).replyContentLength().reply(200, 'foo', { headers: { hello: 'there' } }) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.same(headers, { + hello: 'there', + 'content-length': 3 + }) + + const response = await getResponse(body) + t.equal(response, 'foo') +}) + +test('MockAgent - return calculated content-length for object response if specified', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).replyContentLength().reply(200, { foo: 'bar' }, { headers: { hello: 'there' } }) + + const { statusCode, 
headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.same(headers, { + hello: 'there', + 'content-length': 13 + }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { foo: 'bar' }) +}) + +test('MockAgent - should activate and deactivate mock clients', async (t) => { + t.plan(9) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo').persist() + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } + + mockAgent.deactivate() + + { + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') + } + + mockAgent.activate() + + { + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.equal(response, 'foo') + } +}) + +test('MockAgent - enableNetConnect should allow all original dispatches to be called if dispatch not found', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/wrong', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.enableNetConnect() + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - enableNetConnect with a host string should allow all original dispatches to be called if mockDispatch not found', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/wrong', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.enableNetConnect(`localhost:${server.address().port}`) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + 
t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - enableNetConnect when called with host string multiple times should allow all original dispatches to be called if mockDispatch not found', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/wrong', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.enableNetConnect('example.com:9999') + mockAgent.enableNetConnect(`localhost:${server.address().port}`) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - enableNetConnect with a host regex should allow all original dispatches to be called if mockDispatch not found', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/wrong', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.enableNetConnect(new RegExp(`localhost:${server.address().port}`)) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - enableNetConnect with a function should allow all original dispatches to be called if mockDispatch not found', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/wrong', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.enableNetConnect((value) => value === `localhost:${server.address().port}`) + + const { statusCode, headers, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + const response = await getResponse(body) + t.equal(response, 'hello') +}) + +test('MockAgent - enableNetConnect with an unknown input should throw', async (t) => { + t.plan(1) + + const mockAgent = new MockAgent() + 
setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get('http://localhost:9999') + mockPool.intercept({ + path: '/wrong', + method: 'GET' + }).reply(200, 'foo') + + t.throws(() => mockAgent.enableNetConnect({}), new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.')) +}) + +test('MockAgent - enableNetConnect should throw if dispatch not matched for path and the origin was not allowed by net connect', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.fail('should not be called') + t.end() + res.end('should not be called') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.enableNetConnect('example.com:9999') + + await t.rejects(request(`${baseUrl}/wrong`, { + method: 'GET' + }), new MockNotMatchedError(`Mock dispatch not matched for path '/wrong': subsequent request to origin ${baseUrl} was not allowed (net.connect is not enabled for this origin)`)) +}) + +test('MockAgent - enableNetConnect should throw if dispatch not matched for method and the origin was not allowed by net connect', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.fail('should not be called') + t.end() + res.end('should not be called') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.enableNetConnect('example.com:9999') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'WRONG' + }), new MockNotMatchedError(`Mock dispatch not matched for method 'WRONG': subsequent request to origin ${baseUrl} was not allowed (net.connect is not enabled for this origin)`)) +}) + +test('MockAgent - enableNetConnect should throw if dispatch not matched for body and the origin was not allowed by net connect', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.fail('should not be called') + t.end() + res.end('should not be called') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET', + body: 'hello' + }).reply(200, 'foo') + + mockAgent.enableNetConnect('example.com:9999') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + body: 'wrong' + }), new MockNotMatchedError(`Mock dispatch not matched for body 'wrong': subsequent request to origin ${baseUrl} was not allowed (net.connect is not enabled for this origin)`)) +}) + +test('MockAgent - enableNetConnect should throw if dispatch not matched for headers and the origin was not allowed by net connect', async (t) => { + t.plan(1) + + const server = createServer((req, res) => 
{ + t.fail('should not be called') + t.end() + res.end('should not be called') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/foo', + method: 'GET', + headers: { + 'User-Agent': 'undici' + } + }).reply(200, 'foo') + + mockAgent.enableNetConnect('example.com:9999') + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + 'User-Agent': 'wrong' + } + }), new MockNotMatchedError(`Mock dispatch not matched for headers '{"User-Agent":"wrong"}': subsequent request to origin ${baseUrl} was not allowed (net.connect is not enabled for this origin)`)) +}) + +test('MockAgent - disableNetConnect should throw if dispatch not found by net connect', async (t) => { + t.plan(1) + + const server = createServer((req, res) => { + t.equal(req.url, '/foo') + t.equal(req.method, 'GET') + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + mockPool.intercept({ + path: '/wrong', + method: 'GET' + }).reply(200, 'foo') + + mockAgent.disableNetConnect() + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET' + }), new MockNotMatchedError(`Mock dispatch not matched for path '/foo': subsequent request to origin ${baseUrl} was not allowed (net.connect disabled)`)) +}) + +test('MockAgent - headers function interceptor', async (t) => { + t.plan(7) + + const server = createServer((req, res) => { + t.fail('should not be called') + t.end() + res.end('should not be called') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + const mockPool = mockAgent.get(baseUrl) + + // Disable net connect so we can make sure it matches properly + mockAgent.disableNetConnect() + + mockPool.intercept({ + path: '/foo', + method: 'GET', + headers (headers) { + t.equal(typeof headers, 'object') + return !Object.keys(headers).includes('authorization') + } + }).reply(200, 'foo').times(2) + + await t.rejects(request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + Authorization: 'Bearer foo' + } + }), new MockNotMatchedError(`Mock dispatch not matched for headers '{"Authorization":"Bearer foo"}': subsequent request to origin ${baseUrl} was not allowed (net.connect disabled)`)) + + { + const { statusCode } = await request(`${baseUrl}/foo`, { + method: 'GET', + headers: { + foo: 'bar' + } + }) + t.equal(statusCode, 200) + } + + { + const { statusCode } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + } +}) + +test('MockAgent - clients are not garbage collected', async (t) => { + const samples = 250 + t.plan(2) + + const server = createServer((req, res) => { + t.fail('should not be called') + t.end() + res.end('should not be called') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + 
const baseUrl = `http://localhost:${server.address().port}` + + // Create the dispatcher and disable net connect so we can make sure it matches properly + const dispatcher = new MockAgent() + dispatcher.disableNetConnect() + + // When Node 16 is the minimum supported, this can be replaced by simply requiring setTimeout from timers/promises + function sleep (delay) { + return new Promise(resolve => { + setTimeout(resolve, delay) + }) + } + + // Purposely create the pool inside a function so that the reference is lost + function intercept () { + // Create the pool and add a lot of intercepts + const pool = dispatcher.get(baseUrl) + + for (let i = 0; i < samples; i++) { + pool.intercept({ + path: `/foo/${i}`, + method: 'GET' + }).reply(200, Buffer.alloc(1024 * 1024)) + } + } + + intercept() + + const results = new Set() + for (let i = 0; i < samples; i++) { + // Let's make some time pass to allow garbage collection to happen + await sleep(10) + + const { statusCode } = await request(`${baseUrl}/foo/${i}`, { method: 'GET', dispatcher }) + results.add(statusCode) + } + + t.equal(results.size, 1) + t.ok(results.has(200)) +}) + +// https://github.com/nodejs/undici/issues/1321 +test('MockAgent - using fetch yields correct statusText', { skip: nodeMajor < 16 }, async (t) => { + const { fetch } = require('..') + + const mockAgent = new MockAgent() + mockAgent.disableNetConnect() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get('http://localhost:3000') + + mockPool.intercept({ + path: '/statusText', + method: 'GET' + }).reply(200, 'Body') + + const { status, statusText } = await fetch('http://localhost:3000/statusText') + + t.equal(status, 200) + t.equal(statusText, 'OK') + + mockPool.intercept({ + path: '/unknownStatusText', + method: 'GET' + }).reply(420, 'Everyday') + + const unknownStatusCodeRes = await fetch('http://localhost:3000/unknownStatusText') + t.equal(unknownStatusCodeRes.status, 420) + t.equal(unknownStatusCodeRes.statusText, 'unknown') + + t.end() +}) + +// https://github.com/nodejs/undici/issues/1556 +test('MockAgent - using fetch yields a headers object in the reply callback', { skip: nodeMajor < 16 }, async (t) => { + const { fetch } = require('..') + + const mockAgent = new MockAgent() + mockAgent.disableNetConnect() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get('http://localhost:3000') + + mockPool.intercept({ + path: '/headers', + method: 'GET' + }).reply(200, (opts) => { + t.same(opts.headers, { + accept: '*/*', + 'accept-language': '*', + 'sec-fetch-mode': 'cors', + 'user-agent': 'undici', + 'accept-encoding': 'gzip, deflate' + }) + + return {} + }) + + await fetch('http://localhost:3000/headers', { + dispatcher: mockAgent + }) + + t.end() +}) + +// https://github.com/nodejs/undici/issues/1579 +test('MockAgent - headers in mock dispatcher intercept should be case-insensitive', { skip: nodeMajor < 16 }, async (t) => { + const { fetch } = require('..') + + const mockAgent = new MockAgent() + mockAgent.disableNetConnect() + setGlobalDispatcher(mockAgent) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get('https://example.com') + + mockPool + .intercept({ + path: '/', + headers: { + authorization: 'Bearer 12345', + 'USER-agent': 'undici' + } + }) + .reply(200) + + await fetch('https://example.com', { + headers: { + Authorization: 'Bearer 12345', + 'user-AGENT': 'undici' + } + }) + + t.end() +}) + +// https://github.com/nodejs/undici/issues/1757
+test('MockAgent - reply callback can be asynchronous', { skip: nodeMajor < 16 }, async (t) => { + const { fetch } = require('..') + const ReadableStream = globalThis.ReadableStream || require('stream/web').ReadableStream + + class MiniflareDispatcher extends Dispatcher { + constructor (inner, options) { + super(options) + this.inner = inner + } + + dispatch (options, handler) { + return this.inner.dispatch(options, handler) + } + + close (...args) { + return this.inner.close(...args) + } + + destroy (...args) { + return this.inner.destroy(...args) + } + } + + const mockAgent = new MockAgent() + const mockClient = mockAgent.get('http://localhost:3000') + mockAgent.disableNetConnect() + setGlobalDispatcher(new MiniflareDispatcher(mockAgent)) + + t.teardown(mockAgent.close.bind(mockAgent)) + + mockClient.intercept({ + path: () => true, + method: () => true + }).reply(200, async (opts) => { + if (opts.body && opts.body[Symbol.asyncIterator]) { + const chunks = [] + for await (const chunk of opts.body) { + chunks.push(chunk) + } + + return Buffer.concat(chunks) + } + + return opts.body + }).persist() + + { + const response = await fetch('http://localhost:3000', { + method: 'POST', + body: JSON.stringify({ foo: 'bar' }) + }) + + t.same(await response.json(), { foo: 'bar' }) + } + + { + const response = await fetch('http://localhost:3000', { + method: 'POST', + body: new ReadableStream({ + start (controller) { + controller.enqueue(new TextEncoder().encode('{"foo":')) + + setTimeout(() => { + controller.enqueue(new TextEncoder().encode('"bar"}')) + controller.close() + }, 100) + } + }), + duplex: 'half' + }) + + t.same(await response.json(), { foo: 'bar' }) + } +}) + +test('MockAgent - headers should be array of strings', async (t) => { + const mockAgent = new MockAgent() + mockAgent.disableNetConnect() + setGlobalDispatcher(mockAgent) + + const mockPool = mockAgent.get('http://localhost:3000') + + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'foo', { + headers: { + 'set-cookie': [ + 'foo=bar', + 'bar=baz', + 'baz=qux' + ] + } + }) + + const { headers } = await request('http://localhost:3000/foo', { + method: 'GET' + }) + + t.same(headers['set-cookie'], [ + 'foo=bar', + 'bar=baz', + 'baz=qux' + ]) +}) + +// https://github.com/nodejs/undici/issues/2418 +test('MockAgent - Sending ReadableStream body', { skip: nodeMajor < 16 }, async (t) => { + t.plan(1) + const { fetch } = require('..') + const ReadableStream = globalThis.ReadableStream || require('stream/web').ReadableStream + + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + req.pipe(res) + }) + + t.teardown(mockAgent.close.bind(mockAgent)) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const url = `http://localhost:${server.address().port}` + + const response = await fetch(url, { + method: 'POST', + body: new ReadableStream({ + start (controller) { + controller.enqueue('test') + controller.close() + } + }), + duplex: 'half' + }) + + t.same(await response.text(), 'test') +}) diff --git a/test/mock-client.js b/test/mock-client.js new file mode 100644 index 0000000..ef0600e --- /dev/null +++ b/test/mock-client.js @@ -0,0 +1,446 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { promisify } = require('util') +const { MockAgent, MockClient, setGlobalDispatcher, request } = require('..') +const { kUrl } = 
require('../lib/core/symbols') +const { kDispatches } = require('../lib/mock/mock-symbols') +const { InvalidArgumentError } = require('../lib/core/errors') +const { MockInterceptor } = require('../lib/mock/mock-interceptor') +const { getResponse } = require('../lib/mock/mock-utils') +const Dispatcher = require('../lib/dispatcher') + +test('MockClient - constructor', t => { + t.plan(3) + + t.test('fails if opts.agent does not implement `get` method', t => { + t.plan(1) + t.throws(() => new MockClient('http://localhost:9999', { agent: { get: 'not a function' } }), InvalidArgumentError) + }) + + t.test('sets agent', t => { + t.plan(1) + t.doesNotThrow(() => new MockClient('http://localhost:9999', { agent: new MockAgent({ connections: 1 }) })) + }) + + t.test('should implement the Dispatcher API', t => { + t.plan(1) + + const mockClient = new MockClient('http://localhost:9999', { agent: new MockAgent({ connections: 1 }) }) + t.type(mockClient, Dispatcher) + }) +}) + +test('MockClient - dispatch', t => { + t.plan(2) + + t.test('should handle a single interceptor', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + + this[kUrl] = new URL('http://localhost:9999') + mockClient[kDispatches] = [ + { + path: '/foo', + method: 'GET', + data: { + statusCode: 200, + data: 'hello', + headers: {}, + trailers: {}, + error: null + } + } + ] + + t.doesNotThrow(() => mockClient.dispatch({ + path: '/foo', + method: 'GET' + }, { + onHeaders: (_statusCode, _headers, resume) => resume(), + onData: () => {}, + onComplete: () => {} + })) + }) + + t.test('should directly throw error from mockDispatch function if error is not a MockNotMatchedError', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + + this[kUrl] = new URL('http://localhost:9999') + mockClient[kDispatches] = [ + { + path: '/foo', + method: 'GET', + data: { + statusCode: 200, + data: 'hello', + headers: {}, + trailers: {}, + error: null + } + } + ] + + t.throws(() => mockClient.dispatch({ + path: '/foo', + method: 'GET' + }, { + onHeaders: (_statusCode, _headers, resume) => { throw new Error('kaboom') }, + onData: () => {}, + onComplete: () => {} + }), new Error('kaboom')) + }) +}) + +test('MockClient - intercept should return a MockInterceptor', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + + const interceptor = mockClient.intercept({ + path: '/foo', + method: 'GET' + }) + + t.type(interceptor, MockInterceptor) +}) + +test('MockClient - intercept validation', (t) => { + t.plan(4) + + t.test('it should error if no options specified in the intercept', t => { + t.plan(1) + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get('http://localhost:9999') + + t.throws(() => mockClient.intercept(), new InvalidArgumentError('opts must be an object')) + }) + + t.test('it should error if no path specified in the intercept', t => { + t.plan(1) + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = 
mockAgent.get('http://localhost:9999') + + t.throws(() => mockClient.intercept({}), new InvalidArgumentError('opts.path must be defined')) + }) + + t.test('it should default to GET if no method specified in the intercept', t => { + t.plan(1) + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get('http://localhost:9999') + t.doesNotThrow(() => mockClient.intercept({ path: '/foo' })) + }) + + t.test('it should uppercase the method - https://github.com/nodejs/undici/issues/1320', t => { + t.plan(1) + + const mockAgent = new MockAgent() + const mockClient = mockAgent.get('http://localhost:3000') + + t.teardown(mockAgent.close.bind(mockAgent)) + + mockClient.intercept({ + path: '/test', + method: 'patch' + }).reply(200, 'Hello!') + + t.equal(mockClient[kDispatches][0].method, 'PATCH') + }) +}) + +test('MockClient - close should run without error', async (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + mockClient[kDispatches] = [ + { + path: '/foo', + method: 'GET', + data: { + statusCode: 200, + data: 'hello', + headers: {}, + trailers: {}, + error: null + } + } + ] + + await t.resolves(mockClient.close()) +}) + +test('MockClient - should be able to set as globalDispatcher', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + setGlobalDispatcher(mockClient) + + mockClient.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.same(response, 'hello') +}) + +test('MockClient - should support query params', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + setGlobalDispatcher(mockClient) + + const query = { + pageNum: 1 + } + mockClient.intercept({ + path: '/foo', + query, + method: 'GET' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + query + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.same(response, 'hello') +}) + +test('MockClient - should intercept query params with hardcoded path', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + 
t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + setGlobalDispatcher(mockClient) + + const query = { + pageNum: 1 + } + mockClient.intercept({ + path: '/foo?pageNum=1', + method: 'GET' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + query + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.same(response, 'hello') +}) + +test('MockClient - should intercept query params regardless of key ordering', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + setGlobalDispatcher(mockClient) + + const query = { + pageNum: 1, + limit: 100, + ordering: [false, true] + } + + mockClient.intercept({ + path: '/foo', + query: { + ordering: query.ordering, + pageNum: query.pageNum, + limit: query.limit + }, + method: 'GET' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + query + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.same(response, 'hello') +}) + +test('MockClient - should be able to use as a local dispatcher', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + + mockClient.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + dispatcher: mockClient + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.same(response, 'hello') +}) + +test('MockClient - basic intercept with MockClient.request', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent({ connections: 1 }) + t.teardown(mockAgent.close.bind(mockAgent)) + const mockClient = mockAgent.get(baseUrl) + t.type(mockClient, MockClient) + + mockClient.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } + 
}) + + const { statusCode, headers, trailers, body } = await mockClient.request({ + origin: baseUrl, + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'application/json') + t.same(trailers, { 'content-md5': 'test' }) + + const jsonResponse = JSON.parse(await getResponse(body)) + t.same(jsonResponse, { + foo: 'bar' + }) +}) diff --git a/test/mock-errors.js b/test/mock-errors.js new file mode 100644 index 0000000..a96de0b --- /dev/null +++ b/test/mock-errors.js @@ -0,0 +1,32 @@ +'use strict' + +const { test } = require('tap') +const { mockErrors, errors } = require('..') + +test('mockErrors', (t) => { + t.plan(1) + + t.test('MockNotMatchedError', t => { + t.plan(2) + + t.test('should implement an UndiciError', t => { + t.plan(4) + + const mockError = new mockErrors.MockNotMatchedError() + t.type(mockError, errors.UndiciError) + t.same(mockError.name, 'MockNotMatchedError') + t.same(mockError.code, 'UND_MOCK_ERR_MOCK_NOT_MATCHED') + t.same(mockError.message, 'The request does not match any registered mock dispatches') + }) + + t.test('should set a custom message', t => { + t.plan(4) + + const mockError = new mockErrors.MockNotMatchedError('custom message') + t.type(mockError, errors.UndiciError) + t.same(mockError.name, 'MockNotMatchedError') + t.same(mockError.code, 'UND_MOCK_ERR_MOCK_NOT_MATCHED') + t.same(mockError.message, 'custom message') + }) + }) +}) diff --git a/test/mock-interceptor-unused-assertions.js b/test/mock-interceptor-unused-assertions.js new file mode 100644 index 0000000..bfa2275 --- /dev/null +++ b/test/mock-interceptor-unused-assertions.js @@ -0,0 +1,219 @@ +'use strict' + +const { test, beforeEach, afterEach } = require('tap') +const { MockAgent, setGlobalDispatcher } = require('..') +const PendingInterceptorsFormatter = require('../lib/mock/pending-interceptors-formatter') + +// Avoid colors in the output for inline snapshots. 
+const pendingInterceptorsFormatter = new PendingInterceptorsFormatter({ disableColors: true }) + +let originalGlobalDispatcher + +const origin = 'https://localhost:9999' + +beforeEach(() => { + // Disallow all network activity by default by using a mock agent as the global dispatcher + const globalDispatcher = new MockAgent() + globalDispatcher.disableNetConnect() + setGlobalDispatcher(globalDispatcher) + originalGlobalDispatcher = globalDispatcher +}) + +afterEach(() => { + setGlobalDispatcher(originalGlobalDispatcher) +}) + +function mockAgentWithOneInterceptor () { + const agent = new MockAgent() + agent.disableNetConnect() + + agent + .get('https://example.com') + .intercept({ method: 'GET', path: '/' }) + .reply(200, '') + + return agent +} + +test('1 pending interceptor', t => { + t.plan(2) + + const err = t.throws(() => mockAgentWithOneInterceptor().assertNoPendingInterceptors({ pendingInterceptorsFormatter })) + + t.same(err.message, ` +1 interceptor is pending: + +┌─────────┬────────┬───────────────────────┬──────┬─────────────┬────────────┬─────────────┬───────────┐ +│ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │ +├─────────┼────────┼───────────────────────┼──────┼─────────────┼────────────┼─────────────┼───────────┤ +│ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ '❌' │ 0 │ 1 │ +└─────────┴────────┴───────────────────────┴──────┴─────────────┴────────────┴─────────────┴───────────┘ +`.trim()) +}) + +test('2 pending interceptors', t => { + t.plan(2) + + const withTwoInterceptors = mockAgentWithOneInterceptor() + withTwoInterceptors + .get(origin) + .intercept({ method: 'get', path: '/some/path' }) + .reply(204, 'OK') + const err = t.throws(() => withTwoInterceptors.assertNoPendingInterceptors({ pendingInterceptorsFormatter })) + + t.same(err.message, ` +2 interceptors are pending: + +┌─────────┬────────┬──────────────────────────┬──────────────┬─────────────┬────────────┬─────────────┬───────────┐ +│ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │ +├─────────┼────────┼──────────────────────────┼──────────────┼─────────────┼────────────┼─────────────┼───────────┤ +│ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ '❌' │ 0 │ 1 │ +│ 1 │ 'GET' │ 'https://localhost:9999' │ '/some/path' │ 204 │ '❌' │ 0 │ 1 │ +└─────────┴────────┴──────────────────────────┴──────────────┴─────────────┴────────────┴─────────────┴───────────┘ +`.trim()) +}) + +test('Variations of persist(), times(), and pending status', async t => { + t.plan(7) + + // Agent with unused interceptor + const agent = mockAgentWithOneInterceptor() + + // Unused with persist() + agent + .get(origin) + .intercept({ method: 'get', path: '/persistent/unused' }) + .reply(200, 'OK') + .persist() + + // Used with persist() + agent + .get(origin) + .intercept({ method: 'GET', path: '/persistent/used' }) + .reply(200, 'OK') + .persist() + t.same((await agent.request({ origin, method: 'GET', path: '/persistent/used' })).statusCode, 200) + + // Consumed without persist() + agent.get(origin) + .intercept({ method: 'post', path: '/transient/pending' }) + .reply(201, 'Created') + t.same((await agent.request({ origin, method: 'POST', path: '/transient/pending' })).statusCode, 201) + + // Partially pending with times() + agent.get(origin) + .intercept({ method: 'get', path: '/times/partial' }) + .reply(200, 'OK') + .times(5) + t.same((await agent.request({ origin, method: 'GET', path: '/times/partial' })).statusCode, 200) + + // Unused with times() +
agent.get(origin) + .intercept({ method: 'get', path: '/times/unused' }) + .reply(200, 'OK') + .times(2) + + // Fully pending with times() + agent.get(origin) + .intercept({ method: 'get', path: '/times/pending' }) + .reply(200, 'OK') + .times(2) + t.same((await agent.request({ origin, method: 'GET', path: '/times/pending' })).statusCode, 200) + t.same((await agent.request({ origin, method: 'GET', path: '/times/pending' })).statusCode, 200) + + const err = t.throws(() => agent.assertNoPendingInterceptors({ pendingInterceptorsFormatter })) + + t.same(err.message, ` +4 interceptors are pending: + +┌─────────┬────────┬──────────────────────────┬──────────────────────┬─────────────┬────────────┬─────────────┬───────────┐ +│ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │ +├─────────┼────────┼──────────────────────────┼──────────────────────┼─────────────┼────────────┼─────────────┼───────────┤ +│ 0 │ 'GET' │ 'https://example.com' │ '/' │ 200 │ '❌' │ 0 │ 1 │ +│ 1 │ 'GET' │ 'https://localhost:9999' │ '/persistent/unused' │ 200 │ '✅' │ 0 │ Infinity │ +│ 2 │ 'GET' │ 'https://localhost:9999' │ '/times/partial' │ 200 │ '❌' │ 1 │ 4 │ +│ 3 │ 'GET' │ 'https://localhost:9999' │ '/times/unused' │ 200 │ '❌' │ 0 │ 2 │ +└─────────┴────────┴──────────────────────────┴──────────────────────┴─────────────┴────────────┴─────────────┴───────────┘ +`.trim()) +}) + +test('works when no interceptors are registered', t => { + t.plan(2) + + const agent = new MockAgent() + agent.disableNetConnect() + + t.same(agent.pendingInterceptors(), []) + t.doesNotThrow(() => agent.assertNoPendingInterceptors()) +}) + +test('works when all interceptors are pending', async t => { + t.plan(4) + + const agent = new MockAgent() + agent.disableNetConnect() + + agent.get(origin).intercept({ method: 'get', path: '/' }).reply(200, 'OK') + t.same((await agent.request({ origin, method: 'GET', path: '/' })).statusCode, 200) + + agent.get(origin).intercept({ method: 'get', path: '/persistent' }).reply(200, 'OK') + t.same((await agent.request({ origin, method: 'GET', path: '/persistent' })).statusCode, 200) + + t.same(agent.pendingInterceptors(), []) + t.doesNotThrow(() => agent.assertNoPendingInterceptors()) +}) + +test('defaults to rendering output with terminal color when process.env.CI is unset', t => { + t.plan(2) + + // This ensures that the test works in an environment where the CI env var is set. + const oldCiEnvVar = process.env.CI + delete process.env.CI + + const err = t.throws( + () => mockAgentWithOneInterceptor().assertNoPendingInterceptors()) + t.same(err.message, ` +1 interceptor is pending: + +┌─────────┬────────┬───────────────────────┬──────┬─────────────┬────────────┬─────────────┬───────────┐ +│ (index) │ Method │ Origin │ Path │ Status code │ Persistent │ Invocations │ Remaining │ +├─────────┼────────┼───────────────────────┼──────┼─────────────┼────────────┼─────────────┼───────────┤ +│ 0 │ \u001b[32m'GET'\u001b[39m │ \u001b[32m'https://example.com'\u001b[39m │ \u001b[32m'/'\u001b[39m │ \u001b[33m200\u001b[39m │ \u001b[32m'❌'\u001b[39m │ \u001b[33m0\u001b[39m │ \u001b[33m1\u001b[39m │ +└─────────┴────────┴───────────────────────┴──────┴─────────────┴────────────┴─────────────┴───────────┘ +`.trim()) + + // Re-set the CI env var if it were set. + // Assigning `undefined` does not work, + // because reading the env var afterwards yields the string 'undefined', + // so we need to re-set it conditionally.
+ if (oldCiEnvVar != null) { + process.env.CI = oldCiEnvVar + } +}) + +test('returns unused interceptors', t => { + t.plan(1) + + t.same(mockAgentWithOneInterceptor().pendingInterceptors(), [ + { + timesInvoked: 0, + times: 1, + persist: false, + consumed: false, + pending: true, + path: '/', + method: 'GET', + body: undefined, + query: undefined, + headers: undefined, + data: { + error: null, + statusCode: 200, + data: '', + headers: {}, + trailers: {} + }, + origin: 'https://example.com' + } + ]) +}) diff --git a/test/mock-interceptor.js b/test/mock-interceptor.js new file mode 100644 index 0000000..a11377d --- /dev/null +++ b/test/mock-interceptor.js @@ -0,0 +1,258 @@ +'use strict' + +const { test } = require('tap') +const { MockInterceptor, MockScope } = require('../lib/mock/mock-interceptor') +const MockAgent = require('../lib/mock/mock-agent') +const { kDispatchKey } = require('../lib/mock/mock-symbols') +const { InvalidArgumentError } = require('../lib/core/errors') + +test('MockInterceptor - path', t => { + t.plan(1) + t.test('should remove hash fragment from paths', t => { + t.plan(1) + const mockInterceptor = new MockInterceptor({ + path: '#foobar', + method: '' + }, []) + t.equal(mockInterceptor[kDispatchKey].path, '') + }) +}) + +test('MockInterceptor - reply', t => { + t.plan(2) + + t.test('should return MockScope', t => { + t.plan(1) + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + const result = mockInterceptor.reply(200, 'hello') + t.type(result, MockScope) + }) + + t.test('should error if passed options invalid', t => { + t.plan(2) + + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + t.throws(() => mockInterceptor.reply(), new InvalidArgumentError('statusCode must be defined')) + t.throws(() => mockInterceptor.reply(200, '', 'hello'), new InvalidArgumentError('responseOptions must be an object')) + }) +}) + +test('MockInterceptor - reply callback', t => { + t.plan(2) + + t.test('should return MockScope', t => { + t.plan(1) + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + const result = mockInterceptor.reply(200, () => 'hello') + t.type(result, MockScope) + }) + + t.test('should error if passed options invalid', t => { + t.plan(2) + + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + t.throws(() => mockInterceptor.reply(), new InvalidArgumentError('statusCode must be defined')) + t.throws(() => mockInterceptor.reply(200, () => {}, 'hello'), new InvalidArgumentError('responseOptions must be an object')) + }) +}) + +test('MockInterceptor - reply options callback', t => { + t.plan(2) + + t.test('should return MockScope', t => { + t.plan(2) + + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + const result = mockInterceptor.reply((options) => ({ + statusCode: 200, + data: 'hello' + })) + t.type(result, MockScope) + + // Test parameters + + const baseUrl = 'http://localhost:9999' + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + mockPool.intercept({ + path: '/test', + method: 'GET' + }).reply((options) => { + t.strictSame(options, { path: '/test', method: 'GET', headers: { foo: 'bar' } }) + return { statusCode: 200, data: 'hello' } + }) + + mockPool.dispatch({ + path: '/test', + method: 'GET', + headers: { foo: 'bar' } + }, { + onHeaders: () => {}, + onData: () => {}, + onComplete: () => {} + }) + }) + + t.test('should error if passed 
options invalid', async (t) => { + t.plan(3) + + const baseUrl = 'http://localhost:9999' + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + mockPool.intercept({ + path: '/test', + method: 'GET' + }).reply(() => {}) + + mockPool.intercept({ + path: '/test3', + method: 'GET' + }).reply(() => ({ + statusCode: 200, + data: 'hello', + responseOptions: 42 + })) + + mockPool.intercept({ + path: '/test4', + method: 'GET' + }).reply(() => ({ + data: 'hello', + responseOptions: 42 + })) + + t.throws(() => mockPool.dispatch({ + path: '/test', + method: 'GET' + }, { + onHeaders: () => {}, + onData: () => {}, + onComplete: () => {} + }), new InvalidArgumentError('reply options callback must return an object')) + + t.throws(() => mockPool.dispatch({ + path: '/test3', + method: 'GET' + }, { + onHeaders: () => {}, + onData: () => {}, + onComplete: () => {} + }), new InvalidArgumentError('responseOptions must be an object')) + + t.throws(() => mockPool.dispatch({ + path: '/test4', + method: 'GET' + }, { + onHeaders: () => {}, + onData: () => {}, + onComplete: () => {} + }), new InvalidArgumentError('statusCode must be defined')) + }) +}) + +test('MockInterceptor - replyWithError', t => { + t.plan(2) + + t.test('should return MockScope', t => { + t.plan(1) + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + const result = mockInterceptor.replyWithError(new Error('kaboom')) + t.type(result, MockScope) + }) + + t.test('should error if passed options invalid', t => { + t.plan(1) + + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + t.throws(() => mockInterceptor.replyWithError(), new InvalidArgumentError('error must be defined')) + }) +}) + +test('MockInterceptor - defaultReplyHeaders', t => { + t.plan(2) + + t.test('should return MockInterceptor', t => { + t.plan(1) + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + const result = mockInterceptor.defaultReplyHeaders({}) + t.type(result, MockInterceptor) + }) + + t.test('should error if passed options invalid', t => { + t.plan(1) + + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + t.throws(() => mockInterceptor.defaultReplyHeaders(), new InvalidArgumentError('headers must be defined')) + }) +}) + +test('MockInterceptor - defaultReplyTrailers', t => { + t.plan(2) + + t.test('should return MockInterceptor', t => { + t.plan(1) + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + const result = mockInterceptor.defaultReplyTrailers({}) + t.type(result, MockInterceptor) + }) + + t.test('should error if passed options invalid', t => { + t.plan(1) + + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + t.throws(() => mockInterceptor.defaultReplyTrailers(), new InvalidArgumentError('trailers must be defined')) + }) +}) + +test('MockInterceptor - replyContentLength', t => { + t.plan(1) + + t.test('should return MockInterceptor', t => { + t.plan(1) + const mockInterceptor = new MockInterceptor({ + path: '', + method: '' + }, []) + const result = mockInterceptor.replyContentLength() + t.type(result, MockInterceptor) + }) +}) diff --git a/test/mock-pool.js b/test/mock-pool.js new file mode 100644 index 0000000..0ac1aac --- /dev/null +++ b/test/mock-pool.js @@ -0,0 +1,369 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { promisify } = require('util')
+const { MockAgent, MockPool, getGlobalDispatcher, setGlobalDispatcher, request } = require('..') +const { kUrl } = require('../lib/core/symbols') +const { nodeMajor } = require('../lib/core/util') +const { kDispatches } = require('../lib/mock/mock-symbols') +const { InvalidArgumentError } = require('../lib/core/errors') +const { MockInterceptor } = require('../lib/mock/mock-interceptor') +const { getResponse } = require('../lib/mock/mock-utils') +const Dispatcher = require('../lib/dispatcher') + +test('MockPool - constructor', t => { + t.plan(3) + + t.test('fails if opts.agent does not implement `get` method', t => { + t.plan(1) + t.throws(() => new MockPool('http://localhost:9999', { agent: { get: 'not a function' } }), InvalidArgumentError) + }) + + t.test('sets agent', t => { + t.plan(1) + t.doesNotThrow(() => new MockPool('http://localhost:9999', { agent: new MockAgent() })) + }) + + t.test('should implement the Dispatcher API', t => { + t.plan(1) + + const mockPool = new MockPool('http://localhost:9999', { agent: new MockAgent() }) + t.type(mockPool, Dispatcher) + }) +}) + +test('MockPool - dispatch', t => { + t.plan(2) + + t.test('should handle a single interceptor', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + this[kUrl] = new URL('http://localhost:9999') + mockPool[kDispatches] = [ + { + path: '/foo', + method: 'GET', + data: { + statusCode: 200, + data: 'hello', + headers: {}, + trailers: {}, + error: null + } + } + ] + + t.doesNotThrow(() => mockPool.dispatch({ + path: '/foo', + method: 'GET' + }, { + onHeaders: (_statusCode, _headers, resume) => resume(), + onData: () => {}, + onComplete: () => {} + })) + }) + + t.test('should directly throw error from mockDispatch function if error is not a MockNotMatchedError', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + this[kUrl] = new URL('http://localhost:9999') + mockPool[kDispatches] = [ + { + path: '/foo', + method: 'GET', + data: { + statusCode: 200, + data: 'hello', + headers: {}, + trailers: {}, + error: null + } + } + ] + + t.throws(() => mockPool.dispatch({ + path: '/foo', + method: 'GET' + }, { + onHeaders: (_statusCode, _headers, resume) => { throw new Error('kaboom') }, + onData: () => {}, + onComplete: () => {} + }), new Error('kaboom')) + }) +}) + +test('MockPool - intercept should return a MockInterceptor', (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + const interceptor = mockPool.intercept({ + path: '/foo', + method: 'GET' + }) + + t.ok(interceptor instanceof MockInterceptor) +}) + +test('MockPool - intercept validation', (t) => { + t.plan(3) + + t.test('it should error if no options specified in the intercept', t => { + t.plan(1) + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get('http://localhost:9999') + + t.throws(() => mockPool.intercept(), new InvalidArgumentError('opts must be an object')) + }) + + t.test('it should error if no path specified in the intercept', t => { + t.plan(1) + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = 
mockAgent.get('http://localhost:9999') + + t.throws(() => mockPool.intercept({}), new InvalidArgumentError('opts.path must be defined')) + }) + + t.test('it should default to GET if no method specified in the intercept', t => { + t.plan(1) + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get('http://localhost:9999') + t.doesNotThrow(() => mockPool.intercept({ path: '/foo' })) + }) +}) + +test('MockPool - close should run without error', async (t) => { + t.plan(1) + + const baseUrl = 'http://localhost:9999' + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + + mockPool[kDispatches] = [ + { + path: '/foo', + method: 'GET', + data: { + statusCode: 200, + data: 'hello', + headers: {}, + trailers: {}, + error: null + } + } + ] + + await t.resolves(mockPool.close()) +}) + +test('MockPool - should be able to set as globalDispatcher', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + t.type(mockPool, MockPool) + setGlobalDispatcher(mockPool) + + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET' + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.same(response, 'hello') +}) + +test('MockPool - should be able to use as a local dispatcher', async (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + + const mockPool = mockAgent.get(baseUrl) + t.type(mockPool, MockPool) + + mockPool.intercept({ + path: '/foo', + method: 'GET' + }).reply(200, 'hello') + + const { statusCode, body } = await request(`${baseUrl}/foo`, { + method: 'GET', + dispatcher: mockPool + }) + t.equal(statusCode, 200) + + const response = await getResponse(body) + t.same(response, 'hello') +}) + +test('MockPool - basic intercept with MockPool.request', async (t) => { + t.plan(5) + + const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('should not be called') + t.fail('should not be called') + t.end() + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + + const baseUrl = `http://localhost:${server.address().port}` + + const mockAgent = new MockAgent() + t.teardown(mockAgent.close.bind(mockAgent)) + const mockPool = mockAgent.get(baseUrl) + t.type(mockPool, MockPool) + + mockPool.intercept({ + path: '/foo?hello=there&see=ya', + method: 'POST', + body: 'form1=data1&form2=data2' + }).reply(200, { foo: 'bar' }, { + headers: { 'content-type': 'application/json' }, + trailers: { 'Content-MD5': 'test' } + }) + + const { statusCode, headers, trailers, body } = await 
mockPool.request({
+    origin: baseUrl,
+    path: '/foo?hello=there&see=ya',
+    method: 'POST',
+    body: 'form1=data1&form2=data2'
+  })
+  t.equal(statusCode, 200)
+  t.equal(headers['content-type'], 'application/json')
+  t.same(trailers, { 'content-md5': 'test' })
+
+  const jsonResponse = JSON.parse(await getResponse(body))
+  t.same(jsonResponse, {
+    foo: 'bar'
+  })
+})
+
+// https://github.com/nodejs/undici/issues/1546
+test('MockPool - correct errors when consuming invalid JSON body', async (t) => {
+  const oldDispatcher = getGlobalDispatcher()
+
+  const mockAgent = new MockAgent()
+  mockAgent.disableNetConnect()
+  setGlobalDispatcher(mockAgent)
+
+  t.teardown(() => setGlobalDispatcher(oldDispatcher))
+
+  const mockPool = mockAgent.get('https://google.com')
+  mockPool.intercept({
+    path: 'https://google.com'
+  }).reply(200, 'it\'s just a text')
+
+  const { body } = await request('https://google.com')
+  await t.rejects(body.json(), SyntaxError)
+
+  t.end()
+})
+
+test('MockPool - allows matching headers in fetch', { skip: nodeMajor < 16 }, async (t) => {
+  const { fetch } = require('../index')
+
+  const oldDispatcher = getGlobalDispatcher()
+
+  const baseUrl = 'http://localhost:9999'
+  const mockAgent = new MockAgent()
+  mockAgent.disableNetConnect()
+  setGlobalDispatcher(mockAgent)
+
+  t.teardown(async () => {
+    await mockAgent.close()
+    setGlobalDispatcher(oldDispatcher)
+  })
+
+  const pool = mockAgent.get(baseUrl)
+  pool.intercept({
+    path: '/foo',
+    method: 'GET',
+    headers: {
+      accept: 'application/json'
+    }
+  }).reply(200, { ok: 1 }).times(3)
+
+  await t.resolves(
+    fetch(`${baseUrl}/foo`, {
+      headers: {
+        accept: 'application/json'
+      }
+    })
+  )
+
+  // no 'accept: application/json' header sent, not matched
+  await t.rejects(fetch(`${baseUrl}/foo`))
+
+  // not 'accept: application/json', not matched
+  await t.rejects(fetch(`${baseUrl}/foo`, {
+    headers: {
+      accept: 'text/plain'
+    }
+  }), TypeError)
+
+  t.end()
+})
diff --git a/test/mock-scope.js b/test/mock-scope.js
new file mode 100644
index 0000000..605ba58
--- /dev/null
+++ b/test/mock-scope.js
@@ -0,0 +1,73 @@
+'use strict'
+
+const { test } = require('tap')
+const { MockScope } = require('../lib/mock/mock-interceptor')
+const { InvalidArgumentError } = require('../lib/core/errors')
+
+test('MockScope - delay', t => {
+  t.plan(2)
+
+  t.test('should return MockScope', t => {
+    t.plan(1)
+    const mockScope = new MockScope({
+      path: '',
+      method: ''
+    }, [])
+    const result = mockScope.delay(200)
+    t.type(result, MockScope)
+  })
+
+  t.test('should error if passed options invalid', t => {
+    t.plan(4)
+
+    const mockScope = new MockScope({
+      path: '',
+      method: ''
+    }, [])
+    t.throws(() => mockScope.delay(), new InvalidArgumentError('waitInMs must be a valid integer > 0'))
+    t.throws(() => mockScope.delay(200.1), new InvalidArgumentError('waitInMs must be a valid integer > 0'))
+    t.throws(() => mockScope.delay(0), new InvalidArgumentError('waitInMs must be a valid integer > 0'))
+    t.throws(() => mockScope.delay(-1), new InvalidArgumentError('waitInMs must be a valid integer > 0'))
+  })
+})
+
+test('MockScope - persist', t => {
+  t.plan(1)
+
+  t.test('should return MockScope', t => {
+    t.plan(1)
+    const mockScope = new MockScope({
+      path: '',
+      method: ''
+    }, [])
+    const result = mockScope.persist()
+    t.type(result, MockScope)
+  })
+})
+
+test('MockScope - times', t => {
+  t.plan(2)
+
+  t.test('should return MockScope', t => {
+    t.plan(1)
+    const mockScope = new MockScope({
+      path: '',
+      method: ''
+    }, [])
+    const result = mockScope.times(200)
+ t.type(result, MockScope) + }) + + t.test('should error if passed options invalid', t => { + t.plan(4) + + const mockScope = new MockScope({ + path: '', + method: '' + }, []) + t.throws(() => mockScope.times(), new InvalidArgumentError('repeatTimes must be a valid integer > 0')) + t.throws(() => mockScope.times(200.1), new InvalidArgumentError('repeatTimes must be a valid integer > 0')) + t.throws(() => mockScope.times(0), new InvalidArgumentError('repeatTimes must be a valid integer > 0')) + t.throws(() => mockScope.times(-1), new InvalidArgumentError('repeatTimes must be a valid integer > 0')) + }) +}) diff --git a/test/mock-utils.js b/test/mock-utils.js new file mode 100644 index 0000000..7799803 --- /dev/null +++ b/test/mock-utils.js @@ -0,0 +1,160 @@ +'use strict' + +const { test } = require('tap') +const { nodeMajor } = require('../lib/core/util') +const { MockNotMatchedError } = require('../lib/mock/mock-errors') +const { + deleteMockDispatch, + getMockDispatch, + getResponseData, + getStatusText, + getHeaderByName +} = require('../lib/mock/mock-utils') + +test('deleteMockDispatch - should do nothing if not able to find mock dispatch', (t) => { + t.plan(1) + + const key = { + url: 'url', + path: 'path', + method: 'method', + body: 'body' + } + + t.doesNotThrow(() => deleteMockDispatch([], key)) +}) + +test('getMockDispatch', (t) => { + t.plan(3) + + t.test('it should find a mock dispatch', (t) => { + t.plan(1) + const dispatches = [ + { + path: 'path', + method: 'method', + consumed: false + } + ] + + const result = getMockDispatch(dispatches, { + path: 'path', + method: 'method' + }) + t.same(result, { + path: 'path', + method: 'method', + consumed: false + }) + }) + + t.test('it should skip consumed dispatches', (t) => { + t.plan(1) + const dispatches = [ + { + path: 'path', + method: 'method', + consumed: true + }, + { + path: 'path', + method: 'method', + consumed: false + } + ] + + const result = getMockDispatch(dispatches, { + path: 'path', + method: 'method' + }) + t.same(result, { + path: 'path', + method: 'method', + consumed: false + }) + }) + + t.test('it should throw if dispatch not found', (t) => { + t.plan(1) + const dispatches = [ + { + path: 'path', + method: 'method', + consumed: false + } + ] + + t.throws(() => getMockDispatch(dispatches, { + path: 'wrong', + method: 'wrong' + }), new MockNotMatchedError('Mock dispatch not matched for path \'wrong\'')) + }) +}) + +test('getResponseData', (t) => { + t.plan(3) + + t.test('it should stringify objects', (t) => { + t.plan(1) + const responseData = getResponseData({ str: 'string', num: 42 }) + t.equal(responseData, '{"str":"string","num":42}') + }) + + t.test('it should return strings untouched', (t) => { + t.plan(1) + const responseData = getResponseData('test') + t.equal(responseData, 'test') + }) + + t.test('it should return buffers untouched', (t) => { + t.plan(1) + const responseData = getResponseData(Buffer.from('test')) + t.ok(Buffer.isBuffer(responseData)) + }) +}) + +test('getStatusText', (t) => { + for (const statusCode of [ + 100, 101, 102, 103, 200, 201, 202, 203, + 204, 205, 206, 207, 208, 226, 300, 301, + 302, 303, 304, 305, 306, 307, 308, 400, + 401, 402, 403, 404, 405, 406, 407, 408, + 409, 410, 411, 412, 413, 414, 415, 416, + 417, 418, 421, 422, 423, 424, 425, 426, + 428, 429, 431, 451, 500, 501, 502, 503, + 504, 505, 506, 507, 508, 510, 511 + ]) { + t.ok(getStatusText(statusCode)) + } + + t.equal(getStatusText(420), 'unknown') + + t.end() +}) + +test('getHeaderByName', (t) => { + const headersRecord = 
{ + key: 'value' + } + + t.equal(getHeaderByName(headersRecord, 'key'), 'value') + t.equal(getHeaderByName(headersRecord, 'anotherKey'), undefined) + + const headersArray = ['key', 'value'] + + t.equal(getHeaderByName(headersArray, 'key'), 'value') + t.equal(getHeaderByName(headersArray, 'anotherKey'), undefined) + + if (nodeMajor >= 16) { + const { Headers } = require('../index') + + const headers = new Headers([ + ['key', 'value'] + ]) + + t.equal(getHeaderByName(headers, 'key'), 'value') + t.equal(getHeaderByName(headers, 'anotherKey'), null) + } + + t.end() +}) diff --git a/test/no-strict-content-length.js b/test/no-strict-content-length.js new file mode 100644 index 0000000..993b0fd --- /dev/null +++ b/test/no-strict-content-length.js @@ -0,0 +1,349 @@ +'use strict' + +const tap = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') +const sinon = require('sinon') +const { wrapWithAsyncIterable } = require('./utils/async-iterators') + +tap.test('strictContentLength: false', (t) => { + t.plan(7) + + const emitWarningStub = sinon.stub(process, 'emitWarning') + + function assertEmitWarningCalledAndReset () { + sinon.assert.called(emitWarningStub) + emitWarningStub.resetHistory() + } + + t.teardown(() => { + emitWarningStub.restore() + }) + + t.test('request invalid content-length', (t) => { + t.plan(8) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + strictContentLength: false + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: 'asd' + }, (err, data) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: 'asdasdasdasdasdasda' + }, (err, data) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: Buffer.alloc(9) + }, (err, data) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: Buffer.alloc(11) + }, (err, data) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + + client.request({ + path: '/', + method: 'HEAD', + headers: { + 'content-length': 10 + } + }, (err, data) => { + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 0 + } + }, (err, data) => { + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 4 + }, + body: new Readable({ + read () { + this.push('asd') + this.push(null) + } + }) + }, (err, data) => { + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET', + headers: { + 'content-length': 4 + }, + body: new Readable({ + read () { + this.push('asasdasdasdd') + this.push(null) + } + }) + }, (err, data) => { + t.error(err) + }) + }) + }) + + t.test('request streaming content-length less than body size', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + strictContentLength: false + }) + t.teardown(client.close.bind(client)) + + 
client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 2 + }, + body: new Readable({ + read () { + setImmediate(() => { + this.push('abcd') + this.push(null) + }) + } + }) + }, (err) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + }) + }) + + t.test('request streaming content-length greater than body size', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + strictContentLength: false + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: new Readable({ + read () { + setImmediate(() => { + this.push('abcd') + this.push(null) + }) + } + }) + }, (err) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + }) + }) + + t.test('request streaming data when content-length=0', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + strictContentLength: false + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 0 + }, + body: new Readable({ + read () { + setImmediate(() => { + this.push('asdasdasdkajsdnasdkjasnd') + this.push(null) + }) + } + }) + }, (err) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + }) + }) + + t.test('request async iterating content-length less than body size', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + strictContentLength: false + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 2 + }, + body: wrapWithAsyncIterable(new Readable({ + read () { + setImmediate(() => { + this.push('abcd') + this.push(null) + }) + } + })) + }, (err) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + }) + }) + + t.test('request async iterator content-length greater than body size', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + strictContentLength: false + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 10 + }, + body: wrapWithAsyncIterable(new Readable({ + read () { + setImmediate(() => { + this.push('abcd') + this.push(null) + }) + } + })) + }, (err) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + }) + }) + + t.test('request async iterator data when content-length=0', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + strictContentLength: false + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + headers: { + 'content-length': 0 + }, + body: wrapWithAsyncIterable(new Readable({ + read () { + setImmediate(() => { + this.push('asdasdasdkajsdnasdkjasnd') + 
this.push(null) + }) + } + })) + }, (err) => { + assertEmitWarningCalledAndReset() + t.error(err) + }) + }) + }) +}) diff --git a/test/node-fetch/LICENSE b/test/node-fetch/LICENSE new file mode 100644 index 0000000..41ca1b6 --- /dev/null +++ b/test/node-fetch/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 - 2020 Node Fetch Team + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/test/node-fetch/headers.js b/test/node-fetch/headers.js new file mode 100644 index 0000000..e509fd8 --- /dev/null +++ b/test/node-fetch/headers.js @@ -0,0 +1,282 @@ +/* eslint no-unused-expressions: "off" */ + +const { format } = require('util') +const chai = require('chai') +const chaiIterator = require('chai-iterator') +const { Headers } = require('../../lib/fetch/headers.js') + +chai.use(chaiIterator) + +const { expect } = chai + +describe('Headers', () => { + it('should have attributes conforming to Web IDL', () => { + const headers = new Headers() + expect(Object.getOwnPropertyNames(headers)).to.be.empty + const enumerableProperties = [] + + for (const property in headers) { + enumerableProperties.push(property) + } + + for (const toCheck of [ + 'append', + 'delete', + 'entries', + 'forEach', + 'get', + 'has', + 'keys', + 'set', + 'values' + ]) { + expect(enumerableProperties).to.contain(toCheck) + } + }) + + it('should allow iterating through all headers with forEach', () => { + const headers = new Headers([ + ['b', '2'], + ['c', '4'], + ['b', '3'], + ['a', '1'] + ]) + expect(headers).to.have.property('forEach') + + const result = [] + for (const [key, value] of headers.entries()) { + result.push([key, value]) + } + + expect(result).to.deep.equal([ + ['a', '1'], + ['b', '2, 3'], + ['c', '4'] + ]) + }) + + it('should be iterable with forEach', () => { + const headers = new Headers() + headers.append('Accept', 'application/json') + headers.append('Accept', 'text/plain') + headers.append('Content-Type', 'text/html') + + const results = [] + headers.forEach((value, key, object) => { + results.push({ value, key, object }) + }) + + expect(results.length).to.equal(2) + expect({ key: 'accept', value: 'application/json, text/plain', object: headers }).to.deep.equal(results[0]) + expect({ key: 'content-type', value: 'text/html', object: headers }).to.deep.equal(results[1]) + }) + + xit('should set "this" to undefined by default on forEach', () => { + const headers = new Headers({ Accept: 'application/json' }) + headers.forEach(function () { + expect(this).to.be.undefined + }) + }) + + it('should accept thisArg 
as a second argument for forEach', () => { + const headers = new Headers({ Accept: 'application/json' }) + const thisArg = {} + headers.forEach(function () { + expect(this).to.equal(thisArg) + }, thisArg) + }) + + it('should allow iterating through all headers with for-of loop', () => { + const headers = new Headers([ + ['b', '2'], + ['c', '4'], + ['a', '1'] + ]) + headers.append('b', '3') + expect(headers).to.be.iterable + + const result = [] + for (const pair of headers) { + result.push(pair) + } + + expect(result).to.deep.equal([ + ['a', '1'], + ['b', '2, 3'], + ['c', '4'] + ]) + }) + + it('should allow iterating through all headers with entries()', () => { + const headers = new Headers([ + ['b', '2'], + ['c', '4'], + ['a', '1'] + ]) + headers.append('b', '3') + + expect(headers.entries()).to.be.iterable + .and.to.deep.iterate.over([ + ['a', '1'], + ['b', '2, 3'], + ['c', '4'] + ]) + }) + + it('should allow iterating through all headers with keys()', () => { + const headers = new Headers([ + ['b', '2'], + ['c', '4'], + ['a', '1'] + ]) + headers.append('b', '3') + + expect(headers.keys()).to.be.iterable + .and.to.iterate.over(['a', 'b', 'c']) + }) + + it('should allow iterating through all headers with values()', () => { + const headers = new Headers([ + ['b', '2'], + ['c', '4'], + ['a', '1'] + ]) + headers.append('b', '3') + + expect(headers.values()).to.be.iterable + .and.to.iterate.over(['1', '2, 3', '4']) + }) + + it('should reject illegal header', () => { + const headers = new Headers() + expect(() => new Headers({ 'He y': 'ok' })).to.throw(TypeError) + expect(() => new Headers({ 'Hé-y': 'ok' })).to.throw(TypeError) + expect(() => new Headers({ 'He-y': 'ăk' })).to.throw(TypeError) + expect(() => headers.append('Hé-y', 'ok')).to.throw(TypeError) + expect(() => headers.delete('Hé-y')).to.throw(TypeError) + expect(() => headers.get('Hé-y')).to.throw(TypeError) + expect(() => headers.has('Hé-y')).to.throw(TypeError) + expect(() => headers.set('Hé-y', 'ok')).to.throw(TypeError) + // Should reject empty header + expect(() => headers.append('', 'ok')).to.throw(TypeError) + }) + + xit('should ignore unsupported attributes while reading headers', () => { + const FakeHeader = function () {} + // Prototypes are currently ignored + // This might change in the future: #181 + FakeHeader.prototype.z = 'fake' + + const res = new FakeHeader() + res.a = 'string' + res.b = ['1', '2'] + res.c = '' + res.d = [] + res.e = 1 + res.f = [1, 2] + res.g = { a: 1 } + res.h = undefined + res.i = null + res.j = Number.NaN + res.k = true + res.l = false + res.m = Buffer.from('test') + + const h1 = new Headers(res) + h1.set('n', [1, 2]) + h1.append('n', ['3', 4]) + + const h1Raw = h1.raw() + + expect(h1Raw.a).to.include('string') + expect(h1Raw.b).to.include('1,2') + expect(h1Raw.c).to.include('') + expect(h1Raw.d).to.include('') + expect(h1Raw.e).to.include('1') + expect(h1Raw.f).to.include('1,2') + expect(h1Raw.g).to.include('[object Object]') + expect(h1Raw.h).to.include('undefined') + expect(h1Raw.i).to.include('null') + expect(h1Raw.j).to.include('NaN') + expect(h1Raw.k).to.include('true') + expect(h1Raw.l).to.include('false') + expect(h1Raw.m).to.include('test') + expect(h1Raw.n).to.include('1,2') + expect(h1Raw.n).to.include('3,4') + + expect(h1Raw.z).to.be.undefined + }) + + xit('should wrap headers', () => { + const h1 = new Headers({ + a: '1' + }) + const h1Raw = h1.raw() + + const h2 = new Headers(h1) + h2.set('b', '1') + const h2Raw = h2.raw() + + const h3 = new Headers(h2) + h3.append('a', '2') + 
const h3Raw = h3.raw() + + expect(h1Raw.a).to.include('1') + expect(h1Raw.a).to.not.include('2') + + expect(h2Raw.a).to.include('1') + expect(h2Raw.a).to.not.include('2') + expect(h2Raw.b).to.include('1') + + expect(h3Raw.a).to.include('1') + expect(h3Raw.a).to.include('2') + expect(h3Raw.b).to.include('1') + }) + + it('should accept headers as an iterable of tuples', () => { + let headers + + headers = new Headers([ + ['a', '1'], + ['b', '2'], + ['a', '3'] + ]) + expect(headers.get('a')).to.equal('1, 3') + expect(headers.get('b')).to.equal('2') + + headers = new Headers([ + new Set(['a', '1']), + ['b', '2'], + new Map([['a', null], ['3', null]]).keys() + ]) + expect(headers.get('a')).to.equal('1, 3') + expect(headers.get('b')).to.equal('2') + + headers = new Headers(new Map([ + ['a', '1'], + ['b', '2'] + ])) + expect(headers.get('a')).to.equal('1') + expect(headers.get('b')).to.equal('2') + }) + + it('should throw a TypeError if non-tuple exists in a headers initializer', () => { + expect(() => new Headers([['b', '2', 'huh?']])).to.throw(TypeError) + expect(() => new Headers(['b2'])).to.throw(TypeError) + expect(() => new Headers('b2')).to.throw(TypeError) + expect(() => new Headers({ [Symbol.iterator]: 42 })).to.throw(TypeError) + }) + + xit('should use a custom inspect function', () => { + const headers = new Headers([ + ['Host', 'thehost'], + ['Host', 'notthehost'], + ['a', '1'], + ['b', '2'], + ['a', '3'] + ]) + + // eslint-disable-next-line quotes + expect(format(headers)).to.equal("{ a: [ '1', '3' ], b: '2', host: 'thehost' }") + }) +}) diff --git a/test/node-fetch/main.js b/test/node-fetch/main.js new file mode 100644 index 0000000..358a969 --- /dev/null +++ b/test/node-fetch/main.js @@ -0,0 +1,1661 @@ +/* eslint no-unused-expressions: "off" */ +/* globals AbortController */ + +// Test tools +const zlib = require('zlib') +const stream = require('stream') +const vm = require('vm') +const chai = require('chai') +const crypto = require('crypto') +const chaiPromised = require('chai-as-promised') +const chaiIterator = require('chai-iterator') +const chaiString = require('chai-string') +const delay = require('delay') +const { Blob } = require('buffer') + +const { + fetch, + Headers, + Request, + FormData, + Response, + setGlobalDispatcher, + Agent +} = require('../../index.js') +const HeadersOrig = require('../../lib/fetch/headers.js').Headers +const RequestOrig = require('../../lib/fetch/request.js').Request +const ResponseOrig = require('../../lib/fetch/response.js').Response +const TestServer = require('./utils/server.js') +const chaiTimeout = require('./utils/chai-timeout.js') +const { ReadableStream } = require('stream/web') + +function isNodeLowerThan (version) { + return !~process.version.localeCompare(version, undefined, { numeric: true }) +} + +const { + Uint8Array: VMUint8Array +} = vm.runInNewContext('this') + +chai.use(chaiPromised) +chai.use(chaiIterator) +chai.use(chaiString) +chai.use(chaiTimeout) +const { expect } = chai + +describe('node-fetch', () => { + const local = new TestServer() + let base + + before(async () => { + await local.start() + setGlobalDispatcher(new Agent({ + connect: { + rejectUnauthorized: false + } + })) + base = `http://${local.hostname}:${local.port}/` + }) + + after(async () => { + return local.stop() + }) + + it('should return a promise', () => { + const url = `${base}hello` + const p = fetch(url) + expect(p).to.be.an.instanceof(Promise) + expect(p).to.have.property('then') + }) + + it('should expose Headers, Response and Request constructors', 
() => { + expect(Headers).to.equal(HeadersOrig) + expect(Response).to.equal(ResponseOrig) + expect(Request).to.equal(RequestOrig) + }) + + it('should support proper toString output for Headers, Response and Request objects', () => { + expect(new Headers().toString()).to.equal('[object Headers]') + expect(new Response().toString()).to.equal('[object Response]') + expect(new Request(base).toString()).to.equal('[object Request]') + }) + + it('should reject with error if url is protocol relative', () => { + const url = '//example.com/' + return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError) + }) + + it('should reject with error if url is relative path', () => { + const url = '/some/path' + return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError) + }) + + it('should reject with error if protocol is unsupported', () => { + const url = 'ftp://example.com/' + return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError) + }) + + it('should reject with error on network failure', function () { + this.timeout(5000) + const url = 'http://localhost:50000/' + return expect(fetch(url)).to.eventually.be.rejected + .and.be.an.instanceOf(TypeError) + }) + + it('should resolve into response', () => { + const url = `${base}hello` + return fetch(url).then(res => { + expect(res).to.be.an.instanceof(Response) + expect(res.headers).to.be.an.instanceof(Headers) + expect(res.body).to.be.an.instanceof(ReadableStream) + expect(res.bodyUsed).to.be.false + + expect(res.url).to.equal(url) + expect(res.ok).to.be.true + expect(res.status).to.equal(200) + expect(res.statusText).to.equal('OK') + }) + }) + + it('Response.redirect should resolve into response', () => { + const res = Response.redirect('http://localhost') + expect(res).to.be.an.instanceof(Response) + expect(res.headers).to.be.an.instanceof(Headers) + expect(res.headers.get('location')).to.equal('http://localhost/') + expect(res.status).to.equal(302) + }) + + it('Response.redirect /w invalid url should fail', () => { + expect(() => { + Response.redirect('localhost') + }).to.throw() + }) + + it('Response.redirect /w invalid status should fail', () => { + expect(() => { + Response.redirect('http://localhost', 200) + }).to.throw() + }) + + it('should accept plain text response', () => { + const url = `${base}plain` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(res.bodyUsed).to.be.true + expect(result).to.be.a('string') + expect(result).to.equal('text') + }) + }) + }) + + it('should accept html response (like plain text)', () => { + const url = `${base}html` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/html') + return res.text().then(result => { + expect(res.bodyUsed).to.be.true + expect(result).to.be.a('string') + expect(result).to.equal('') + }) + }) + }) + + it('should accept json response', () => { + const url = `${base}json` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('application/json') + return res.json().then(result => { + expect(res.bodyUsed).to.be.true + expect(result).to.be.an('object') + expect(result).to.deep.equal({ name: 'value' }) + }) + }) + }) + + it('should send request with custom headers', () => { + const url = `${base}inspect` + const options = { + headers: { 'x-custom-header': 'abc' } + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.headers['x-custom-header']).to.equal('abc') + }) + 
}) + + it('should send request with custom headers array', () => { + const url = `${base}inspect` + const options = { + headers: { 'x-custom-header': ['abc'] } + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.headers['x-custom-header']).to.equal('abc') + }) + }) + + it('should send request with multi-valued headers', () => { + const url = `${base}inspect` + const options = { + headers: { 'x-custom-header': ['abc', '123'] } + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.headers['x-custom-header']).to.equal('abc,123') + }) + }) + + it('should accept headers instance', () => { + const url = `${base}inspect` + const options = { + headers: new Headers({ 'x-custom-header': 'abc' }) + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.headers['x-custom-header']).to.equal('abc') + }) + }) + + it('should follow redirect code 301', () => { + const url = `${base}redirect/301` + return fetch(url).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + expect(res.ok).to.be.true + }) + }) + + it('should follow redirect code 302', () => { + const url = `${base}redirect/302` + return fetch(url).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + }) + }) + + it('should follow redirect code 303', () => { + const url = `${base}redirect/303` + return fetch(url).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + }) + }) + + it('should follow redirect code 307', () => { + const url = `${base}redirect/307` + return fetch(url).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + }) + }) + + it('should follow redirect code 308', () => { + const url = `${base}redirect/308` + return fetch(url).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + }) + }) + + it('should follow redirect chain', () => { + const url = `${base}redirect/chain` + return fetch(url).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + }) + }) + + it('should follow POST request redirect code 301 with GET', () => { + const url = `${base}redirect/301` + const options = { + method: 'POST', + body: 'a=1' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + return res.json().then(result => { + expect(result.method).to.equal('GET') + expect(result.body).to.equal('') + }) + }) + }) + + it('should follow PATCH request redirect code 301 with PATCH', () => { + const url = `${base}redirect/301` + const options = { + method: 'PATCH', + body: 'a=1' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + return res.json().then(res => { + expect(res.method).to.equal('PATCH') + expect(res.body).to.equal('a=1') + }) + }) + }) + + it('should follow POST request redirect code 302 with GET', () => { + const url = `${base}redirect/302` + const options = { + method: 'POST', + body: 'a=1' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + return res.json().then(result => { + expect(result.method).to.equal('GET') + expect(result.body).to.equal('') + }) + }) + }) + + it('should follow PATCH request redirect code 302 with PATCH', () => { + const 
url = `${base}redirect/302` + const options = { + method: 'PATCH', + body: 'a=1' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + return res.json().then(res => { + expect(res.method).to.equal('PATCH') + expect(res.body).to.equal('a=1') + }) + }) + }) + + it('should follow redirect code 303 with GET', () => { + const url = `${base}redirect/303` + const options = { + method: 'PUT', + body: 'a=1' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + return res.json().then(result => { + expect(result.method).to.equal('GET') + expect(result.body).to.equal('') + }) + }) + }) + + it('should follow PATCH request redirect code 307 with PATCH', () => { + const url = `${base}redirect/307` + const options = { + method: 'PATCH', + body: 'a=1' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + return res.json().then(result => { + expect(result.method).to.equal('PATCH') + expect(result.body).to.equal('a=1') + }) + }) + }) + + it('should not follow non-GET redirect if body is a readable stream', () => { + const url = `${base}redirect/307` + const options = { + method: 'PATCH', + body: stream.Readable.from('tada') + } + return expect(fetch(url, options)).to.eventually.be.rejected + .and.be.an.instanceOf(TypeError) + }) + + it('should obey maximum redirect, reject case', () => { + const url = `${base}redirect/chain/20` + return expect(fetch(url)).to.eventually.be.rejected + .and.be.an.instanceOf(TypeError) + }) + + it('should obey redirect chain, resolve case', () => { + const url = `${base}redirect/chain/19` + return fetch(url).then(res => { + expect(res.url).to.equal(`${base}inspect`) + expect(res.status).to.equal(200) + }) + }) + + it('should support redirect mode, error flag', () => { + const url = `${base}redirect/301` + const options = { + redirect: 'error' + } + return expect(fetch(url, options)).to.eventually.be.rejected + .and.be.an.instanceOf(TypeError) + }) + + it('should support redirect mode, manual flag when there is no redirect', () => { + const url = `${base}hello` + const options = { + redirect: 'manual' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(url) + expect(res.status).to.equal(200) + expect(res.headers.get('location')).to.be.null + }) + }) + + it('should follow redirect code 301 and keep existing headers', () => { + const url = `${base}redirect/301` + const options = { + headers: new Headers({ 'x-custom-header': 'abc' }) + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(`${base}inspect`) + return res.json() + }).then(res => { + expect(res.headers['x-custom-header']).to.equal('abc') + }) + }) + + it('should treat broken redirect as ordinary response (follow)', () => { + const url = `${base}redirect/no-location` + return fetch(url).then(res => { + expect(res.url).to.equal(url) + expect(res.status).to.equal(301) + expect(res.headers.get('location')).to.be.null + }) + }) + + it('should treat broken redirect as ordinary response (manual)', () => { + const url = `${base}redirect/no-location` + const options = { + redirect: 'manual' + } + return fetch(url, options).then(res => { + expect(res.url).to.equal(url) + expect(res.status).to.equal(301) + expect(res.headers.get('location')).to.be.null + }) + }) + + it('should throw a TypeError on an invalid redirect option', () => { + const url = `${base}redirect/301` + 
const options = { + redirect: 'foobar' + } + return fetch(url, options).then(() => { + expect.fail() + }, error => { + expect(error).to.be.an.instanceOf(TypeError) + }) + }) + + it('should set redirected property on response when redirect', () => { + const url = `${base}redirect/301` + return fetch(url).then(res => { + expect(res.redirected).to.be.true + }) + }) + + it('should not set redirected property on response without redirect', () => { + const url = `${base}hello` + return fetch(url).then(res => { + expect(res.redirected).to.be.false + }) + }) + + it('should handle client-error response', () => { + const url = `${base}error/400` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + expect(res.status).to.equal(400) + expect(res.statusText).to.equal('Bad Request') + expect(res.ok).to.be.false + return res.text().then(result => { + expect(res.bodyUsed).to.be.true + expect(result).to.be.a('string') + expect(result).to.equal('client error') + }) + }) + }) + + it('should handle server-error response', () => { + const url = `${base}error/500` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + expect(res.status).to.equal(500) + expect(res.statusText).to.equal('Internal Server Error') + expect(res.ok).to.be.false + return res.text().then(result => { + expect(res.bodyUsed).to.be.true + expect(result).to.be.a('string') + expect(result).to.equal('server error') + }) + }) + }) + + it('should handle network-error response', () => { + const url = `${base}error/reset` + return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError) + }) + + it('should handle network-error partial response', () => { + const url = `${base}error/premature` + return fetch(url).then(res => { + expect(res.status).to.equal(200) + expect(res.ok).to.be.true + return expect(res.text()).to.eventually.be.rejectedWith(Error) + }) + }) + + it('should handle network-error in chunked response async iterator', () => { + const url = `${base}error/premature/chunked` + return fetch(url).then(res => { + expect(res.status).to.equal(200) + expect(res.ok).to.be.true + + const read = async body => { + const chunks = [] + for await (const chunk of body) { + chunks.push(chunk) + } + + return chunks + } + + return expect(read(res.body)) + .to.eventually.be.rejectedWith(Error) + }) + }) + + it('should handle network-error in chunked response in consumeBody', () => { + const url = `${base}error/premature/chunked` + return fetch(url).then(res => { + expect(res.status).to.equal(200) + expect(res.ok).to.be.true + + return expect(res.text()).to.eventually.be.rejectedWith(Error) + }) + }) + + it('should handle DNS-error response', () => { + const url = 'http://domain.invalid' + return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError) + }) + + it('should reject invalid json response', () => { + const url = `${base}error/json` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('application/json') + return expect(res.json()).to.eventually.be.rejectedWith(Error) + }) + }) + + it('should handle response with no status text', () => { + const url = `${base}no-status-text` + return fetch(url).then(res => { + expect(res.statusText).to.equal('') + }) + }) + + it('should handle no content response', () => { + const url = `${base}no-content` + return fetch(url).then(res => { + expect(res.status).to.equal(204) + expect(res.statusText).to.equal('No Content') + expect(res.ok).to.be.true + return res.text().then(result => 
{ + expect(result).to.be.a('string') + expect(result).to.be.empty + }) + }) + }) + + it('should reject when trying to parse no content response as json', () => { + const url = `${base}no-content` + return fetch(url).then(res => { + expect(res.status).to.equal(204) + expect(res.statusText).to.equal('No Content') + expect(res.ok).to.be.true + return expect(res.json()).to.eventually.be.rejectedWith(Error) + }) + }) + + it('should handle no content response with gzip encoding', () => { + const url = `${base}no-content/gzip` + return fetch(url).then(res => { + expect(res.status).to.equal(204) + expect(res.statusText).to.equal('No Content') + expect(res.headers.get('content-encoding')).to.equal('gzip') + expect(res.ok).to.be.true + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.be.empty + }) + }) + }) + + it('should handle not modified response', () => { + const url = `${base}not-modified` + return fetch(url).then(res => { + expect(res.status).to.equal(304) + expect(res.statusText).to.equal('Not Modified') + expect(res.ok).to.be.false + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.be.empty + }) + }) + }) + + it('should handle not modified response with gzip encoding', () => { + const url = `${base}not-modified/gzip` + return fetch(url).then(res => { + expect(res.status).to.equal(304) + expect(res.statusText).to.equal('Not Modified') + expect(res.headers.get('content-encoding')).to.equal('gzip') + expect(res.ok).to.be.false + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.be.empty + }) + }) + }) + + it('should decompress gzip response', () => { + const url = `${base}gzip` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.equal('hello world') + }) + }) + }) + + it('should decompress slightly invalid gzip response', async () => { + const url = `${base}gzip-truncated` + const res = await fetch(url) + expect(res.headers.get('content-type')).to.equal('text/plain') + const result = await res.text() + expect(result).to.be.a('string') + expect(result).to.equal('hello world') + }) + + it('should decompress deflate response', () => { + const url = `${base}deflate` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.equal('hello world') + }) + }) + }) + + xit('should decompress deflate raw response from old apache server', () => { + const url = `${base}deflate-raw` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.equal('hello world') + }) + }) + }) + + it('should decompress brotli response', function () { + if (typeof zlib.createBrotliDecompress !== 'function') { + this.skip() + } + + const url = `${base}brotli` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.equal('hello world') + }) + }) + }) + + it('should handle no content response with brotli encoding', function () { + if (typeof zlib.createBrotliDecompress !== 'function') { + this.skip() + } + + const url = `${base}no-content/brotli` + return fetch(url).then(res => { + 
expect(res.status).to.equal(204) + expect(res.statusText).to.equal('No Content') + expect(res.headers.get('content-encoding')).to.equal('br') + expect(res.ok).to.be.true + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.be.empty + }) + }) + }) + + it('should skip decompression if unsupported', () => { + const url = `${base}sdch` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.equal('fake sdch string') + }) + }) + }) + + it('should skip decompression if unsupported codings', () => { + const url = `${base}multiunsupported` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.equal('multiunsupported') + }) + }) + }) + + it('should decompress multiple coding', () => { + const url = `${base}multisupported` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(result => { + expect(result).to.be.a('string') + expect(result).to.equal('hello world') + }) + }) + }) + + it('should reject if response compression is invalid', () => { + const url = `${base}invalid-content-encoding` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return expect(res.text()).to.eventually.be.rejected + }) + }) + + it('should handle errors on the body stream even if it is not used', done => { + const url = `${base}invalid-content-encoding` + fetch(url) + .then(res => { + expect(res.status).to.equal(200) + }) + .catch(() => {}) + .then(() => { + // Wait a few ms to see if a uncaught error occurs + setTimeout(() => { + done() + }, 20) + }) + }) + + it('should collect handled errors on the body stream to reject if the body is used later', () => { + const url = `${base}invalid-content-encoding` + return fetch(url).then(delay(20)).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return expect(res.text()).to.eventually.be.rejected + }) + }) + + it('should not overwrite existing accept-encoding header when auto decompression is true', () => { + const url = `${base}inspect` + const options = { + compress: true, + headers: { + 'Accept-Encoding': 'gzip' + } + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.headers['accept-encoding']).to.equal('gzip') + }) + }) + + describe('AbortController', () => { + let controller + + beforeEach(() => { + controller = new AbortController() + }) + + it('should support request cancellation with signal', () => { + const fetches = [ + fetch( + `${base}timeout`, + { + method: 'POST', + signal: controller.signal, + headers: { + 'Content-Type': 'application/json', + body: JSON.stringify({ hello: 'world' }) + } + } + ) + ] + + controller.abort() + + return Promise.all(fetches.map(fetched => expect(fetched) + .to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('name', 'AbortError') + )) + }) + + it('should support multiple request cancellation with signal', () => { + const fetches = [ + fetch(`${base}timeout`, { signal: controller.signal }), + fetch( + `${base}timeout`, + { + method: 'POST', + signal: controller.signal, + headers: { + 'Content-Type': 'application/json', + body: JSON.stringify({ hello: 'world' }) + } + } + ) + ] + + controller.abort() + + return 
Promise.all(fetches.map(fetched => expect(fetched) + .to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('name', 'AbortError') + )) + }) + + it('should reject immediately if signal has already been aborted', () => { + const url = `${base}timeout` + const options = { + signal: controller.signal + } + controller.abort() + const fetched = fetch(url, options) + return expect(fetched).to.eventually.be.rejected + .and.be.an.instanceOf(Error) + .and.have.property('name', 'AbortError') + }) + + it('should allow redirects to be aborted', () => { + const request = new Request(`${base}redirect/slow`, { + signal: controller.signal + }) + setTimeout(() => { + controller.abort() + }, 20) + return expect(fetch(request)).to.be.eventually.rejected + .and.be.an.instanceOf(Error) + .and.have.property('name', 'AbortError') + }) + + it('should allow redirected response body to be aborted', () => { + const request = new Request(`${base}redirect/slow-stream`, { + signal: controller.signal + }) + return expect(fetch(request).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + const result = res.text() + controller.abort() + return result + })).to.be.eventually.rejected + .and.be.an.instanceOf(Error) + .and.have.property('name', 'AbortError') + }) + + it('should reject response body with AbortError when aborted before stream has been read completely', () => { + return expect(fetch( + `${base}slow`, + { signal: controller.signal } + )) + .to.eventually.be.fulfilled + .then(res => { + const promise = res.text() + controller.abort() + return expect(promise) + .to.eventually.be.rejected + .and.be.an.instanceof(Error) + .and.have.property('name', 'AbortError') + }) + }) + + it('should reject response body methods immediately with AbortError when aborted before stream is disturbed', () => { + return expect(fetch( + `${base}slow`, + { signal: controller.signal } + )) + .to.eventually.be.fulfilled + .then(res => { + controller.abort() + return expect(res.text()) + .to.eventually.be.rejected + .and.be.an.instanceof(Error) + .and.have.property('name', 'AbortError') + }) + }) + }) + + it('should throw a TypeError if a signal is not of type AbortSignal or EventTarget', () => { + return Promise.all([ + expect(fetch(`${base}inspect`, { signal: {} })) + .to.be.eventually.rejected + .and.be.an.instanceof(TypeError), + expect(fetch(`${base}inspect`, { signal: '' })) + .to.be.eventually.rejected + .and.be.an.instanceof(TypeError), + expect(fetch(`${base}inspect`, { signal: Object.create(null) })) + .to.be.eventually.rejected + .and.be.an.instanceof(TypeError) + ]) + }) + + it('should gracefully handle a null signal', () => { + return fetch(`${base}hello`, { signal: null }).then(res => { + return expect(res.ok).to.be.true + }) + }) + + it('should allow setting User-Agent', () => { + const url = `${base}inspect` + const options = { + headers: { + 'user-agent': 'faked' + } + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.headers['user-agent']).to.equal('faked') + }) + }) + + it('should set default Accept header', () => { + const url = `${base}inspect` + fetch(url).then(res => res.json()).then(res => { + expect(res.headers.accept).to.equal('*/*') + }) + }) + + it('should allow setting Accept header', () => { + const url = `${base}inspect` + const options = { + headers: { + accept: 'application/json' + } + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.headers.accept).to.equal('application/json') + }) + }) + + 
it('should allow POST request', () => { + const url = `${base}inspect` + const options = { + method: 'POST' + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('0') + }) + }) + + it('should allow POST request with string body', () => { + const url = `${base}inspect` + const options = { + method: 'POST', + body: 'a=1' + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('a=1') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.equal('text/plain;charset=UTF-8') + expect(res.headers['content-length']).to.equal('3') + }) + }) + + it('should allow POST request with buffer body', () => { + const url = `${base}inspect` + const options = { + method: 'POST', + body: Buffer.from('a=1', 'utf-8') + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('a=1') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('3') + }) + }) + + it('should allow POST request with ArrayBuffer body', () => { + const encoder = new TextEncoder() + const url = `${base}inspect` + const options = { + method: 'POST', + body: encoder.encode('Hello, world!\n').buffer + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('Hello, world!\n') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('14') + }) + }) + + it('should allow POST request with ArrayBuffer body from a VM context', () => { + const url = `${base}inspect` + const options = { + method: 'POST', + body: new VMUint8Array(Buffer.from('Hello, world!\n')).buffer + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('Hello, world!\n') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('14') + }) + }) + + it('should allow POST request with ArrayBufferView (Uint8Array) body', () => { + const encoder = new TextEncoder() + const url = `${base}inspect` + const options = { + method: 'POST', + body: encoder.encode('Hello, world!\n') + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('Hello, world!\n') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('14') + }) + }) + + it('should allow POST request with ArrayBufferView (BigUint64Array) body', () => { + const encoder = new TextEncoder() + const url = `${base}inspect` + const options = { + method: 'POST', + body: new BigUint64Array(encoder.encode('0123456789abcdef').buffer) + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('0123456789abcdef') + 
expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('16') + }) + }) + + it('should allow POST request with ArrayBufferView (DataView) body', () => { + const encoder = new TextEncoder() + const url = `${base}inspect` + const options = { + method: 'POST', + body: new DataView(encoder.encode('Hello, world!\n').buffer) + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('Hello, world!\n') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('14') + }) + }) + + it('should allow POST request with ArrayBufferView (Uint8Array) body from a VM context', () => { + const url = `${base}inspect` + const options = { + method: 'POST', + body: new VMUint8Array(Buffer.from('Hello, world!\n')) + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('Hello, world!\n') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('14') + }) + }) + + it('should allow POST request with ArrayBufferView (Uint8Array, offset, length) body', () => { + const encoder = new TextEncoder() + const url = `${base}inspect` + const options = { + method: 'POST', + body: encoder.encode('Hello, world!\n').subarray(7, 13) + } + return fetch(url, options).then(res => res.json()).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('world!') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('6') + }) + }) + + it('should allow POST request with blob body without type', () => { + const url = `${base}inspect` + const options = { + method: 'POST', + body: new Blob(['a=1']) + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('a=1') + expect(res.headers['transfer-encoding']).to.be.undefined + // expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.equal('3') + }) + }) + + it('should allow POST request with blob body with type', () => { + const url = `${base}inspect` + const options = { + method: 'POST', + body: new Blob(['a=1'], { + type: 'text/plain;charset=UTF-8' + }) + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('a=1') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-type']).to.equal('text/plain;charset=utf-8') + expect(res.headers['content-length']).to.equal('3') + }) + }) + + it('should allow POST request with readable stream as body', () => { + const url = `${base}inspect` + const options = { + method: 'POST', + body: stream.Readable.from('a=1'), + duplex: 'half' + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('a=1') + expect(res.headers['transfer-encoding']).to.equal('chunked') + expect(res.headers['content-type']).to.be.undefined + expect(res.headers['content-length']).to.be.undefined + }) + }) + + it('should allow POST request with 
object body', () => { + const url = `${base}inspect` + // Note that fetch simply calls toString on an object + const options = { + method: 'POST', + body: { a: 1 } + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.body).to.equal('[object Object]') + expect(res.headers['content-type']).to.equal('text/plain;charset=UTF-8') + expect(res.headers['content-length']).to.equal('15') + }) + }) + + it('should allow POST request with form-data as body', () => { + const form = new FormData() + form.append('a', '1') + + const url = `${base}multipart` + const options = { + method: 'POST', + body: form + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.headers['content-type']).to.startWith('multipart/form-data; boundary=') + expect(res.body).to.equal('a=1') + }) + }) + + it('constructing a Response with URLSearchParams as body should have a Content-Type', () => { + const parameters = new URLSearchParams() + const res = new Response(parameters) + res.headers.get('Content-Type') + expect(res.headers.get('Content-Type')).to.equal('application/x-www-form-urlencoded;charset=UTF-8') + }) + + it('constructing a Request with URLSearchParams as body should have a Content-Type', () => { + const parameters = new URLSearchParams() + const request = new Request(base, { method: 'POST', body: parameters }) + expect(request.headers.get('Content-Type')).to.equal('application/x-www-form-urlencoded;charset=UTF-8') + }) + + it('Reading a body with URLSearchParams should echo back the result', () => { + const parameters = new URLSearchParams() + parameters.append('a', '1') + return new Response(parameters).text().then(text => { + expect(text).to.equal('a=1') + }) + }) + + // Body should have been cloned...
+ it('constructing a Request/Response with URLSearchParams and mutating it should not affect the body', () => { + const parameters = new URLSearchParams() + const request = new Request(`${base}inspect`, { method: 'POST', body: parameters }) + parameters.append('a', '1') + return request.text().then(text => { + expect(text).to.equal('') + }) + }) + + it('should allow POST request with URLSearchParams as body', () => { + const parameters = new URLSearchParams() + parameters.append('a', '1') + + const url = `${base}inspect` + const options = { + method: 'POST', + body: parameters + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.headers['content-type']).to.equal('application/x-www-form-urlencoded;charset=UTF-8') + expect(res.headers['content-length']).to.equal('3') + expect(res.body).to.equal('a=1') + }) + }) + + it('should still recognize URLSearchParams when extended', () => { + class CustomSearchParameters extends URLSearchParams {} + const parameters = new CustomSearchParameters() + parameters.append('a', '1') + + const url = `${base}inspect` + const options = { + method: 'POST', + body: parameters + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('POST') + expect(res.headers['content-type']).to.equal('application/x-www-form-urlencoded;charset=UTF-8') + expect(res.headers['content-length']).to.equal('3') + expect(res.body).to.equal('a=1') + }) + }) + + it('should allow PUT request', () => { + const url = `${base}inspect` + const options = { + method: 'PUT', + body: 'a=1' + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('PUT') + expect(res.body).to.equal('a=1') + }) + }) + + it('should allow DELETE request', () => { + const url = `${base}inspect` + const options = { + method: 'DELETE' + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('DELETE') + }) + }) + + it('should allow DELETE request with string body', () => { + const url = `${base}inspect` + const options = { + method: 'DELETE', + body: 'a=1' + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('DELETE') + expect(res.body).to.equal('a=1') + expect(res.headers['transfer-encoding']).to.be.undefined + expect(res.headers['content-length']).to.equal('3') + }) + }) + + it('should allow PATCH request', () => { + const url = `${base}inspect` + const options = { + method: 'PATCH', + body: 'a=1' + } + return fetch(url, options).then(res => { + return res.json() + }).then(res => { + expect(res.method).to.equal('PATCH') + expect(res.body).to.equal('a=1') + }) + }) + + it('should allow HEAD request', () => { + const url = `${base}hello` + const options = { + method: 'HEAD' + } + return fetch(url, options).then(res => { + expect(res.status).to.equal(200) + expect(res.statusText).to.equal('OK') + expect(res.headers.get('content-type')).to.equal('text/plain') + // expect(res.body).to.be.an.instanceof(stream.Transform) + return res.text() + }).then(text => { + expect(text).to.equal('') + }) + }) + + it('should allow HEAD request with content-encoding header', () => { + const url = `${base}error/404` + const options = { + method: 'HEAD' + } + return fetch(url, options).then(res => { + expect(res.status).to.equal(404) + expect(res.headers.get('content-encoding')).to.equal('gzip') + return res.text() +
}).then(text => { + expect(text).to.equal('') + }) + }) + + it('should allow OPTIONS request', () => { + const url = `${base}options` + const options = { + method: 'OPTIONS' + } + return fetch(url, options).then(res => { + expect(res.status).to.equal(200) + expect(res.statusText).to.equal('OK') + expect(res.headers.get('allow')).to.equal('GET, HEAD, OPTIONS') + // expect(res.body).to.be.an.instanceof(stream.Transform) + }) + }) + + it('should reject decoding body twice', () => { + const url = `${base}plain` + return fetch(url).then(res => { + expect(res.headers.get('content-type')).to.equal('text/plain') + return res.text().then(() => { + expect(res.bodyUsed).to.be.true + return expect(res.text()).to.eventually.be.rejectedWith(Error) + }) + }) + }) + + it('should allow cloning a json response and log it as text response', () => { + const url = `${base}json` + return fetch(url).then(res => { + const r1 = res.clone() + return Promise.all([res.json(), r1.text()]).then(results => { + expect(results[0]).to.deep.equal({ name: 'value' }) + expect(results[1]).to.equal('{"name":"value"}') + }) + }) + }) + + it('should allow cloning a json response, and then log it as text response', () => { + const url = `${base}json` + return fetch(url).then(res => { + const r1 = res.clone() + return res.json().then(result => { + expect(result).to.deep.equal({ name: 'value' }) + return r1.text().then(result => { + expect(result).to.equal('{"name":"value"}') + }) + }) + }) + }) + + it('should allow cloning a json response, first log as text response, then return json object', () => { + const url = `${base}json` + return fetch(url).then(res => { + const r1 = res.clone() + return r1.text().then(result => { + expect(result).to.equal('{"name":"value"}') + return res.json().then(result => { + expect(result).to.deep.equal({ name: 'value' }) + }) + }) + }) + }) + + it('should not allow cloning a response after its been used', () => { + const url = `${base}hello` + return fetch(url).then(res => + res.text().then(() => { + expect(() => { + res.clone() + }).to.throw(Error) + }) + ) + }) + + xit('should timeout on cloning response without consuming one of the streams when the second packet size is equal default highWaterMark', function () { + this.timeout(300) + const url = local.mockState(res => { + // Observed behavior of TCP packets splitting: + // - response body size <= 65438 → single packet sent + // - response body size > 65438 → multiple packets sent + // Max TCP packet size is 64kB (http://stackoverflow.com/a/2614188/5763764), + // but first packet probably transfers more than the response body. + const firstPacketMaxSize = 65438 + const secondPacketSize = 16 * 1024 // = defaultHighWaterMark + res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize)) + }) + return expect( + fetch(url).then(res => res.clone().buffer()) + ).to.timeout + }) + + xit('should timeout on cloning response without consuming one of the streams when the second packet size is equal custom highWaterMark', function () { + this.timeout(300) + const url = local.mockState(res => { + const firstPacketMaxSize = 65438 + const secondPacketSize = 10 + res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize)) + }) + return expect( + fetch(url, { highWaterMark: 10 }).then(res => res.clone().buffer()) + ).to.timeout + }) + + xit('should not timeout on cloning response without consuming one of the streams when the second packet size is less than default highWaterMark', function () { + // TODO: fix test. 
+ if (!isNodeLowerThan('v16.0.0')) { + this.skip() + } + + this.timeout(300) + const url = local.mockState(res => { + const firstPacketMaxSize = 65438 + const secondPacketSize = 16 * 1024 // = defaultHighWaterMark + res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize - 1)) + }) + return expect( + fetch(url).then(res => res.clone().buffer()) + ).not.to.timeout + }) + + xit('should not timeout on cloning response without consuming one of the streams when the second packet size is less than custom highWaterMark', function () { + // TODO: fix test. + if (!isNodeLowerThan('v16.0.0')) { + this.skip() + } + + this.timeout(300) + const url = local.mockState(res => { + const firstPacketMaxSize = 65438 + const secondPacketSize = 10 + res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize - 1)) + }) + return expect( + fetch(url, { highWaterMark: 10 }).then(res => res.clone().buffer()) + ).not.to.timeout + }) + + xit('should not timeout on cloning response without consuming one of the streams when the response size is double the custom large highWaterMark - 1', function () { + // TODO: fix test. + if (!isNodeLowerThan('v16.0.0')) { + this.skip() + } + + this.timeout(300) + const url = local.mockState(res => { + res.end(crypto.randomBytes((2 * 512 * 1024) - 1)) + }) + return expect( + fetch(url, { highWaterMark: 512 * 1024 }).then(res => res.clone().buffer()) + ).not.to.timeout + }) + + xit('should allow get all responses of a header', () => { + // TODO: fix test. + const url = `${base}cookie` + return fetch(url).then(res => { + const expected = 'a=1, b=1' + expect(res.headers.get('set-cookie')).to.equal(expected) + expect(res.headers.get('Set-Cookie')).to.equal(expected) + }) + }) + + it('should support fetch with Request instance', () => { + const url = `${base}hello` + const request = new Request(url) + return fetch(request).then(res => { + expect(res.url).to.equal(url) + expect(res.ok).to.be.true + expect(res.status).to.equal(200) + }) + }) + + it('should support fetch with Node.js URL object', () => { + const url = `${base}hello` + const urlObject = new URL(url) + const request = new Request(urlObject) + return fetch(request).then(res => { + expect(res.url).to.equal(url) + expect(res.ok).to.be.true + expect(res.status).to.equal(200) + }) + }) + + it('should support fetch with WHATWG URL object', () => { + const url = `${base}hello` + const urlObject = new URL(url) + const request = new Request(urlObject) + return fetch(request).then(res => { + expect(res.url).to.equal(url) + expect(res.ok).to.be.true + expect(res.status).to.equal(200) + }) + }) + + it('if params are given, do not modify anything', () => { + const url = `${base}question?a=1` + const urlObject = new URL(url) + const request = new Request(urlObject) + return fetch(request).then(res => { + expect(res.url).to.equal(url) + expect(res.ok).to.be.true + expect(res.status).to.equal(200) + }) + }) + + it('should support reading blob as text', () => { + return new Response('hello') + .blob() + .then(blob => blob.text()) + .then(body => { + expect(body).to.equal('hello') + }) + }) + + it('should support reading blob as arrayBuffer', () => { + return new Response('hello') + .blob() + .then(blob => blob.arrayBuffer()) + .then(ab => { + const string = String.fromCharCode.apply(null, new Uint8Array(ab)) + expect(string).to.equal('hello') + }) + }) + + it('should support blob round-trip', () => { + const url = `${base}hello` + + let length + let type + + return fetch(url).then(res => res.blob()).then(async blob => { + const 
url = `${base}inspect` + length = blob.size + type = blob.type + return fetch(url, { + method: 'POST', + body: blob + }) + }).then(res => res.json()).then(({ body, headers }) => { + expect(body).to.equal('world') + expect(headers['content-type']).to.equal(type) + expect(headers['content-length']).to.equal(String(length)) + }) + }) + + it('should support overwrite Request instance', () => { + const url = `${base}inspect` + const request = new Request(url, { + method: 'POST', + headers: { + a: '1' + } + }) + return fetch(request, { + method: 'GET', + headers: { + a: '2' + } + }).then(res => { + return res.json() + }).then(body => { + expect(body.method).to.equal('GET') + expect(body.headers.a).to.equal('2') + }) + }) + + it('should support http request', function () { + this.timeout(5000) + const url = 'https://github.com/' + const options = { + method: 'HEAD' + } + return fetch(url, options).then(res => { + expect(res.status).to.equal(200) + expect(res.ok).to.be.true + }) + }) + + it('should encode URLs as UTF-8', async () => { + const url = `${base}möbius` + const res = await fetch(url) + expect(res.url).to.equal(`${base}m%C3%B6bius`) + }) + + it('should allow manual redirect handling', function () { + this.timeout(5000) + const url = `${base}redirect/302` + const options = { + redirect: 'manual' + } + return fetch(url, options).then(res => { + expect(res.status).to.equal(302) + expect(res.url).to.equal(url) + expect(res.type).to.equal('basic') + expect(res.headers.get('Location')).to.equal('/inspect') + expect(res.ok).to.be.false + }) + }) +}) diff --git a/test/node-fetch/mock.js b/test/node-fetch/mock.js new file mode 100644 index 0000000..a53f464 --- /dev/null +++ b/test/node-fetch/mock.js @@ -0,0 +1,112 @@ +/* eslint no-unused-expressions: "off" */ + +// Test tools +const chai = require('chai') + +const { + fetch, + MockAgent, + setGlobalDispatcher, + Headers +} = require('../../index.js') + +const { expect } = chai + +describe('node-fetch with MockAgent', () => { + it('should match the url', async () => { + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + const mockPool = mockAgent.get('http://localhost:3000') + + mockPool + .intercept({ + path: '/test', + method: 'GET' + }) + .reply(200, { success: true }) + .persist() + + const res = await fetch('http://localhost:3000/test', { + method: 'GET' + }) + + expect(res.status).to.equal(200) + expect(await res.json()).to.deep.equal({ success: true }) + }) + + it('should match the body', async () => { + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + const mockPool = mockAgent.get('http://localhost:3000') + + mockPool + .intercept({ + path: '/test', + method: 'POST', + body: (value) => { + return value === 'request body' + } + }) + .reply(200, { success: true }) + .persist() + + const res = await fetch('http://localhost:3000/test', { + method: 'POST', + body: 'request body' + }) + + expect(res.status).to.equal(200) + expect(await res.json()).to.deep.equal({ success: true }) + }) + + it('should match the headers', async () => { + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + const mockPool = mockAgent.get('http://localhost:3000') + + mockPool + .intercept({ + path: '/test', + method: 'GET', + headers: (h) => { + return h['user-agent'] === 'undici' + } + }) + .reply(200, { success: true }) + .persist() + + const res = await fetch('http://localhost:3000/test', { + method: 'GET', + headers: new Headers({ 'User-Agent': 'undici' }) + }) + expect(res.status).to.equal(200) + 
expect(await res.json()).to.deep.equal({ success: true }) + }) + + it('should match the headers with a matching function', async () => { + const mockAgent = new MockAgent() + setGlobalDispatcher(mockAgent) + const mockPool = mockAgent.get('http://localhost:3000') + + mockPool + .intercept({ + path: '/test', + method: 'GET', + headers (headers) { + expect(headers).to.be.an('object') + expect(headers).to.have.property('user-agent', 'undici') + return true + } + }) + .reply(200, { success: true }) + .persist() + + const res = await fetch('http://localhost:3000/test', { + method: 'GET', + headers: new Headers({ 'User-Agent': 'undici' }) + }) + + expect(res.status).to.equal(200) + expect(await res.json()).to.deep.equal({ success: true }) + }) +}) diff --git a/test/node-fetch/request.js b/test/node-fetch/request.js new file mode 100644 index 0000000..2d29d51 --- /dev/null +++ b/test/node-fetch/request.js @@ -0,0 +1,281 @@ +const stream = require('stream') +const http = require('http') + +const chai = require('chai') +const { Blob } = require('buffer') + +const Request = require('../../lib/fetch/request.js').Request +const TestServer = require('./utils/server.js') + +const { expect } = chai + +describe('Request', () => { + const local = new TestServer() + let base + + before(async () => { + await local.start() + base = `http://${local.hostname}:${local.port}/` + }) + + after(async () => { + return local.stop() + }) + + it('should have attributes conforming to Web IDL', () => { + const request = new Request('http://github.com/') + const enumerableProperties = [] + for (const property in request) { + enumerableProperties.push(property) + } + + for (const toCheck of [ + 'body', + 'bodyUsed', + 'arrayBuffer', + 'blob', + 'json', + 'text', + 'method', + 'url', + 'headers', + 'redirect', + 'clone', + 'signal' + ]) { + expect(enumerableProperties).to.contain(toCheck) + } + + // for (const toCheck of [ + // 'body', 'bodyUsed', 'method', 'url', 'headers', 'redirect', 'signal' + // ]) { + // expect(() => { + // request[toCheck] = 'abc' + // }).to.throw() + // } + }) + + // it('should support wrapping Request instance', () => { + // const url = `${base}hello` + + // const form = new FormData() + // form.append('a', '1') + // const { signal } = new AbortController() + + // const r1 = new Request(url, { + // method: 'POST', + // follow: 1, + // body: form, + // signal + // }) + // const r2 = new Request(r1, { + // follow: 2 + // }) + + // expect(r2.url).to.equal(url) + // expect(r2.method).to.equal('POST') + // expect(r2.signal).to.equal(signal) + // // Note that we didn't clone the body + // expect(r2.body).to.equal(form) + // expect(r1.follow).to.equal(1) + // expect(r2.follow).to.equal(2) + // expect(r1.counter).to.equal(0) + // expect(r2.counter).to.equal(0) + // }) + + xit('should override signal on derived Request instances', () => { + const parentAbortController = new AbortController() + const derivedAbortController = new AbortController() + const parentRequest = new Request(`${base}hello`, { + signal: parentAbortController.signal + }) + const derivedRequest = new Request(parentRequest, { + signal: derivedAbortController.signal + }) + expect(parentRequest.signal).to.equal(parentAbortController.signal) + expect(derivedRequest.signal).to.equal(derivedAbortController.signal) + }) + + xit('should allow removing signal on derived Request instances', () => { + const parentAbortController = new AbortController() + const parentRequest = new Request(`${base}hello`, { + signal: parentAbortController.signal + }) 
+ const derivedRequest = new Request(parentRequest, { + signal: null + }) + expect(parentRequest.signal).to.equal(parentAbortController.signal) + expect(derivedRequest.signal).to.equal(null) + }) + + it('should throw error with GET/HEAD requests with body', () => { + expect(() => new Request(base, { body: '' })) + .to.throw(TypeError) + expect(() => new Request(base, { body: 'a' })) + .to.throw(TypeError) + expect(() => new Request(base, { body: '', method: 'HEAD' })) + .to.throw(TypeError) + expect(() => new Request(base, { body: 'a', method: 'HEAD' })) + .to.throw(TypeError) + expect(() => new Request(base, { body: 'a', method: 'get' })) + .to.throw(TypeError) + expect(() => new Request(base, { body: 'a', method: 'head' })) + .to.throw(TypeError) + }) + + it('should default to null as body', () => { + const request = new Request(base) + expect(request.body).to.equal(null) + return request.text().then(result => expect(result).to.equal('')) + }) + + it('should support parsing headers', () => { + const url = base + const request = new Request(url, { + headers: { + a: '1' + } + }) + expect(request.url).to.equal(url) + expect(request.headers.get('a')).to.equal('1') + }) + + it('should support arrayBuffer() method', () => { + const url = base + const request = new Request(url, { + method: 'POST', + body: 'a=1' + }) + expect(request.url).to.equal(url) + return request.arrayBuffer().then(result => { + expect(result).to.be.an.instanceOf(ArrayBuffer) + const string = String.fromCharCode.apply(null, new Uint8Array(result)) + expect(string).to.equal('a=1') + }) + }) + + it('should support text() method', () => { + const url = base + const request = new Request(url, { + method: 'POST', + body: 'a=1' + }) + expect(request.url).to.equal(url) + return request.text().then(result => { + expect(result).to.equal('a=1') + }) + }) + + it('should support json() method', () => { + const url = base + const request = new Request(url, { + method: 'POST', + body: '{"a":1}' + }) + expect(request.url).to.equal(url) + return request.json().then(result => { + expect(result.a).to.equal(1) + }) + }) + + it('should support blob() method', () => { + const url = base + const request = new Request(url, { + method: 'POST', + body: Buffer.from('a=1') + }) + expect(request.url).to.equal(url) + return request.blob().then(result => { + expect(result).to.be.an.instanceOf(Blob) + expect(result.size).to.equal(3) + expect(result.type).to.equal('') + }) + }) + + it('should support clone() method', () => { + const url = base + const body = stream.Readable.from('a=1') + const agent = new http.Agent() + const { signal } = new AbortController() + const request = new Request(url, { + body, + method: 'POST', + redirect: 'manual', + headers: { + b: '2' + }, + follow: 3, + compress: false, + agent, + signal, + duplex: 'half' + }) + const cl = request.clone() + expect(cl.url).to.equal(url) + expect(cl.method).to.equal('POST') + expect(cl.redirect).to.equal('manual') + expect(cl.headers.get('b')).to.equal('2') + expect(cl.method).to.equal('POST') + // Clone body shouldn't be the same body + expect(cl.body).to.not.equal(body) + return Promise.all([cl.text(), request.text()]).then(results => { + expect(results[0]).to.equal('a=1') + expect(results[1]).to.equal('a=1') + }) + }) + + it('should support ArrayBuffer as body', () => { + const encoder = new TextEncoder() + const body = encoder.encode('a=12345678901234').buffer + const request = new Request(base, { + method: 'POST', + body + }) + new Uint8Array(body)[0] = 0 + return 
request.text().then(result => { + expect(result).to.equal('a=12345678901234') + }) + }) + + it('should support Uint8Array as body', () => { + const encoder = new TextEncoder() + const fullbuffer = encoder.encode('a=12345678901234').buffer + const body = new Uint8Array(fullbuffer, 2, 9) + const request = new Request(base, { + method: 'POST', + body + }) + body[0] = 0 + return request.text().then(result => { + expect(result).to.equal('123456789') + }) + }) + + it('should support BigUint64Array as body', () => { + const encoder = new TextEncoder() + const fullbuffer = encoder.encode('a=12345678901234').buffer + const body = new BigUint64Array(fullbuffer, 8, 1) + const request = new Request(base, { + method: 'POST', + body + }) + body[0] = 0n + return request.text().then(result => { + expect(result).to.equal('78901234') + }) + }) + + it('should support DataView as body', () => { + const encoder = new TextEncoder() + const fullbuffer = encoder.encode('a=12345678901234').buffer + const body = new Uint8Array(fullbuffer, 2, 9) + const request = new Request(base, { + method: 'POST', + body + }) + body[0] = 0 + return request.text().then(result => { + expect(result).to.equal('123456789') + }) + }) +}) diff --git a/test/node-fetch/response.js b/test/node-fetch/response.js new file mode 100644 index 0000000..4bb7c42 --- /dev/null +++ b/test/node-fetch/response.js @@ -0,0 +1,251 @@ +/* eslint no-unused-expressions: "off" */ + +const chai = require('chai') +const stream = require('stream') +const { Response } = require('../../lib/fetch/response.js') +const TestServer = require('./utils/server.js') +const { Blob } = require('buffer') +const { kState } = require('../../lib/fetch/symbols.js') + +const { expect } = chai + +describe('Response', () => { + const local = new TestServer() + let base + + before(async () => { + await local.start() + base = `http://${local.hostname}:${local.port}/` + }) + + after(async () => { + return local.stop() + }) + + it('should have attributes conforming to Web IDL', () => { + const res = new Response() + const enumerableProperties = [] + for (const property in res) { + enumerableProperties.push(property) + } + + for (const toCheck of [ + 'body', + 'bodyUsed', + 'arrayBuffer', + 'blob', + 'json', + 'text', + 'type', + 'url', + 'status', + 'ok', + 'redirected', + 'statusText', + 'headers', + 'clone' + ]) { + expect(enumerableProperties).to.contain(toCheck) + } + + // TODO + // for (const toCheck of [ + // 'body', + // 'bodyUsed', + // 'type', + // 'url', + // 'status', + // 'ok', + // 'redirected', + // 'statusText', + // 'headers' + // ]) { + // expect(() => { + // res[toCheck] = 'abc' + // }).to.throw() + // } + }) + + it('should support empty options', () => { + const res = new Response(stream.Readable.from('a=1')) + return res.text().then(result => { + expect(result).to.equal('a=1') + }) + }) + + it('should support parsing headers', () => { + const res = new Response(null, { + headers: { + a: '1' + } + }) + expect(res.headers.get('a')).to.equal('1') + }) + + it('should support text() method', () => { + const res = new Response('a=1') + return res.text().then(result => { + expect(result).to.equal('a=1') + }) + }) + + it('should support json() method', () => { + const res = new Response('{"a":1}') + return res.json().then(result => { + expect(result.a).to.equal(1) + }) + }) + + if (Blob) { + it('should support blob() method', () => { + const res = new Response('a=1', { + method: 'POST', + headers: { + 'Content-Type': 'text/plain' + } + }) + return res.blob().then(result => 
{ + expect(result).to.be.an.instanceOf(Blob) + expect(result.size).to.equal(3) + expect(result.type).to.equal('text/plain') + }) + }) + } + + it('should support clone() method', () => { + const body = stream.Readable.from('a=1') + const res = new Response(body, { + headers: { + a: '1' + }, + status: 346, + statusText: 'production' + }) + res[kState].urlList = [new URL(base)] + const cl = res.clone() + expect(cl.headers.get('a')).to.equal('1') + expect(cl.type).to.equal('default') + expect(cl.url).to.equal(base) + expect(cl.status).to.equal(346) + expect(cl.statusText).to.equal('production') + expect(cl.ok).to.be.false + // Clone body shouldn't be the same body + expect(cl.body).to.not.equal(body) + return Promise.all([cl.text(), res.text()]).then(results => { + expect(results[0]).to.equal('a=1') + expect(results[1]).to.equal('a=1') + }) + }) + + it('should support stream as body', () => { + const body = stream.Readable.from('a=1') + const res = new Response(body) + return res.text().then(result => { + expect(result).to.equal('a=1') + }) + }) + + it('should support string as body', () => { + const res = new Response('a=1') + return res.text().then(result => { + expect(result).to.equal('a=1') + }) + }) + + it('should support buffer as body', () => { + const res = new Response(Buffer.from('a=1')) + return res.text().then(result => { + expect(result).to.equal('a=1') + }) + }) + + it('should support ArrayBuffer as body', () => { + const encoder = new TextEncoder() + const fullbuffer = encoder.encode('a=12345678901234').buffer + const res = new Response(fullbuffer) + new Uint8Array(fullbuffer)[0] = 0 + return res.text().then(result => { + expect(result).to.equal('a=12345678901234') + }) + }) + + it('should support blob as body', async () => { + const res = new Response(new Blob(['a=1'])) + return res.text().then(result => { + expect(result).to.equal('a=1') + }) + }) + + it('should support Uint8Array as body', () => { + const encoder = new TextEncoder() + const fullbuffer = encoder.encode('a=12345678901234').buffer + const body = new Uint8Array(fullbuffer, 2, 9) + const res = new Response(body) + body[0] = 0 + return res.text().then(result => { + expect(result).to.equal('123456789') + }) + }) + + it('should support BigUint64Array as body', () => { + const encoder = new TextEncoder() + const fullbuffer = encoder.encode('a=12345678901234').buffer + const body = new BigUint64Array(fullbuffer, 8, 1) + const res = new Response(body) + body[0] = 0n + return res.text().then(result => { + expect(result).to.equal('78901234') + }) + }) + + it('should support DataView as body', () => { + const encoder = new TextEncoder() + const fullbuffer = encoder.encode('a=12345678901234').buffer + const body = new Uint8Array(fullbuffer, 2, 9) + const res = new Response(body) + body[0] = 0 + return res.text().then(result => { + expect(result).to.equal('123456789') + }) + }) + + it('should default to null as body', () => { + const res = new Response() + expect(res.body).to.equal(null) + + return res.text().then(result => expect(result).to.equal('')) + }) + + it('should default to 200 as status code', () => { + const res = new Response(null) + expect(res.status).to.equal(200) + }) + + it('should default to empty string as url', () => { + const res = new Response() + expect(res.url).to.equal('') + }) + + it('should support error() static method', () => { + const res = Response.error() + expect(res).to.be.an.instanceof(Response) + expect(res.type).to.equal('error') + expect(res.status).to.equal(0) + 
expect(res.statusText).to.equal('') + }) + + it('should support undefined status', () => { + const res = new Response(null, { status: undefined }) + expect(res.status).to.equal(200) + }) + + it('should support undefined statusText', () => { + const res = new Response(null, { statusText: undefined }) + expect(res.statusText).to.equal('') + }) + + it('should not set bodyUsed to undefined', () => { + const res = new Response() + expect(res.bodyUsed).to.be.false + }) +}) diff --git a/test/node-fetch/utils/chai-timeout.js b/test/node-fetch/utils/chai-timeout.js new file mode 100644 index 0000000..6838a4c --- /dev/null +++ b/test/node-fetch/utils/chai-timeout.js @@ -0,0 +1,15 @@ +const pTimeout = require('p-timeout') + +module.exports = ({ Assertion }, utils) => { + utils.addProperty(Assertion.prototype, 'timeout', async function () { + let timeouted = false + await pTimeout(this._obj, 150, () => { + timeouted = true + }) + return this.assert( + timeouted, + 'expected promise to timeout but it was resolved', + 'expected promise not to timeout but it timed out' + ) + }) +} diff --git a/test/node-fetch/utils/dummy.txt b/test/node-fetch/utils/dummy.txt new file mode 100644 index 0000000..5ca5191 --- /dev/null +++ b/test/node-fetch/utils/dummy.txt @@ -0,0 +1 @@ +i am a dummy \ No newline at end of file diff --git a/test/node-fetch/utils/read-stream.js b/test/node-fetch/utils/read-stream.js new file mode 100644 index 0000000..7d79153 --- /dev/null +++ b/test/node-fetch/utils/read-stream.js @@ -0,0 +1,9 @@ +module.exports = async function readStream (stream) { + const chunks = [] + + for await (const chunk of stream) { + chunks.push(chunk instanceof Buffer ? chunk : Buffer.from(chunk)) + } + + return Buffer.concat(chunks) +} diff --git a/test/node-fetch/utils/server.js b/test/node-fetch/utils/server.js new file mode 100644 index 0000000..46dc983 --- /dev/null +++ b/test/node-fetch/utils/server.js @@ -0,0 +1,467 @@ +const http = require('http') +const zlib = require('zlib') +const { once } = require('events') +const Busboy = require('@fastify/busboy') + +module.exports = class TestServer { + constructor () { + this.server = http.createServer(this.router) + // Node 8 default keepalive timeout is 5000ms + // make it shorter here as we want to close server quickly at the end of tests + this.server.keepAliveTimeout = 1000 + this.server.on('error', err => { + console.log(err.stack) + }) + this.server.on('connection', socket => { + socket.setTimeout(1500) + }) + } + + async start () { + this.server.listen(0, 'localhost') + return once(this.server, 'listening') + } + + async stop () { + this.server.close() + return once(this.server, 'close') + } + + get port () { + return this.server.address().port + } + + get hostname () { + return 'localhost' + } + + mockState (responseHandler) { + this.server.nextResponseHandler = responseHandler + return `http://${this.hostname}:${this.port}/mocked` + } + + router (request, res) { + const p = request.url + + if (p === '/mocked') { + if (this.nextResponseHandler) { + this.nextResponseHandler(res) + this.nextResponseHandler = undefined + } else { + throw new Error('No mocked response. 
Use ’TestServer.mockState()’.') + } + } + + if (p === '/hello') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.end('world') + } + + if (p.includes('question')) { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.end('ok') + } + + if (p === '/plain') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.end('text') + } + + if (p === '/no-status-text') { + res.writeHead(200, '', {}).end() + } + + if (p === '/options') { + res.statusCode = 200 + res.setHeader('Allow', 'GET, HEAD, OPTIONS') + res.end('hello world') + } + + if (p === '/html') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/html') + res.end('') + } + + if (p === '/json') { + res.statusCode = 200 + res.setHeader('Content-Type', 'application/json') + res.end(JSON.stringify({ + name: 'value' + })) + } + + if (p === '/gzip') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Content-Encoding', 'gzip') + zlib.gzip('hello world', (err, buffer) => { + if (err) { + throw err + } + + res.end(buffer) + }) + } + + if (p === '/gzip-truncated') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Content-Encoding', 'gzip') + zlib.gzip('hello world', (err, buffer) => { + if (err) { + throw err + } + + // Truncate the CRC checksum and size check at the end of the stream + res.end(buffer.slice(0, -8)) + }) + } + + if (p === '/gzip-capital') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Content-Encoding', 'GZip') + zlib.gzip('hello world', (err, buffer) => { + if (err) { + throw err + } + + res.end(buffer) + }) + } + + if (p === '/deflate') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Content-Encoding', 'deflate') + zlib.deflate('hello world', (err, buffer) => { + if (err) { + throw err + } + + res.end(buffer) + }) + } + + if (p === '/brotli') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + if (typeof zlib.createBrotliDecompress === 'function') { + res.setHeader('Content-Encoding', 'br') + zlib.brotliCompress('hello world', (err, buffer) => { + if (err) { + throw err + } + + res.end(buffer) + }) + } + } + + if (p === '/multiunsupported') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + if (typeof zlib.createBrotliDecompress === 'function') { + res.setHeader('Content-Encoding', 'br,asd,br') + res.end('multiunsupported') + } + } + + if (p === '/multisupported') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + if (typeof zlib.createBrotliDecompress === 'function') { + res.setHeader('Content-Encoding', 'br,br') + zlib.brotliCompress('hello world', (err, buffer) => { + if (err) { + throw err + } + + zlib.brotliCompress(buffer, (err, buffer) => { + if (err) { + throw err + } + + res.end(buffer) + }) + }) + } + } + + if (p === '/deflate-raw') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Content-Encoding', 'deflate') + zlib.deflateRaw('hello world', (err, buffer) => { + if (err) { + throw err + } + + res.end(buffer) + }) + } + + if (p === '/sdch') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Content-Encoding', 'sdch') + res.end('fake sdch string') + } + + if (p === '/invalid-content-encoding') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Content-Encoding', 'gzip') + res.end('fake gzip string') + } + + if (p 
=== '/timeout') { + setTimeout(() => { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.end('text') + }, 1000) + } + + if (p === '/slow') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.write('test') + setTimeout(() => { + res.end('test') + }, 1000) + } + + if (p === '/cookie') { + res.statusCode = 200 + res.setHeader('Set-Cookie', ['a=1', 'b=1']) + res.end('cookie') + } + + if (p === '/size/chunk') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + setTimeout(() => { + res.write('test') + }, 10) + setTimeout(() => { + res.end('test') + }, 20) + } + + if (p === '/size/long') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.end('testtest') + } + + if (p === '/redirect/301') { + res.statusCode = 301 + res.setHeader('Location', '/inspect') + res.end() + } + + if (p === '/redirect/302') { + res.statusCode = 302 + res.setHeader('Location', '/inspect') + res.end() + } + + if (p === '/redirect/303') { + res.statusCode = 303 + res.setHeader('Location', '/inspect') + res.end() + } + + if (p === '/redirect/307') { + res.statusCode = 307 + res.setHeader('Location', '/inspect') + res.end() + } + + if (p === '/redirect/308') { + res.statusCode = 308 + res.setHeader('Location', '/inspect') + res.end() + } + + if (p === '/redirect/chain') { + res.statusCode = 301 + res.setHeader('Location', '/redirect/301') + res.end() + } + + if (p.startsWith('/redirect/chain/')) { + const count = parseInt(p.split('/').pop()) - 1 + res.statusCode = 301 + res.setHeader('Location', count ? `/redirect/chain/${count}` : '/redirect/301') + res.end() + } + + if (p === '/redirect/no-location') { + res.statusCode = 301 + res.end() + } + + if (p === '/redirect/slow') { + res.statusCode = 301 + res.setHeader('Location', '/redirect/301') + setTimeout(() => { + res.end() + }, 1000) + } + + if (p === '/redirect/slow-chain') { + res.statusCode = 301 + res.setHeader('Location', '/redirect/slow') + setTimeout(() => { + res.end() + }, 10) + } + + if (p === '/redirect/slow-stream') { + res.statusCode = 301 + res.setHeader('Location', '/slow') + res.end() + } + + if (p === '/redirect/bad-location') { + res.socket.write('HTTP/1.1 301\r\nLocation: ☃\r\nContent-Length: 0\r\n') + res.socket.end('\r\n') + } + + if (p === '/error/400') { + res.statusCode = 400 + res.setHeader('Content-Type', 'text/plain') + res.end('client error') + } + + if (p === '/error/404') { + res.statusCode = 404 + res.setHeader('Content-Encoding', 'gzip') + res.end() + } + + if (p === '/error/500') { + res.statusCode = 500 + res.setHeader('Content-Type', 'text/plain') + res.end('server error') + } + + if (p === '/error/reset') { + res.destroy() + } + + if (p === '/error/premature') { + res.writeHead(200, { 'content-length': 50 }) + res.write('foo') + setTimeout(() => { + res.destroy() + }, 100) + } + + if (p === '/error/premature/chunked') { + res.writeHead(200, { + 'Content-Type': 'application/json', + 'Transfer-Encoding': 'chunked' + }) + + res.write(`${JSON.stringify({ data: 'hi' })}\n`) + + setTimeout(() => { + res.write(`${JSON.stringify({ data: 'bye' })}\n`) + }, 200) + + setTimeout(() => { + res.destroy() + }, 400) + } + + if (p === '/error/json') { + res.statusCode = 200 + res.setHeader('Content-Type', 'application/json') + res.end('invalid json') + } + + if (p === '/no-content') { + res.statusCode = 204 + res.end() + } + + if (p === '/no-content/gzip') { + res.statusCode = 204 + res.setHeader('Content-Encoding', 'gzip') + res.end() + } + + if (p === 
'/no-content/brotli') { + res.statusCode = 204 + res.setHeader('Content-Encoding', 'br') + res.end() + } + + if (p === '/not-modified') { + res.statusCode = 304 + res.end() + } + + if (p === '/not-modified/gzip') { + res.statusCode = 304 + res.setHeader('Content-Encoding', 'gzip') + res.end() + } + + if (p === '/inspect') { + res.statusCode = 200 + res.setHeader('Content-Type', 'application/json') + let body = '' + request.on('data', c => { + body += c + }) + request.on('end', () => { + res.end(JSON.stringify({ + method: request.method, + url: request.url, + headers: request.headers, + body + })) + }) + } + + if (p === '/multipart') { + res.statusCode = 200 + res.setHeader('Content-Type', 'application/json') + const busboy = new Busboy({ headers: request.headers }) + let body = '' + busboy.on('file', async (fieldName, file, fileName) => { + body += `${fieldName}=${fileName}` + // consume file data + // eslint-disable-next-line no-empty, no-unused-vars + for await (const c of file) {} + }) + + busboy.on('field', (fieldName, value) => { + body += `${fieldName}=${value}` + }) + busboy.on('finish', () => { + res.end(JSON.stringify({ + method: request.method, + url: request.url, + headers: request.headers, + body + })) + }) + request.pipe(busboy) + } + + if (p === '/m%C3%B6bius') { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + res.end('ok') + } + } +} diff --git a/test/parser-issues.js b/test/parser-issues.js new file mode 100644 index 0000000..b98edf1 --- /dev/null +++ b/test/parser-issues.js @@ -0,0 +1,114 @@ +const net = require('net') +const { test } = require('tap') +const { Client, errors } = require('..') + +test('https://github.com/mcollina/undici/issues/268', (t) => { + t.plan(2) + + const server = net.createServer(socket => { + socket.write('HTTP/1.1 200 OK\r\n') + socket.write('Transfer-Encoding: chunked\r\n\r\n') + setTimeout(() => { + socket.write('1\r\n') + socket.write('\n\r\n') + setTimeout(() => { + socket.write('1\r\n') + socket.write('\n\r\n') + }, 500) + }, 500) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + method: 'GET', + path: '/nxt/_changes?feed=continuous&heartbeat=5000', + headersTimeout: 1e3 + }, (err, data) => { + t.error(err) + data.body + .resume() + setTimeout(() => { + t.pass() + data.body.on('error', () => {}) + }, 2e3) + }) + }) +}) + +test('parser fail', (t) => { + t.plan(2) + + const server = net.createServer(socket => { + socket.write('HTT/1.1 200 OK\r\n') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + method: 'GET', + path: '/' + }, (err, data) => { + t.ok(err) + t.type(err, errors.HTTPParserError) + }) + }) +}) + +test('split header field', (t) => { + t.plan(2) + + const server = net.createServer(socket => { + socket.write('HTTP/1.1 200 OK\r\nA') + setTimeout(() => { + socket.write('SD: asd,asd\r\n\r\n\r\n') + }, 100) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + method: 'GET', + path: '/' + }, (err, data) => { + t.error(err) + t.equal(data.headers.asd, 'asd,asd') + data.body.destroy().on('error', () => {}) + }) + }) +}) + +test('split header 
value', (t) => { + t.plan(2) + + const server = net.createServer(socket => { + socket.write('HTTP/1.1 200 OK\r\nASD: asd') + setTimeout(() => { + socket.write(',asd\r\n\r\n\r\n') + }, 100) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + method: 'GET', + path: '/' + }, (err, data) => { + t.error(err) + t.equal(data.headers.asd, 'asd,asd') + data.body.destroy().on('error', () => {}) + }) + }) +}) diff --git a/test/pipeline-pipelining.js b/test/pipeline-pipelining.js new file mode 100644 index 0000000..52a29d7 --- /dev/null +++ b/test/pipeline-pipelining.js @@ -0,0 +1,108 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { kConnect } = require('../lib/core/symbols') +const { kBusy, kPending, kRunning } = require('../lib/core/symbols') + +test('pipeline pipelining', (t) => { + t.plan(10) + + const server = createServer((req, res) => { + t.strictSame(req.headers['transfer-encoding'], undefined) + res.end() + }) + + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 2 + }) + t.teardown(client.close.bind(client)) + + client[kConnect](() => { + t.equal(client[kRunning], 0) + client.pipeline({ + method: 'GET', + path: '/' + }, ({ body }) => body).end().resume() + t.equal(client[kBusy], true) + t.strictSame(client[kRunning], 0) + t.strictSame(client[kPending], 1) + + client.pipeline({ + method: 'GET', + path: '/' + }, ({ body }) => body).end().resume() + t.equal(client[kBusy], true) + t.strictSame(client[kRunning], 0) + t.strictSame(client[kPending], 2) + process.nextTick(() => { + t.equal(client[kRunning], 2) + }) + }) + }) +}) + +test('pipeline pipelining retry', (t) => { + t.plan(13) + + let count = 0 + const server = createServer((req, res) => { + if (count++ === 0) { + res.destroy() + } else { + res.end() + } + }) + + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 3 + }) + t.teardown(client.destroy.bind(client)) + + client.once('disconnect', () => { + t.pass() + }) + + client[kConnect](() => { + client.pipeline({ + method: 'GET', + path: '/' + }, ({ body }) => body).end().resume() + .on('error', (err) => { + t.ok(err) + }) + t.equal(client[kBusy], true) + t.strictSame(client[kRunning], 0) + t.strictSame(client[kPending], 1) + + client.pipeline({ + method: 'GET', + path: '/' + }, ({ body }) => body).end().resume() + t.equal(client[kBusy], true) + t.strictSame(client[kRunning], 0) + t.strictSame(client[kPending], 2) + + client.pipeline({ + method: 'GET', + path: '/' + }, ({ body }) => body).end().resume() + t.equal(client[kBusy], true) + t.strictSame(client[kRunning], 0) + t.strictSame(client[kPending], 3) + + process.nextTick(() => { + t.equal(client[kRunning], 3) + }) + + client.close(() => { + t.pass() + }) + }) + }) +}) diff --git a/test/pool.js b/test/pool.js new file mode 100644 index 0000000..8cf7195 --- /dev/null +++ b/test/pool.js @@ -0,0 +1,1101 @@ +'use strict' + +const { EventEmitter } = require('events') +const { createServer } = require('http') +const net = require('net') +const { + finished, + PassThrough, + Readable +} = require('stream') +const { promisify } = require('util') +const proxyquire = require('proxyquire') +const { 
test } = require('tap') + const { + kBusy, + kPending, + kRunning, + kSize, + kUrl + } = require('../lib/core/symbols') + const { + Client, + Pool, + errors + } = require('..') + + test('throws when connection is infinite', (t) => { + t.plan(2) + + try { + new Pool(null, { connections: 0 / 0 }) // eslint-disable-line + } catch (e) { + t.type(e, errors.InvalidArgumentError) + t.equal(e.message, 'invalid connections') + } + }) + + test('throws when connections is negative', (t) => { + t.plan(2) + + try { + new Pool(null, { connections: -1 }) // eslint-disable-line no-new + } catch (e) { + t.type(e, errors.InvalidArgumentError) + t.equal(e.message, 'invalid connections') + } + }) + + test('throws when connection is not a number', (t) => { + t.plan(2) + + try { + new Pool(null, { connections: true }) // eslint-disable-line no-new + } catch (e) { + t.type(e, errors.InvalidArgumentError) + t.equal(e.message, 'invalid connections') + } + }) + + test('throws when factory is not a function', (t) => { + t.plan(2) + + try { + new Pool(null, { factory: '' }) // eslint-disable-line no-new + } catch (e) { + t.type(e, errors.InvalidArgumentError) + t.equal(e.message, 'factory must be a function.') + } + }) + + test('does not throw when connect is a function', (t) => { + t.plan(1) + + t.doesNotThrow(() => new Pool('http://localhost', { connect: () => {} })) + }) + + test('connect/disconnect event(s)', (t) => { + const clients = 2 + + t.plan(clients * 6) + + const server = createServer((req, res) => { + res.writeHead(200, { + Connection: 'keep-alive', + 'Keep-Alive': 'timeout=1s' + }) + res.end('ok') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const pool = new Pool(`http://localhost:${server.address().port}`, { + connections: clients, + keepAliveTimeoutThreshold: 100 + }) + t.teardown(pool.close.bind(pool)) + + pool.on('connect', (origin, [pool, client]) => { + t.equal(client instanceof Client, true) + }) + pool.on('disconnect', (origin, [pool, client], error) => { + t.ok(client instanceof Client) + t.type(error, errors.InformationalError) + t.equal(error.code, 'UND_ERR_INFO') + t.equal(error.message, 'socket idle timeout') + }) + + for (let i = 0; i < clients; i++) { + pool.request({ + path: '/', + method: 'GET' + }, (err, { headers, body }) => { + t.error(err) + body.resume() + }) + } + }) + }) + + test('basic get', (t) => { + t.plan(14) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + t.equal(client[kUrl].origin, `http://localhost:${server.address().port}`) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + + t.equal(client.destroyed, false) + t.equal(client.closed, false) + client.close((err) => { + t.error(err) + t.equal(client.destroyed, true) + client.destroy((err) => { + t.error(err) + client.close((err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) + }) + t.equal(client.closed, true) + }) + }) + + test('URL as arg', (t) => { + t.plan(9) + + const server = createServer((req,
res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const url = new URL('http://localhost') + url.port = server.address().port + const client = new Pool(url) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + + client.close((err) => { + t.error(err) + client.destroy((err) => { + t.error(err) + client.close((err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) + }) + }) +}) + +test('basic get error async/await', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + await client.request({ path: '/', method: 'GET' }) + .catch((err) => { + t.ok(err) + }) + + await client.destroy() + + await client.close().catch((err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) +}) + +test('basic get with async/await', async (t) => { + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { statusCode, headers, body } = await client.request({ path: '/', method: 'GET' }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + + body.resume() + await promisify(finished)(body) + + await client.close() + await client.destroy() +}) + +test('stream get async/await', async (t) => { + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + await promisify(server.listen.bind(server))(0) + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + await client.stream({ path: '/', method: 'GET' }, ({ statusCode, headers }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + return new PassThrough() + }) +}) + +test('stream get error async/await', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + await client.stream({ path: '/', method: 'GET' }, () => { + + }) + .catch((err) => { + t.ok(err) + }) + }) +}) + +test('pipeline get', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + 
t.teardown(client.destroy.bind(client)) + + const bufs = [] + client.pipeline({ path: '/', method: 'GET' }, ({ statusCode, headers, body }) => { + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + return body + }) + .end() + .on('data', (buf) => { + bufs.push(buf) + }) + .on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) +}) + +test('backpressure algorithm', (t) => { + const seen = [] + let total = 0 + + let writeMore = true + + class FakeClient extends EventEmitter { + constructor () { + super() + + this.id = total++ + } + + dispatch (req, handler) { + seen.push({ req, client: this, id: this.id }) + return writeMore + } + } + + const Pool = proxyquire('../lib/pool', { + './client': FakeClient + }) + + const noopHandler = { + onError (err) { + throw err + } + } + + const pool = new Pool('http://notahost') + + pool.dispatch({}, noopHandler) + pool.dispatch({}, noopHandler) + + const d1 = seen.shift() // d1 = c0 + t.equal(d1.id, 0) + const d2 = seen.shift() // d2 = c0 + t.equal(d2.id, 0) + + t.equal(d1.id, d2.id) + + writeMore = false + + pool.dispatch({}, noopHandler) // d3 = c0 + + pool.dispatch({}, noopHandler) // d4 = c1 + + const d3 = seen.shift() + t.equal(d3.id, 0) + const d4 = seen.shift() + t.equal(d4.id, 1) + + t.equal(d3.id, d2.id) + t.not(d3.id, d4.id) + + writeMore = true + + d4.client.emit('drain', new URL('http://notahost'), []) + + pool.dispatch({}, noopHandler) // d5 = c1 + + d3.client.emit('drain', new URL('http://notahost'), []) + + pool.dispatch({}, noopHandler) // d6 = c0 + + const d5 = seen.shift() + t.equal(d5.id, 1) + const d6 = seen.shift() + t.equal(d6.id, 0) + + t.equal(d5.id, d4.id) + t.equal(d3.id, d6.id) + + t.equal(total, 3) + + t.end() +}) + +test('busy', (t) => { + t.plan(8 * 16 + 2 + 1) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + const connections = 2 + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections, + pipelining: 2 + }) + client.on('drain', () => { + t.pass() + }) + client.on('connect', () => { + t.pass() + }) + t.teardown(client.destroy.bind(client)) + + for (let n = 1; n <= 8; ++n) { + client.request({ path: '/', method: 'GET' }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + t.equal(client[kPending], n) + t.equal(client[kBusy], n > 1) + t.equal(client[kSize], n) + t.equal(client[kRunning], 0) + + t.equal(client.stats.connected, 0) + t.equal(client.stats.free, 0) + t.equal(client.stats.queued, Math.max(n - connections, 0)) + t.equal(client.stats.pending, n) + t.equal(client.stats.size, n) + t.equal(client.stats.running, 0) + } + }) +}) + +test('invalid pool dispatch options', (t) => { + t.plan(2) + const pool = new Pool('http://notahost') + t.throws(() => pool.dispatch({}), errors.InvalidArgumentError, 'throws on invalid handler') + t.throws(() => pool.dispatch({}, {}), errors.InvalidArgumentError, 'throws on invalid handler') +}) + +test('pool upgrade promise', (t) => { + t.plan(2) + + const server = net.createServer((c) => { + c.on('data', (d) => { + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + 
c.write('connection: upgrade\r\n') + c.write('upgrade: websocket\r\n') + c.write('\r\n') + c.write('Body') + }) + + c.on('end', () => { + c.end() + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { headers, socket } = await client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket' + }) + + let recvData = '' + socket.on('data', (d) => { + recvData += d + }) + + socket.on('close', () => { + t.equal(recvData.toString(), 'Body') + }) + + t.same(headers, { + hello: 'world', + connection: 'upgrade', + upgrade: 'websocket' + }) + socket.end() + }) +}) + +test('pool connect', (t) => { + t.plan(1) + + const server = createServer((c) => { + t.fail() + }) + server.on('connect', (req, socket, firstBodyChunk) => { + socket.write('HTTP/1.1 200 Connection established\r\n\r\n') + + let data = firstBodyChunk.toString() + socket.on('data', (buf) => { + data += buf.toString() + }) + + socket.on('end', () => { + socket.end(data) + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + const { socket } = await client.connect({ + path: '/' + }) + + let recvData = '' + socket.on('data', (d) => { + recvData += d + }) + + socket.on('end', () => { + t.equal(recvData.toString(), 'Body') + }) + + socket.write('Body') + socket.end() + }) +}) + +test('pool dispatch', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + let buf = '' + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + }, + onData (chunk) { + buf += chunk + }, + onComplete () { + t.equal(buf, 'asd') + }, + onError () { + } + }) + }) +}) + +test('pool pipeline args validation', (t) => { + t.plan(2) + + const client = new Pool('http://localhost:5000') + + const ret = client.pipeline(null, () => {}) + ret.on('error', (err) => { + t.ok(/opts/.test(err.message)) + t.type(err, errors.InvalidArgumentError) + }) +}) + +test('300 requests succeed', (t) => { + t.plan(300 * 3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1 + }) + t.teardown(client.destroy.bind(client)) + + for (let n = 0; n < 300; ++n) { + client.request({ + path: '/', + method: 'GET' + }, (err, data) => { + t.error(err) + data.body.on('data', (chunk) => { + t.equal(chunk.toString(), 'asd') + }).on('end', () => { + t.pass() + }) + }) + } + }) +}) + +test('pool connect error', (t) => { + t.plan(1) + + const server = createServer((c) => { + t.fail() + }) + server.on('connect', (req, socket, firstBodyChunk) => { + socket.destroy() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + await client.connect({ + path: '/' + }) + } catch (err) { + t.ok(err) + } + }) +}) + +test('pool upgrade error', (t) => { + t.plan(1) + + const server = net.createServer((c) => { 
+ c.on('data', (d) => { + c.write('HTTP/1.1 101\r\n') + c.write('hello: world\r\n') + c.write('connection: upgrade\r\n') + c.write('\r\n') + c.write('Body') + }) + c.on('error', () => { + // Whether we get an error, end or close is undefined. + // Ignore error. + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + await client.upgrade({ + path: '/', + method: 'GET', + protocol: 'Websocket' + }) + } catch (err) { + t.ok(err) + } + }) +}) + +test('pool dispatch error', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + }, + onData (chunk) { + }, + onComplete () { + t.pass() + }, + onError () { + } + }) + + client.dispatch({ + path: '/', + method: 'GET', + headers: { + 'transfer-encoding': 'fail' + } + }, { + onConnect () { + t.fail() + }, + onHeaders (statusCode, headers) { + t.fail() + }, + onData (chunk) { + t.fail() + }, + onError (err) { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + } + }) + }) +}) + +test('pool request abort in queue', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + }, + onData (chunk) { + }, + onComplete () { + t.pass() + }, + onError () { + } + }) + + const signal = new EventEmitter() + client.request({ + path: '/', + method: 'GET', + signal + }, (err) => { + t.equal(err.code, 'UND_ERR_ABORTED') + }) + signal.emit('abort') + }) +}) + +test('pool stream abort in queue', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + }, + onData (chunk) { + }, + onComplete () { + t.pass() + }, + onError () { + } + }) + + const signal = new EventEmitter() + client.stream({ + path: '/', + method: 'GET', + signal + }, ({ body }) => body, (err) => { + t.equal(err.code, 'UND_ERR_ABORTED') + }) + signal.emit('abort') + }) +}) + +test('pool pipeline abort in queue', (t) => { + t.plan(3) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + client.dispatch({ + path: '/', + method: 'GET' + }, { + onConnect () { + }, + onHeaders (statusCode, headers) { + t.equal(statusCode, 200) + }, + 
onData (chunk) { + }, + onComplete () { + t.pass() + }, + onError () { + } + }) + + const signal = new EventEmitter() + client.pipeline({ + path: '/', + method: 'GET', + signal + }, ({ body }) => body).end().on('error', (err) => { + t.equal(err.code, 'UND_ERR_ABORTED') + }) + signal.emit('abort') + }) +}) + +test('pool stream constructor error destroy body', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + { + const body = new Readable({ + read () { + } + }) + client.stream({ + path: '/', + method: 'GET', + body, + headers: { + 'transfer-encoding': 'fail' + } + }, () => { + t.fail() + }, (err) => { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(body.destroyed, true) + }) + } + + { + const body = new Readable({ + read () { + } + }) + client.stream({ + path: '/', + method: 'CONNECT', + body + }, () => { + t.fail() + }, (err) => { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(body.destroyed, true) + }) + } + }) +}) + +test('pool request constructor error destroy body', (t) => { + t.plan(4) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.close.bind(client)) + + { + const body = new Readable({ + read () { + } + }) + client.request({ + path: '/', + method: 'GET', + body, + headers: { + 'transfer-encoding': 'fail' + } + }, (err) => { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(body.destroyed, true) + }) + } + + { + const body = new Readable({ + read () { + } + }) + client.request({ + path: '/', + method: 'CONNECT', + body + }, (err) => { + t.equal(err.code, 'UND_ERR_INVALID_ARG') + t.equal(body.destroyed, true) + }) + } + }) +}) + +test('pool close waits for all requests', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.error(err) + }) + + client.close(() => { + t.pass() + }) + + client.close(() => { + t.pass() + }) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.type(err, errors.ClientClosedError) + }) + }) +}) + +test('pool destroyed', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + + client.destroy() + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) +}) + +test('pool destroy fails queued requests', (t) => { + t.plan(6) + + const server = createServer((req, res) => { + res.end('asd') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new 
Pool(`http://localhost:${server.address().port}`, { + connections: 1, + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + + const _err = new Error() + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.equal(err, _err) + }) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.equal(err, _err) + }) + + t.equal(client.destroyed, false) + client.destroy(_err, () => { + t.pass() + }) + t.equal(client.destroyed, true) + + client.request({ + path: '/', + method: 'GET' + }, (err) => { + t.type(err, errors.ClientDestroyedError) + }) + }) +}) diff --git a/test/promises.js b/test/promises.js new file mode 100644 index 0000000..524fdfc --- /dev/null +++ b/test/promises.js @@ -0,0 +1,280 @@ +'use strict' + +const { test } = require('tap') +const { Client, Pool } = require('..') +const { createServer } = require('http') +const { readFileSync, createReadStream } = require('fs') +const { wrapWithAsyncIterable } = require('./utils/async-iterators') + +test('basic get, async await support', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + const { statusCode, headers, body } = await client.request({ path: '/', method: 'GET' }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } catch (err) { + t.fail(err) + } + }) +}) + +function postServer (t, expected) { + return function (req, res) { + t.equal(req.url, '/') + t.equal(req.method, 'POST') + + req.setEncoding('utf8') + let data = '' + + req.on('data', function (d) { data += d }) + + req.on('end', () => { + t.equal(data, expected) + res.end('hello') + }) + } +} + +test('basic POST with string, async await support', (t) => { + t.plan(5) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + const { statusCode, body } = await client.request({ path: '/', method: 'POST', body: expected }) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } catch (err) { + t.fail(err) + } + }) +}) + +test('basic POST with Buffer, async await support', (t) => { + t.plan(5) + + const expected = readFileSync(__filename) + + const server = createServer(postServer(t, expected.toString())) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + const { statusCode, body } = await client.request({ path: '/', method: 'POST', body: expected }) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } catch (err) { + t.fail(err) + } + }) +}) + +test('basic POST with stream, 
async await support', (t) => { + t.plan(5) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + const { statusCode, body } = await client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': Buffer.byteLength(expected) + }, + body: createReadStream(__filename) + }) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } catch (err) { + t.fail(err) + } + }) +}) + +test('basic POST with async-iterator, async await support', (t) => { + t.plan(5) + + const expected = readFileSync(__filename, 'utf8') + + const server = createServer(postServer(t, expected)) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + try { + const { statusCode, body } = await client.request({ + path: '/', + method: 'POST', + headers: { + 'content-length': Buffer.byteLength(expected) + }, + body: wrapWithAsyncIterable(createReadStream(__filename)) + }) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } catch (err) { + t.fail(err) + } + }) +}) + +test('20 times GET with pipelining 10, async await support', (t) => { + const num = 20 + t.plan(2 * num + 1) + + const sleep = ms => new Promise((resolve, reject) => { + setTimeout(resolve, ms) + }) + + let count = 0 + let countGreaterThanOne = false + const server = createServer(async (req, res) => { + count++ + await sleep(10) + countGreaterThanOne = countGreaterThanOne || count > 1 + res.end(req.url) + }) + t.teardown(server.close.bind(server)) + + // needed to check for a warning on the maxListeners on the socket + function onWarning (warning) { + if (!/ExperimentalWarning/.test(warning)) { + t.fail() + } + } + process.on('warning', onWarning) + t.teardown(() => { + process.removeListener('warning', onWarning) + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 10 + }) + t.teardown(client.close.bind(client)) + + for (let i = 0; i < num; i++) { + makeRequest(i) + } + + async function makeRequest (i) { + await makeRequestAndExpectUrl(client, i, t) + count-- + if (i === num - 1) { + t.ok(countGreaterThanOne, 'seen more than one parallel request') + } + } + }) +}) + +async function makeRequestAndExpectUrl (client, i, t) { + try { + const { statusCode, body } = await client.request({ path: '/' + i, method: 'GET' }) + t.equal(statusCode, 200) + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('/' + i, Buffer.concat(bufs).toString('utf8')) + }) + } catch (err) { + t.fail(err) + } + return true +} + +test('pool, async await support', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + const client = new Pool(`http://localhost:${server.address().port}`) + 
t.teardown(client.close.bind(client)) + + try { + const { statusCode, headers, body } = await client.request({ path: '/', method: 'GET' }) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + } catch (err) { + t.fail(err) + } + }) +}) diff --git a/test/proxy-agent.js b/test/proxy-agent.js new file mode 100644 index 0000000..0a92126 --- /dev/null +++ b/test/proxy-agent.js @@ -0,0 +1,720 @@ +'use strict' + +const { test, teardown } = require('tap') +const { request, fetch, setGlobalDispatcher, getGlobalDispatcher } = require('..') +const { InvalidArgumentError } = require('../lib/core/errors') +const { nodeMajor } = require('../lib/core/util') +const { readFileSync } = require('fs') +const { join } = require('path') +const ProxyAgent = require('../lib/proxy-agent') +const Pool = require('../lib/pool') +const { createServer } = require('http') +const https = require('https') +const proxy = require('proxy') + +test('should throw error when no uri is provided', (t) => { + t.plan(2) + t.throws(() => new ProxyAgent(), InvalidArgumentError) + t.throws(() => new ProxyAgent({}), InvalidArgumentError) +}) + +test('using auth in combination with token should throw', (t) => { + t.plan(1) + t.throws(() => new ProxyAgent({ + auth: 'foo', + token: 'Bearer bar', + uri: 'http://example.com' + }), + InvalidArgumentError + ) +}) + +test('should accept string and object as options', (t) => { + t.plan(2) + t.doesNotThrow(() => new ProxyAgent('http://example.com')) + t.doesNotThrow(() => new ProxyAgent({ uri: 'http://example.com' })) +}) + +test('use proxy-agent to connect through proxy', async (t) => { + t.plan(6) + const server = await buildServer() + const proxy = await buildProxy() + delete proxy.authenticate + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent(proxyUrl) + const parsedOrigin = new URL(serverUrl) + + proxy.on('connect', () => { + t.pass('should connect to proxy') + }) + + server.on('request', (req, res) => { + t.equal(req.url, '/') + t.equal(req.headers.host, parsedOrigin.host, 'should not use proxyUrl as host') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const { + statusCode, + headers, + body + } = await request(serverUrl, { dispatcher: proxyAgent }) + const json = await body.json() + + t.equal(statusCode, 200) + t.same(json, { hello: 'world' }) + t.equal(headers.connection, 'keep-alive', 'should remain the connection open') + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('use proxy agent to connect through proxy using Pool', async (t) => { + t.plan(3) + const server = await buildServer() + const proxy = await buildProxy() + let resolveFirstConnect + let connectCount = 0 + + proxy.authenticate = async function (req, fn) { + if (++connectCount === 2) { + t.pass('second connect should arrive while first is still inflight') + resolveFirstConnect() + fn(null, true) + } else { + await new Promise((resolve) => { + resolveFirstConnect = resolve + }) + fn(null, true) + } + } + + server.on('request', (req, res) => { + res.end() + }) + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const clientFactory = (url, options) => { + return new Pool(url, 
options) + } + const proxyAgent = new ProxyAgent({ auth: Buffer.from('user:pass').toString('base64'), uri: proxyUrl, clientFactory }) + const firstRequest = request(`${serverUrl}`, { dispatcher: proxyAgent }) + const secondRequest = await request(`${serverUrl}`, { dispatcher: proxyAgent }) + t.equal((await firstRequest).statusCode, 200) + t.equal(secondRequest.statusCode, 200) + server.close() + proxy.close() + proxyAgent.close() +}) + +test('use proxy-agent to connect through proxy using path with params', async (t) => { + t.plan(6) + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent(proxyUrl) + const parsedOrigin = new URL(serverUrl) + + proxy.on('connect', () => { + t.pass('should call proxy') + }) + server.on('request', (req, res) => { + t.equal(req.url, '/hello?foo=bar') + t.equal(req.headers.host, parsedOrigin.host, 'should not use proxyUrl as host') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const { + statusCode, + headers, + body + } = await request(serverUrl + '/hello?foo=bar', { dispatcher: proxyAgent }) + const json = await body.json() + + t.equal(statusCode, 200) + t.same(json, { hello: 'world' }) + t.equal(headers.connection, 'keep-alive', 'should remain the connection open') + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('use proxy-agent with auth', async (t) => { + t.plan(7) + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent({ + auth: Buffer.from('user:pass').toString('base64'), + uri: proxyUrl + }) + const parsedOrigin = new URL(serverUrl) + + proxy.authenticate = function (req, fn) { + t.pass('authentication should be called') + fn(null, req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`) + } + proxy.on('connect', () => { + t.pass('proxy should be called') + }) + + server.on('request', (req, res) => { + t.equal(req.url, '/hello?foo=bar') + t.equal(req.headers.host, parsedOrigin.host, 'should not use proxyUrl as host') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const { + statusCode, + headers, + body + } = await request(serverUrl + '/hello?foo=bar', { dispatcher: proxyAgent }) + const json = await body.json() + + t.equal(statusCode, 200) + t.same(json, { hello: 'world' }) + t.equal(headers.connection, 'keep-alive', 'should remain the connection open') + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('use proxy-agent with token', async (t) => { + t.plan(7) + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent({ + token: `Bearer ${Buffer.from('user:pass').toString('base64')}`, + uri: proxyUrl + }) + const parsedOrigin = new URL(serverUrl) + + proxy.authenticate = function (req, fn) { + t.pass('authentication should be called') + fn(null, req.headers['proxy-authorization'] === `Bearer ${Buffer.from('user:pass').toString('base64')}`) + } + proxy.on('connect', () => { + t.pass('proxy should be called') + }) + + server.on('request', 
(req, res) => { + t.equal(req.url, '/hello?foo=bar') + t.equal(req.headers.host, parsedOrigin.host, 'should not use proxyUrl as host') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const { + statusCode, + headers, + body + } = await request(serverUrl + '/hello?foo=bar', { dispatcher: proxyAgent }) + const json = await body.json() + + t.equal(statusCode, 200) + t.same(json, { hello: 'world' }) + t.equal(headers.connection, 'keep-alive', 'should remain the connection open') + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('use proxy-agent with custom headers', async (t) => { + t.plan(2) + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent({ + uri: proxyUrl, + headers: { + 'User-Agent': 'Foobar/1.0.0' + } + }) + + proxy.on('connect', (req) => { + t.equal(req.headers['user-agent'], 'Foobar/1.0.0') + }) + + server.on('request', (req, res) => { + t.equal(req.headers['user-agent'], 'BarBaz/1.0.0') + res.end() + }) + + await request(serverUrl + '/hello?foo=bar', { + headers: { 'user-agent': 'BarBaz/1.0.0' }, + dispatcher: proxyAgent + }) + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('sending proxy-authorization in request headers should throw', async (t) => { + t.plan(3) + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent(proxyUrl) + + server.on('request', (req, res) => { + res.end(JSON.stringify({ hello: 'world' })) + }) + + await t.rejects( + request( + serverUrl + '/hello?foo=bar', + { + dispatcher: proxyAgent, + headers: { + 'proxy-authorization': Buffer.from('user:pass').toString('base64') + } + } + ), + 'Proxy-Authorization should be sent in ProxyAgent' + ) + + await t.rejects( + request( + serverUrl + '/hello?foo=bar', + { + dispatcher: proxyAgent, + headers: { + 'PROXY-AUTHORIZATION': Buffer.from('user:pass').toString('base64') + } + } + ), + 'Proxy-Authorization should be sent in ProxyAgent' + ) + + await t.rejects( + request( + serverUrl + '/hello?foo=bar', + { + dispatcher: proxyAgent, + headers: { + 'Proxy-Authorization': Buffer.from('user:pass').toString('base64') + } + } + ), + 'Proxy-Authorization should be sent in ProxyAgent' + ) + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('use proxy-agent with setGlobalDispatcher', async (t) => { + t.plan(6) + const defaultDispatcher = getGlobalDispatcher() + + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent(proxyUrl) + const parsedOrigin = new URL(serverUrl) + setGlobalDispatcher(proxyAgent) + + t.teardown(() => setGlobalDispatcher(defaultDispatcher)) + + proxy.on('connect', () => { + t.pass('should call proxy') + }) + server.on('request', (req, res) => { + t.equal(req.url, '/hello?foo=bar') + t.equal(req.headers.host, parsedOrigin.host, 'should not use proxyUrl as host') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const { + statusCode, + headers, + body + } = await request(serverUrl + '/hello?foo=bar') + const json = 
await body.json() + + t.equal(statusCode, 200) + t.same(json, { hello: 'world' }) + t.equal(headers.connection, 'keep-alive', 'should remain the connection open') + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('ProxyAgent correctly sends headers when using fetch - #1355, #1623', { skip: nodeMajor < 16 }, async (t) => { + t.plan(2) + const defaultDispatcher = getGlobalDispatcher() + + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + + const proxyAgent = new ProxyAgent(proxyUrl) + setGlobalDispatcher(proxyAgent) + + t.teardown(() => setGlobalDispatcher(defaultDispatcher)) + + const expectedHeaders = { + host: `localhost:${server.address().port}`, + connection: 'keep-alive', + 'test-header': 'value', + accept: '*/*', + 'accept-language': '*', + 'sec-fetch-mode': 'cors', + 'user-agent': 'undici', + 'accept-encoding': 'gzip, deflate' + } + + const expectedProxyHeaders = { + host: `localhost:${proxy.address().port}`, + connection: 'close' + } + + proxy.on('connect', (req, res) => { + t.same(req.headers, expectedProxyHeaders) + }) + + server.on('request', (req, res) => { + t.same(req.headers, expectedHeaders) + res.end('goodbye') + }) + + await fetch(serverUrl, { + headers: { 'Test-header': 'value' } + }) + + server.close() + proxy.close() + proxyAgent.close() + t.end() +}) + +test('should throw when proxy does not return 200', async (t) => { + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + + proxy.authenticate = function (req, fn) { + fn(null, false) + } + + const proxyAgent = new ProxyAgent(proxyUrl) + try { + await request(serverUrl, { dispatcher: proxyAgent }) + t.fail() + } catch (e) { + t.pass() + t.ok(e) + } + + server.close() + proxy.close() + proxyAgent.close() + t.end() +}) + +test('pass ProxyAgent proxy status code error when using fetch - #2161', { skip: nodeMajor < 16 }, async (t) => { + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + + proxy.authenticate = function (req, fn) { + fn(null, false) + } + + const proxyAgent = new ProxyAgent(proxyUrl) + try { + await fetch(serverUrl, { dispatcher: proxyAgent }) + } catch (e) { + t.hasProp(e, 'cause') + } + + server.close() + proxy.close() + proxyAgent.close() + t.end() +}) + +test('Proxy via HTTP to HTTPS endpoint', async (t) => { + t.plan(4) + + const server = await buildSSLServer() + const proxy = await buildProxy() + + const serverUrl = `https://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent({ + uri: proxyUrl, + requestTls: { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), + servername: 'agent1' + } + }) + + server.on('request', function (req, res) { + t.ok(req.connection.encrypted) + res.end(JSON.stringify(req.headers)) + }) + + server.on('secureConnection', () => { + t.pass('server should be connected secured') + }) + + proxy.on('secureConnection', () => { + t.fail('proxy over http should not 
call secureConnection') + }) + + proxy.on('connect', function () { + t.pass('proxy should be connected') + }) + + proxy.on('request', function () { + t.fail('proxy should never receive requests') + }) + + const data = await request(serverUrl, { dispatcher: proxyAgent }) + const json = await data.body.json() + t.strictSame(json, { + host: `localhost:${server.address().port}`, + connection: 'keep-alive' + }) + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('Proxy via HTTPS to HTTPS endpoint', async (t) => { + t.plan(5) + const server = await buildSSLServer() + const proxy = await buildSSLProxy() + + const serverUrl = `https://localhost:${server.address().port}` + const proxyUrl = `https://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent({ + uri: proxyUrl, + proxyTls: { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), + servername: 'agent1', + rejectUnauthorized: false + }, + requestTls: { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), + servername: 'agent1' + } + }) + + server.on('request', function (req, res) { + t.ok(req.connection.encrypted) + res.end(JSON.stringify(req.headers)) + }) + + server.on('secureConnection', () => { + t.pass('server should be connected secured') + }) + + proxy.on('secureConnection', () => { + t.pass('proxy over http should call secureConnection') + }) + + proxy.on('connect', function () { + t.pass('proxy should be connected') + }) + + proxy.on('request', function () { + t.fail('proxy should never receive requests') + }) + + const data = await request(serverUrl, { dispatcher: proxyAgent }) + const json = await data.body.json() + t.strictSame(json, { + host: `localhost:${server.address().port}`, + connection: 'keep-alive' + }) + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('Proxy via HTTPS to HTTP endpoint', async (t) => { + t.plan(3) + const server = await buildServer() + const proxy = await buildSSLProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `https://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent({ + uri: proxyUrl, + proxyTls: { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'client-key-2048.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'client-crt-2048.pem'), 'utf8'), + servername: 'agent1', + rejectUnauthorized: false + } + }) + + server.on('request', function (req, res) { + t.ok(!req.connection.encrypted) + res.end(JSON.stringify(req.headers)) + }) + + server.on('secureConnection', () => { + t.fail('server is http') + }) + + proxy.on('secureConnection', () => { + t.pass('proxy over http should call secureConnection') + }) + + proxy.on('request', function () { + t.fail('proxy should never receive requests') + }) + + const data = await request(serverUrl, { dispatcher: proxyAgent }) + const json = await data.body.json() + t.strictSame(json, { + host: `localhost:${server.address().port}`, + connection: 'keep-alive' + }) + + server.close() + proxy.close() + proxyAgent.close() +}) + +test('Proxy via HTTP to HTTP endpoint', async (t) => { + t.plan(3) + const server = 
await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + const proxyAgent = new ProxyAgent(proxyUrl) + + server.on('request', function (req, res) { + t.ok(!req.connection.encrypted) + res.end(JSON.stringify(req.headers)) + }) + + server.on('secureConnection', () => { + t.fail('server is http') + }) + + proxy.on('secureConnection', () => { + t.fail('proxy is http') + }) + + proxy.on('connect', () => { + t.pass('connect to proxy') + }) + + proxy.on('request', function () { + t.fail('proxy should never receive requests') + }) + + const data = await request(serverUrl, { dispatcher: proxyAgent }) + const json = await data.body.json() + t.strictSame(json, { + host: `localhost:${server.address().port}`, + connection: 'keep-alive' + }) + + server.close() + proxy.close() + proxyAgent.close() +}) + +function buildServer () { + return new Promise((resolve) => { + const server = createServer() + server.listen(0, () => resolve(server)) + }) +} + +function buildSSLServer () { + const serverOptions = { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'client-ca-crt.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8') + } + return new Promise((resolve) => { + const server = https.createServer(serverOptions) + server.listen(0, () => resolve(server)) + }) +} + +function buildProxy (listener) { + return new Promise((resolve) => { + const server = listener + ? proxy(createServer(listener)) + : proxy(createServer()) + server.listen(0, () => resolve(server)) + }) +} + +function buildSSLProxy () { + const serverOptions = { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'client-ca-crt.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8') + } + + return new Promise((resolve) => { + const server = proxy(https.createServer(serverOptions)) + server.listen(0, () => resolve(server)) + }) +} + +teardown(() => process.exit()) diff --git a/test/proxy.js b/test/proxy.js new file mode 100644 index 0000000..d6d8d42 --- /dev/null +++ b/test/proxy.js @@ -0,0 +1,132 @@ +'use strict' + +const { test } = require('tap') +const { Client, Pool } = require('..') +const { createServer } = require('http') +const proxy = require('proxy') + +test('connect through proxy', async (t) => { + t.plan(3) + + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + + server.on('request', (req, res) => { + t.equal(req.url, '/hello?foo=bar') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const client = new Client(proxyUrl) + + const response = await client.request({ + method: 'GET', + path: serverUrl + '/hello?foo=bar' + }) + + response.body.setEncoding('utf8') + let data = '' + for await (const chunk of response.body) { + data += chunk + } + t.equal(response.statusCode, 200) + t.same(JSON.parse(data), { hello: 'world' }) + + server.close() + proxy.close() + client.close() +}) + +test('connect through proxy with auth', async (t) => { + t.plan(3) + + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = 
`http://localhost:${proxy.address().port}` + + proxy.authenticate = function (req, fn) { + fn(null, req.headers['proxy-authorization'] === `Basic ${Buffer.from('user:pass').toString('base64')}`) + } + + server.on('request', (req, res) => { + t.equal(req.url, '/hello?foo=bar') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const client = new Client(proxyUrl) + + const response = await client.request({ + method: 'GET', + path: serverUrl + '/hello?foo=bar', + headers: { + 'proxy-authorization': `Basic ${Buffer.from('user:pass').toString('base64')}` + } + }) + + response.body.setEncoding('utf8') + let data = '' + for await (const chunk of response.body) { + data += chunk + } + t.equal(response.statusCode, 200) + t.same(JSON.parse(data), { hello: 'world' }) + + server.close() + proxy.close() + client.close() +}) + +test('connect through proxy (with pool)', async (t) => { + t.plan(3) + + const server = await buildServer() + const proxy = await buildProxy() + + const serverUrl = `http://localhost:${server.address().port}` + const proxyUrl = `http://localhost:${proxy.address().port}` + + server.on('request', (req, res) => { + t.equal(req.url, '/hello?foo=bar') + res.setHeader('content-type', 'application/json') + res.end(JSON.stringify({ hello: 'world' })) + }) + + const pool = new Pool(proxyUrl) + + const response = await pool.request({ + method: 'GET', + path: serverUrl + '/hello?foo=bar' + }) + + response.body.setEncoding('utf8') + let data = '' + for await (const chunk of response.body) { + data += chunk + } + t.equal(response.statusCode, 200) + t.same(JSON.parse(data), { hello: 'world' }) + + server.close() + proxy.close() + pool.close() +}) + +function buildServer () { + return new Promise((resolve, reject) => { + const server = createServer() + server.listen(0, () => resolve(server)) + }) +} + +function buildProxy () { + return new Promise((resolve, reject) => { + const server = proxy(createServer()) + server.listen(0, () => resolve(server)) + }) +} diff --git a/test/readable.test.js b/test/readable.test.js new file mode 100644 index 0000000..3f4f793 --- /dev/null +++ b/test/readable.test.js @@ -0,0 +1,23 @@ +'use strict' + +const { test } = require('tap') +const Readable = require('../lib/api/readable') + +test('avoid body reordering', async function (t) { + function resume () { + } + function abort () { + } + const r = new Readable({ resume, abort }) + + r.push(Buffer.from('hello')) + + process.nextTick(() => { + r.push(Buffer.from('world')) + r.push(null) + }) + + const text = await r.text() + + t.equal(text, 'helloworld') +}) diff --git a/test/redirect-pipeline.js b/test/redirect-pipeline.js new file mode 100644 index 0000000..e4be837 --- /dev/null +++ b/test/redirect-pipeline.js @@ -0,0 +1,50 @@ +'use strict' + +const t = require('tap') +const { pipeline: undiciPipeline } = require('..') +const { pipeline: streamPipelineCb } = require('stream') +const { promisify } = require('util') +const { createReadable, createWritable } = require('./utils/stream') +const { startRedirectingServer } = require('./utils/redirecting-servers') + +const streamPipeline = promisify(streamPipelineCb) + +t.test('should not follow redirection by default if not using RedirectAgent', async t => { + t.plan(3) + + const body = [] + const serverRoot = await startRedirectingServer(t) + + await streamPipeline( + createReadable('REQUEST'), + undiciPipeline(`http://${serverRoot}/`, {}, ({ statusCode, headers, body }) => { + t.equal(statusCode, 302) + 
t.equal(headers.location, `http://${serverRoot}/302/1`) + + return body + }), + createWritable(body) + ) + + t.equal(body.length, 0) +}) + +t.test('should not follow redirects when using RedirectAgent within pipeline', async t => { + t.plan(3) + + const body = [] + const serverRoot = await startRedirectingServer(t) + + await streamPipeline( + createReadable('REQUEST'), + undiciPipeline(`http://${serverRoot}/`, { maxRedirections: 1 }, ({ statusCode, headers, body }) => { + t.equal(statusCode, 302) + t.equal(headers.location, `http://${serverRoot}/302/1`) + + return body + }), + createWritable(body) + ) + + t.equal(body.length, 0) +}) diff --git a/test/redirect-relative.js b/test/redirect-relative.js new file mode 100644 index 0000000..ca9c541 --- /dev/null +++ b/test/redirect-relative.js @@ -0,0 +1,22 @@ +'use strict' + +const t = require('tap') +const { request } = require('..') +const { + startRedirectingWithRelativePath +} = require('./utils/redirecting-servers') + +t.test('should redirect to relative URL according to RFC 7231', async t => { + t.plan(2) + + const server = await startRedirectingWithRelativePath(t) + + const { statusCode, body } = await request(`http://${server}`, { + maxRedirections: 3 + }) + + const finalPath = await body.text() + + t.equal(statusCode, 200) + t.equal(finalPath, '/absolute/b') +}) diff --git a/test/redirect-request.js b/test/redirect-request.js new file mode 100644 index 0000000..5a1ae6d --- /dev/null +++ b/test/redirect-request.js @@ -0,0 +1,420 @@ +'use strict' + +const t = require('tap') +const undici = require('..') +const { nodeMajor } = require('../lib/core/util') +const { + startRedirectingServer, + startRedirectingWithBodyServer, + startRedirectingChainServers, + startRedirectingWithoutLocationServer, + startRedirectingWithAuthorization, + startRedirectingWithCookie, + startRedirectingWithQueryParams +} = require('./utils/redirecting-servers') +const { createReadable, createReadableStream } = require('./utils/stream') + +for (const factory of [ + (server, opts) => new undici.Agent(opts), + (server, opts) => new undici.Pool(`http://${server}`, opts), + (server, opts) => new undici.Client(`http://${server}`, opts) +]) { + const request = (t, server, opts, ...args) => { + const dispatcher = factory(server, opts) + t.teardown(() => dispatcher.close()) + return undici.request(args[0], { ...args[1], dispatcher }, args[2]) + } + + t.test('should always have a history with the final URL even if no redirections were followed', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream, context: { history } } = await request(t, server, undefined, `http://${server}/200?key=value`, { + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [`http://${server}/200?key=value`]) + t.equal(body, `GET /5 key=value :: host@${server} connection@keep-alive`) + }) + + t.test('should not follow redirection by default if not using RedirectAgent', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}`) + const body = await bodyStream.text() + + t.equal(statusCode, 302) + t.equal(headers.location, `http://${server}/302/1`) + t.equal(body.length, 0) + }) + + t.test('should follow redirection after a HTTP 300', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, 
headers, body: bodyStream, context: { history } } = await request(t, server, undefined, `http://${server}/300?key=value`, { + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [ + `http://${server}/300?key=value`, + `http://${server}/300/1?key=value`, + `http://${server}/300/2?key=value`, + `http://${server}/300/3?key=value`, + `http://${server}/300/4?key=value`, + `http://${server}/300/5?key=value` + ]) + t.equal(body, `GET /5 key=value :: host@${server} connection@keep-alive`) + }) + + t.test('should follow redirection after a HTTP 300 default', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream, context: { history } } = await request(t, server, { maxRedirections: 10 }, `http://${server}/300?key=value`) + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [ + `http://${server}/300?key=value`, + `http://${server}/300/1?key=value`, + `http://${server}/300/2?key=value`, + `http://${server}/300/3?key=value`, + `http://${server}/300/4?key=value`, + `http://${server}/300/5?key=value` + ]) + t.equal(body, `GET /5 key=value :: host@${server} connection@keep-alive`) + }) + + t.test('should follow redirection after a HTTP 301', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/301`, { + method: 'POST', + body: 'REQUEST', + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.equal(body, `POST /5 :: host@${server} connection@keep-alive content-length@7 :: REQUEST`) + }) + + t.test('should follow redirection after a HTTP 302', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/302`, { + method: 'PUT', + body: Buffer.from('REQUEST'), + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.equal(body, `PUT /5 :: host@${server} connection@keep-alive content-length@7 :: REQUEST`) + }) + + t.test('should follow redirection after a HTTP 303 changing method to GET', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/303`, { + method: 'PATCH', + body: 'REQUEST', + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.equal(body, `GET /5 :: host@${server} connection@keep-alive`) + }) + + t.test('should remove Host and request body related headers when following HTTP 303 (array)', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/303`, { + method: 'PATCH', + headers: [ + 'Content-Encoding', + 'gzip', + 'X-Foo1', + '1', + 'X-Foo2', + '2', + 'Content-Type', + 'application/json', + 'X-Foo3', + '3', + 'Host', + 'localhost', + 'X-Bar', + '4' + ], + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.equal(body, `GET /5 :: host@${server} connection@keep-alive x-foo1@1 x-foo2@2 x-foo3@3 x-bar@4`) + }) + 
+ t.test('should remove Host and request body related headers when following HTTP 303 (object)', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/303`, { + method: 'PATCH', + headers: { + 'Content-Encoding': 'gzip', + 'X-Foo1': '1', + 'X-Foo2': '2', + 'Content-Type': 'application/json', + 'X-Foo3': '3', + Host: 'localhost', + 'X-Bar': '4' + }, + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.equal(body, `GET /5 :: host@${server} connection@keep-alive x-foo1@1 x-foo2@2 x-foo3@3 x-bar@4`) + }) + + t.test('should follow redirection after a HTTP 307', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/307`, { + method: 'DELETE', + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.equal(body, `DELETE /5 :: host@${server} connection@keep-alive`) + }) + + t.test('should follow redirection after a HTTP 308', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/308`, { + method: 'OPTIONS', + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.equal(body, `OPTIONS /5 :: host@${server} connection@keep-alive`) + }) + + t.test('should ignore HTTP 3xx response bodies', async t => { + const server = await startRedirectingWithBodyServer(t) + + const { statusCode, headers, body: bodyStream, context: { history } } = await request(t, server, undefined, `http://${server}/`, { + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [`http://${server}/`, `http://${server}/end`]) + t.equal(body, 'FINAL') + }) + + t.test('should ignore query after redirection', async t => { + const server = await startRedirectingWithQueryParams(t) + + const { statusCode, headers, context: { history } } = await request(t, server, undefined, `http://${server}/`, { + maxRedirections: 10, + query: { param1: 'first' } + }) + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [`http://${server}/`, `http://${server}/?param2=second`]) + }) + + t.test('should follow a redirect chain up to the allowed number of times', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream, context: { history } } = await request(t, server, undefined, `http://${server}/300`, { + maxRedirections: 2 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 300) + t.equal(headers.location, `http://${server}/300/3`) + t.same(history.map(x => x.toString()), [`http://${server}/300`, `http://${server}/300/1`, `http://${server}/300/2`]) + t.equal(body.length, 0) + }) + + t.test('when a Location response header is NOT present', async t => { + const redirectCodes = [300, 301, 302, 303, 307, 308] + const server = await startRedirectingWithoutLocationServer(t) + + for (const code of redirectCodes) { + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/${code}`, { + maxRedirections: 10 + }) + + const body = await 
bodyStream.text() + + t.equal(statusCode, code) + t.notOk(headers.location) + t.equal(body.length, 0) + } + }) + + t.test('should not allow invalid maxRedirections arguments', async t => { + try { + await request(t, 'localhost', undefined, 'http://localhost', { + method: 'GET', + maxRedirections: 'INVALID' + }) + + t.fail('Did not throw') + } catch (err) { + t.equal(err.message, 'maxRedirections must be a positive number') + } + }) + + t.test('should not allow invalid maxRedirections arguments default', async t => { + try { + await request(t, 'localhost', { + maxRedirections: 'INVALID' + }, 'http://localhost', { + method: 'GET' + }) + + t.fail('Did not throw') + } catch (err) { + t.equal(err.message, 'maxRedirections must be a positive number') + } + }) + + t.test('should not follow redirects when using ReadableStream request bodies', { skip: nodeMajor < 16 }, async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/301`, { + method: 'POST', + body: createReadableStream('REQUEST'), + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 301) + t.equal(headers.location, `http://${server}/301/2`) + t.equal(body.length, 0) + }) + + t.test('should not follow redirects when using Readable request bodies', async t => { + const server = await startRedirectingServer(t) + + const { statusCode, headers, body: bodyStream } = await request(t, server, undefined, `http://${server}/301`, { + method: 'POST', + body: createReadable('REQUEST'), + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 301) + t.equal(headers.location, `http://${server}/301/1`) + t.equal(body.length, 0) + }) +} + +t.test('should follow redirections when going cross origin', async t => { + const [server1, server2, server3] = await startRedirectingChainServers(t) + + const { statusCode, headers, body: bodyStream, context: { history } } = await undici.request(`http://${server1}`, { + method: 'POST', + maxRedirections: 10 + }) + + const body = await bodyStream.text() + + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [ + `http://${server1}/`, + `http://${server2}/`, + `http://${server3}/`, + `http://${server2}/end`, + `http://${server3}/end`, + `http://${server1}/end` + ]) + t.equal(body, 'POST') +}) + +t.test('should handle errors (callback)', t => { + t.plan(1) + + undici.request( + 'http://localhost:0', + { + maxRedirections: 10 + }, + error => { + t.match(error.code, /EADDRNOTAVAIL|ECONNREFUSED/) + } + ) +}) + +t.test('should handle errors (promise)', async t => { + try { + await undici.request('http://localhost:0', { maxRedirections: 10 }) + t.fail('Did not throw') + } catch (error) { + t.match(error.code, /EADDRNOTAVAIL|ECONNREFUSED/) + } +}) + +t.test('removes authorization header on third party origin', async t => { + const [server1] = await startRedirectingWithAuthorization(t, 'secret') + const { body: bodyStream } = await undici.request(`http://${server1}`, { + maxRedirections: 10, + headers: { + authorization: 'secret' + } + }) + + const body = await bodyStream.text() + + t.equal(body, '') +}) + +t.test('removes cookie header on third party origin', async t => { + const [server1] = await startRedirectingWithCookie(t, 'a=b') + const { body: bodyStream } = await undici.request(`http://${server1}`, { + maxRedirections: 10, + headers: { + cookie: 'a=b' + } + }) + + const body = await 
bodyStream.text() + + t.equal(body, '') +}) diff --git a/test/redirect-stream.js b/test/redirect-stream.js new file mode 100644 index 0000000..55dd97b --- /dev/null +++ b/test/redirect-stream.js @@ -0,0 +1,423 @@ +'use strict' + +const t = require('tap') +const { stream } = require('..') +const { + startRedirectingServer, + startRedirectingWithBodyServer, + startRedirectingChainServers, + startRedirectingWithoutLocationServer, + startRedirectingWithAuthorization, + startRedirectingWithCookie +} = require('./utils/redirecting-servers') +const { createReadable, createWritable } = require('./utils/stream') + +t.test('should always have a history with the final URL even if no redirections were followed', async t => { + t.plan(4) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/200?key=value`, + { opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque, context: { history } }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [ + `http://${server}/200?key=value` + ]) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `GET /5 key=value :: host@${server} connection@keep-alive`) +}) + +t.test('should not follow redirection by default if not using RedirectAgent', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream(`http://${server}`, { opaque: body }, ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 302) + t.equal(headers.location, `http://${server}/302/1`) + + return createWritable(opaque) + }) + + t.equal(body.length, 0) +}) + +t.test('should follow redirection after a HTTP 300', async t => { + t.plan(4) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/300?key=value`, + { opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque, context: { history } }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [ + `http://${server}/300?key=value`, + `http://${server}/300/1?key=value`, + `http://${server}/300/2?key=value`, + `http://${server}/300/3?key=value`, + `http://${server}/300/4?key=value`, + `http://${server}/300/5?key=value` + ]) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `GET /5 key=value :: host@${server} connection@keep-alive`) +}) + +t.test('should follow redirection after a HTTP 301', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/301`, + { method: 'POST', body: 'REQUEST', opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `POST /5 :: host@${server} connection@keep-alive content-length@7 :: REQUEST`) +}) + +t.test('should follow redirection after a HTTP 302', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/302`, + { method: 'PUT', body: Buffer.from('REQUEST'), opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `PUT /5 :: host@${server} connection@keep-alive content-length@7 :: REQUEST`) +}) + +t.test('should follow redirection after a HTTP 303 changing method to GET', async t 
=> { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream(`http://${server}/303`, { opaque: body, maxRedirections: 10 }, ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + + return createWritable(opaque) + }) + + t.equal(body.join(''), `GET /5 :: host@${server} connection@keep-alive`) +}) + +t.test('should remove Host and request body related headers when following HTTP 303 (array)', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/303`, + { + method: 'PATCH', + headers: [ + 'Content-Encoding', + 'gzip', + 'X-Foo1', + '1', + 'X-Foo2', + '2', + 'Content-Type', + 'application/json', + 'X-Foo3', + '3', + 'Host', + 'localhost', + 'X-Bar', + '4' + ], + opaque: body, + maxRedirections: 10 + }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `GET /5 :: host@${server} connection@keep-alive x-foo1@1 x-foo2@2 x-foo3@3 x-bar@4`) +}) + +t.test('should remove Host and request body related headers when following HTTP 303 (object)', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/303`, + { + method: 'PATCH', + headers: { + 'Content-Encoding': 'gzip', + 'X-Foo1': '1', + 'X-Foo2': '2', + 'Content-Type': 'application/json', + 'X-Foo3': '3', + Host: 'localhost', + 'X-Bar': '4' + }, + opaque: body, + maxRedirections: 10 + }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `GET /5 :: host@${server} connection@keep-alive x-foo1@1 x-foo2@2 x-foo3@3 x-bar@4`) +}) + +t.test('should follow redirection after a HTTP 307', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/307`, + { method: 'DELETE', opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `DELETE /5 :: host@${server} connection@keep-alive`) +}) + +t.test('should follow redirection after a HTTP 308', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/308`, + { method: 'OPTIONS', opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), `OPTIONS /5 :: host@${server} connection@keep-alive`) +}) + +t.test('should ignore HTTP 3xx response bodies', async t => { + t.plan(4) + + const body = [] + const server = await startRedirectingWithBodyServer(t) + + await stream( + `http://${server}/`, + { opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque, context: { history } }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [`http://${server}/`, `http://${server}/end`]) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), 'FINAL') +}) + +t.test('should follow a redirect chain up to the allowed number of times', async t => { + t.plan(4) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}/300`, + { 
opaque: body, maxRedirections: 2 }, + ({ statusCode, headers, opaque, context: { history } }) => { + t.equal(statusCode, 300) + t.equal(headers.location, `http://${server}/300/3`) + t.same(history.map(x => x.toString()), [`http://${server}/300`, `http://${server}/300/1`, `http://${server}/300/2`]) + + return createWritable(opaque) + } + ) + + t.equal(body.length, 0) +}) + +t.test('should follow redirections when going cross origin', async t => { + t.plan(4) + + const [server1, server2, server3] = await startRedirectingChainServers(t) + const body = [] + + await stream( + `http://${server1}`, + { method: 'POST', opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque, context: { history } }) => { + t.equal(statusCode, 200) + t.notOk(headers.location) + t.same(history.map(x => x.toString()), [ + `http://${server1}/`, + `http://${server2}/`, + `http://${server3}/`, + `http://${server2}/end`, + `http://${server3}/end`, + `http://${server1}/end` + ]) + + return createWritable(opaque) + } + ) + + t.equal(body.join(''), 'POST') +}) + +t.test('when a Location response header is NOT present', async t => { + const redirectCodes = [300, 301, 302, 303, 307, 308] + const server = await startRedirectingWithoutLocationServer(t) + + for (const code of redirectCodes) { + t.test(`should return the original response after a HTTP ${code}`, async t => { + t.plan(3) + + const body = [] + + await stream( + `http://${server}/${code}`, + { opaque: body, maxRedirections: 10 }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, code) + t.notOk(headers.location) + + return createWritable(opaque) + } + ) + + t.equal(body.length, 0) + }) + } +}) + +t.test('should not follow redirects when using Readable request bodies', async t => { + t.plan(3) + + const body = [] + const server = await startRedirectingServer(t) + + await stream( + `http://${server}`, + { + method: 'POST', + body: createReadable('REQUEST'), + opaque: body, + maxRedirections: 10 + }, + ({ statusCode, headers, opaque }) => { + t.equal(statusCode, 302) + t.equal(headers.location, `http://${server}/302/1`) + + return createWritable(opaque) + } + ) + + t.equal(body.length, 0) +}) + +t.test('should handle errors', async t => { + t.plan(2) + + const body = [] + + try { + await stream('http://localhost:0', { opaque: body, maxRedirections: 10 }, ({ statusCode, headers, opaque }) => { + return createWritable(opaque) + }) + + throw new Error('Did not throw') + } catch (error) { + t.match(error.code, /EADDRNOTAVAIL|ECONNREFUSED/) + t.equal(body.length, 0) + } +}) + +t.test('removes authorization header on third party origin', async t => { + t.plan(1) + + const body = [] + + const [server1] = await startRedirectingWithAuthorization(t, 'secret') + await stream(`http://${server1}`, { + maxRedirections: 10, + opaque: body, + headers: { + authorization: 'secret' + } + }, ({ statusCode, headers, opaque }) => createWritable(opaque)) + + t.equal(body.length, 0) +}) + +t.test('removes cookie header on third party origin', async t => { + t.plan(1) + + const body = [] + + const [server1] = await startRedirectingWithCookie(t, 'a=b') + await stream(`http://${server1}`, { + maxRedirections: 10, + opaque: body, + headers: { + cookie: 'a=b' + } + }, ({ statusCode, headers, opaque }) => createWritable(opaque)) + + t.equal(body.length, 0) +}) + +t.teardown(() => process.exit()) diff --git a/test/redirect-upgrade.js b/test/redirect-upgrade.js new file mode 100644 index 0000000..dbe5840 --- /dev/null +++ b/test/redirect-upgrade.js @@ -0,0 +1,34 @@ +'use 
strict' + +const t = require('tap') +const { upgrade } = require('..') +const { startServer } = require('./utils/redirecting-servers') + +t.test('should upgrade the connection when no redirects are present', async t => { + t.plan(2) + + const server = await startServer(t, (req, res) => { + if (req.url === '/') { + res.statusCode = 301 + res.setHeader('Location', `http://${server}/end`) + res.end('REDIRECT') + return + } + + res.statusCode = 101 + res.setHeader('Connection', 'upgrade') + res.setHeader('Upgrade', req.headers.upgrade) + res.end('') + }) + + const { headers, socket } = await upgrade(`http://${server}/`, { + method: 'GET', + protocol: 'foo/1', + maxRedirections: 10 + }) + + socket.end() + + t.equal(headers.connection, 'upgrade') + t.equal(headers.upgrade, 'foo/1') +}) diff --git a/test/request-crlf.js b/test/request-crlf.js new file mode 100644 index 0000000..abcecf0 --- /dev/null +++ b/test/request-crlf.js @@ -0,0 +1,32 @@ +'use strict' + +const { createServer } = require('http') +const { test } = require('tap') +const { request, errors } = require('..') + +test('should validate content-type CRLF Injection', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + t.fail('should not receive any request') + res.statusCode = 200 + res.end('hello') + }) + + t.teardown(server.close.bind(server)) + + server.listen(0, async () => { + try { + await request(`http://localhost:${server.address().port}`, { + method: 'GET', + headers: { + 'content-type': 'application/json\r\n\r\nGET /foo2 HTTP/1.1' + } + }) + t.fail('request should fail') + } catch (e) { + t.type(e, errors.InvalidArgumentError) + t.equal(e.message, 'invalid content-type header') + } + }) +}) diff --git a/test/request-timeout.js b/test/request-timeout.js new file mode 100644 index 0000000..3ec5c10 --- /dev/null +++ b/test/request-timeout.js @@ -0,0 +1,820 @@ +'use strict' + +const { test } = require('tap') +const { createReadStream, writeFileSync, unlinkSync } = require('fs') +const { Client, errors } = require('..') +const { kConnect } = require('../lib/core/symbols') +const { nodeMajor } = require('../lib/core/util') +const timers = require('../lib/timers') +const { createServer } = require('http') +const EventEmitter = require('events') +const FakeTimers = require('@sinonjs/fake-timers') +const { AbortController } = require('abort-controller') +const { + pipeline, + Readable, + Writable, + PassThrough +} = require('stream') + +test('request timeout', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 1000) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { headersTimeout: 500 }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + }) +}) + +test('request timeout with readable body', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + const tempfile = `${__filename}.10mb.txt` + writeFileSync(tempfile, Buffer.alloc(10 * 1024 * 1024)) + t.teardown(() => unlinkSync(tempfile)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { headersTimeout: 1e3 }) + t.teardown(client.destroy.bind(client)) + + const body = createReadStream(tempfile) + client.request({ path: '/', method: 'POST', body }, (err, response) => { + t.type(err, 
errors.HeadersTimeoutError) + }) + }) +}, { skip: nodeMajor < 14 }) + +test('body timeout', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { bodyTimeout: 50 }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, { body }) => { + t.error(err) + body.on('data', () => { + clock.tick(100) + }).on('error', (err) => { + t.type(err, errors.BodyTimeoutError) + }) + }) + + clock.tick(50) + }) +}) + +test('overridden request timeout', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + clock.tick(100) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { headersTimeout: 500 }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', headersTimeout: 50 }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + clock.tick(50) + }) +}) + +test('overridden body timeout', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + res.write('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { bodyTimeout: 500 }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', bodyTimeout: 50 }, (err, { body }) => { + t.error(err) + body.on('data', () => { + clock.tick(100) + }).on('error', (err) => { + t.type(err, errors.BodyTimeoutError) + }) + }) + + clock.tick(50) + }) +}) + +test('With EE signal', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + clock.tick(100) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 50 + }) + const ee = new EventEmitter() + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: ee }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + clock.tick(50) + }) +}) + +test('With abort-controller signal', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + 
}) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + clock.tick(100) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 50 + }) + const abortController = new AbortController() + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: abortController.signal }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + clock.tick(50) + }) +}) + +test('Abort before timeout (EE)', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const ee = new EventEmitter() + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + ee.emit('abort') + clock.tick(50) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 50 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: ee }, (err, response) => { + t.type(err, errors.RequestAbortedError) + clock.tick(100) + }) + }) +}) + +test('Abort before timeout (abort-controller)', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const abortController = new AbortController() + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + abortController.abort() + clock.tick(50) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 50 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', signal: abortController.signal }, (err, response) => { + t.type(err, errors.RequestAbortedError) + clock.tick(100) + }) + }) +}) + +test('Timeout with pipelining', (t) => { + t.plan(3) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + clock.tick(50) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 10, + headersTimeout: 50 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + }) +}) + +test('Global option', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + 
+ const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + clock.tick(100) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 50 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + clock.tick(50) + }) +}) + +test('Request options overrides global option', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 100) + clock.tick(100) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 50 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + clock.tick(50) + }) +}) + +test('client.destroy should cancel the timeout', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 100 + }) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.ClientDestroyedError) + }) + + client.destroy(err => { + t.error(err) + }) + }) +}) + +test('client.close should wait for the timeout', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 100 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + client.close((err) => { + t.error(err) + }) + + client.on('connect', () => { + process.nextTick(() => { + clock.tick(100) + }) + }) + }) +}) + +test('Validation', (t) => { + t.plan(4) + + try { + const client = new Client('http://localhost:3000', { + headersTimeout: 'foobar' + }) + t.teardown(client.destroy.bind(client)) + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } + + try { + const client = new Client('http://localhost:3000', { + headersTimeout: -1 + }) + t.teardown(client.destroy.bind(client)) + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } + + try { + const client = new Client('http://localhost:3000', { + bodyTimeout: 'foobar' + }) + t.teardown(client.destroy.bind(client)) + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } + + try { + const client = new Client('http://localhost:3000', { + bodyTimeout: -1 + }) + t.teardown(client.destroy.bind(client)) + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } +}) + +test('Disable request timeout', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + 
t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 32e3) + clock.tick(33e3) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 0, + connectTimeout: 0 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + + clock.tick(31e3) + }) +}) + +test('Disable request timeout for a single request', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 32e3) + clock.tick(33e3) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 0, + connectTimeout: 0 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, response) => { + t.error(err) + const bufs = [] + response.body.on('data', (buf) => { + bufs.push(buf) + }) + response.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + + clock.tick(31e3) + }) +}) + +test('stream timeout', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 301e3) + clock.tick(301e3) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { connectTimeout: 0 }) + t.teardown(client.destroy.bind(client)) + + client.stream({ + path: '/', + method: 'GET', + opaque: new PassThrough() + }, (result) => { + t.fail('Should not be called') + }, (err) => { + t.type(err, errors.HeadersTimeoutError) + }) + }) +}) + +test('stream custom timeout', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + res.end('hello') + }, 31e3) + clock.tick(31e3) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 30e3 + }) + t.teardown(client.destroy.bind(client)) + + client.stream({ + path: '/', + method: 'GET', + opaque: new PassThrough() + }, (result) => { + t.fail('Should not be called') + }, (err) => { + t.type(err, errors.HeadersTimeoutError) + }) + }) +}) + +test('pipeline timeout', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + 
t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + req.pipe(res) + }, 301e3) + clock.tick(301e3) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const buf = Buffer.alloc(1e6).toString() + pipeline( + new Readable({ + read () { + this.push(buf) + this.push(null) + } + }), + client.pipeline({ + path: '/', + method: 'PUT' + }, (result) => { + t.fail('Should not be called') + }, (e) => { + t.fail('Should not be called') + }), + new Writable({ + write (chunk, encoding, callback) { + callback() + }, + final (callback) { + callback() + } + }), + (err) => { + t.type(err, errors.HeadersTimeoutError) + } + ) + }) +}) + +test('pipeline timeout', (t) => { + t.plan(1) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + setTimeout(() => { + req.pipe(res) + }, 31e3) + clock.tick(31e3) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + headersTimeout: 30e3 + }) + t.teardown(client.destroy.bind(client)) + + const buf = Buffer.alloc(1e6).toString() + pipeline( + new Readable({ + read () { + this.push(buf) + this.push(null) + } + }), + client.pipeline({ + path: '/', + method: 'PUT' + }, (result) => { + t.fail('Should not be called') + }, (e) => { + t.fail('Should not be called') + }), + new Writable({ + write (chunk, encoding, callback) { + callback() + }, + final (callback) { + callback() + } + }), + (err) => { + t.type(err, errors.HeadersTimeoutError) + } + ) + }) +}) + +test('client.close should not deadlock', (t) => { + t.plan(2) + + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + const server = createServer((req, res) => { + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 200, + headersTimeout: 100 + }) + t.teardown(client.destroy.bind(client)) + + client[kConnect](() => { + client.request({ + path: '/', + method: 'GET' + }, (err, response) => { + t.type(err, errors.HeadersTimeoutError) + }) + + client.close((err) => { + t.error(err) + }) + + clock.tick(100) + }) + }) +}) diff --git a/test/request-timeout2.js b/test/request-timeout2.js new file mode 100644 index 0000000..53943fb --- /dev/null +++ b/test/request-timeout2.js @@ -0,0 +1,48 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') + +test('request timeout with slow readable body', (t) => { + t.plan(1) + + const server = createServer(async (req, res) => { + let str = '' + for await (const x of req) { + str += x + } + res.end(str) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new 
Client(`http://localhost:${server.address().port}`, { headersTimeout: 50 }) + t.teardown(client.close.bind(client)) + + const body = new Readable({ + read () { + if (this._reading) { + return + } + this._reading = true + + this.push('asd') + setTimeout(() => { + this.push('asd') + this.push(null) + }, 2e3) + } + }) + client.request({ + path: '/', + method: 'POST', + headersTimeout: 1e3, + body + }, async (err, response) => { + t.error(err) + await response.body.dump() + }) + }) +}) diff --git a/test/request.js b/test/request.js new file mode 100644 index 0000000..d3a2f74 --- /dev/null +++ b/test/request.js @@ -0,0 +1,248 @@ +'use strict' + +const { createServer } = require('http') +const { test } = require('tap') +const { request } = require('..') + +test('no-slash/one-slash pathname should be included in req.path', async (t) => { + const pathServer = createServer((req, res) => { + t.fail('it shouldn\'t be called') + res.statusCode = 200 + res.end('hello') + }) + + const requestedServer = createServer((req, res) => { + t.equal(`/localhost:${pathServer.address().port}`, req.url) + t.equal('GET', req.method) + t.equal(`localhost:${requestedServer.address().port}`, req.headers.host) + res.statusCode = 200 + res.end('hello') + }) + + t.teardown(requestedServer.close.bind(requestedServer)) + t.teardown(pathServer.close.bind(pathServer)) + + await Promise.all([ + requestedServer.listen(0), + pathServer.listen(0) + ]) + + const noSlashPathname = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + pathname: `localhost:${pathServer.address().port}` + }) + t.equal(noSlashPathname.statusCode, 200) + const noSlashPath = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + path: `localhost:${pathServer.address().port}` + }) + t.equal(noSlashPath.statusCode, 200) + const noSlashPath2Arg = await request( + `http://localhost:${requestedServer.address().port}`, + { path: `localhost:${pathServer.address().port}` } + ) + t.equal(noSlashPath2Arg.statusCode, 200) + const oneSlashPathname = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + pathname: `/localhost:${pathServer.address().port}` + }) + t.equal(oneSlashPathname.statusCode, 200) + const oneSlashPath = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + path: `/localhost:${pathServer.address().port}` + }) + t.equal(oneSlashPath.statusCode, 200) + const oneSlashPath2Arg = await request( + `http://localhost:${requestedServer.address().port}`, + { path: `/localhost:${pathServer.address().port}` } + ) + t.equal(oneSlashPath2Arg.statusCode, 200) + t.end() +}) + +test('protocol-relative URL as pathname should be included in req.path', async (t) => { + const pathServer = createServer((req, res) => { + t.fail('it shouldn\'t be called') + res.statusCode = 200 + res.end('hello') + }) + + const requestedServer = createServer((req, res) => { + t.equal(`//localhost:${pathServer.address().port}`, req.url) + t.equal('GET', req.method) + t.equal(`localhost:${requestedServer.address().port}`, req.headers.host) + res.statusCode = 200 + res.end('hello') + }) + + t.teardown(requestedServer.close.bind(requestedServer)) + t.teardown(pathServer.close.bind(pathServer)) + + await Promise.all([ + requestedServer.listen(0), + pathServer.listen(0) + ]) + + const noSlashPathname = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + pathname: 
`//localhost:${pathServer.address().port}` + }) + t.equal(noSlashPathname.statusCode, 200) + const noSlashPath = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + path: `//localhost:${pathServer.address().port}` + }) + t.equal(noSlashPath.statusCode, 200) + const noSlashPath2Arg = await request( + `http://localhost:${requestedServer.address().port}`, + { path: `//localhost:${pathServer.address().port}` } + ) + t.equal(noSlashPath2Arg.statusCode, 200) + t.end() +}) + +test('Absolute URL as pathname should be included in req.path', async (t) => { + const pathServer = createServer((req, res) => { + t.fail('it shouldn\'t be called') + res.statusCode = 200 + res.end('hello') + }) + + const requestedServer = createServer((req, res) => { + t.equal(`/http://localhost:${pathServer.address().port}`, req.url) + t.equal('GET', req.method) + t.equal(`localhost:${requestedServer.address().port}`, req.headers.host) + res.statusCode = 200 + res.end('hello') + }) + + t.teardown(requestedServer.close.bind(requestedServer)) + t.teardown(pathServer.close.bind(pathServer)) + + await Promise.all([ + requestedServer.listen(0), + pathServer.listen(0) + ]) + + const noSlashPathname = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + pathname: `http://localhost:${pathServer.address().port}` + }) + t.equal(noSlashPathname.statusCode, 200) + const noSlashPath = await request({ + method: 'GET', + origin: `http://localhost:${requestedServer.address().port}`, + path: `http://localhost:${pathServer.address().port}` + }) + t.equal(noSlashPath.statusCode, 200) + const noSlashPath2Arg = await request( + `http://localhost:${requestedServer.address().port}`, + { path: `http://localhost:${pathServer.address().port}` } + ) + t.equal(noSlashPath2Arg.statusCode, 200) + t.end() +}) + +test('DispatchOptions#reset', scope => { + scope.plan(4) + + scope.test('Should throw if invalid reset option', t => { + t.plan(1) + + t.rejects(request({ + method: 'GET', + origin: 'http://somehost.xyz', + reset: 0 + }), 'invalid reset') + }) + + scope.test('Should include "connection:close" if reset true', async t => { + const server = createServer((req, res) => { + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(req.headers.connection, 'close') + res.statusCode = 200 + res.end('hello') + }) + + t.plan(3) + + t.teardown(server.close.bind(server)) + + await new Promise((resolve, reject) => { + server.listen(0, (err) => { + if (err != null) reject(err) + else resolve() + }) + }) + + await request({ + method: 'GET', + origin: `http://localhost:${server.address().port}`, + reset: true + }) + }) + + scope.test('Should include "connection:keep-alive" if reset false', async t => { + const server = createServer((req, res) => { + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(req.headers.connection, 'keep-alive') + res.statusCode = 200 + res.end('hello') + }) + + t.plan(3) + + t.teardown(server.close.bind(server)) + + await new Promise((resolve, reject) => { + server.listen(0, (err) => { + if (err != null) reject(err) + else resolve() + }) + }) + + await request({ + method: 'GET', + origin: `http://localhost:${server.address().port}`, + reset: false + }) + }) + + scope.test('Should react to manual set of "connection:close" header', async t => { + const server = createServer((req, res) => { + t.equal('GET', req.method) + 
t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(req.headers.connection, 'close') + res.statusCode = 200 + res.end('hello') + }) + + t.plan(3) + + t.teardown(server.close.bind(server)) + + await new Promise((resolve, reject) => { + server.listen(0, (err) => { + if (err != null) reject(err) + else resolve() + }) + }) + + await request({ + method: 'GET', + origin: `http://localhost:${server.address().port}`, + headers: { + connection: 'close' + } + }) + }) +}) diff --git a/test/retry-handler.js b/test/retry-handler.js new file mode 100644 index 0000000..a4577a6 --- /dev/null +++ b/test/retry-handler.js @@ -0,0 +1,622 @@ +'use strict' +const { createServer } = require('node:http') +const { once } = require('node:events') + +const tap = require('tap') + +const { RetryHandler, Client } = require('..') +const { RequestHandler } = require('../lib/api/api-request') + +tap.test('Should retry status code', t => { + let counter = 0 + const chunks = [] + const server = createServer() + const dispatchOptions = { + retryOptions: { + retry: (err, { state, opts }, done) => { + counter++ + + if ( + err.statusCode === 500 || + err.message.includes('other side closed') + ) { + setTimeout(done, 500) + return + } + + return done(err) + } + }, + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + } + + t.plan(4) + + server.on('request', (req, res) => { + switch (counter) { + case 0: + req.destroy() + return + case 1: + res.writeHead(500) + res.end('failed') + return + case 2: + res.writeHead(200) + res.end('hello world!') + return + default: + t.fail() + } + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: { + onConnect () { + t.pass() + }, + onBodySent () { + t.pass() + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.equal(status, 200) + return true + }, + onData (chunk) { + chunks.push(chunk) + return true + }, + onComplete () { + t.equal(Buffer.concat(chunks).toString('utf-8'), 'hello world!') + t.equal(counter, 2) + }, + onError () { + t.fail() + } + } + }) + + t.teardown(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + { + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + }, + handler + ) + }) +}) + +tap.test('Should use retry-after header for retries', t => { + let counter = 0 + const chunks = [] + const server = createServer() + let checkpoint + const dispatchOptions = { + method: 'PUT', + path: '/', + headers: { + 'content-type': 'application/json' + } + } + + t.plan(4) + + server.on('request', (req, res) => { + switch (counter) { + case 0: + res.writeHead(429, { + 'retry-after': 1 + }) + res.end('rate limit') + checkpoint = Date.now() + counter++ + return + case 1: + res.writeHead(200) + res.end('hello world!') + t.ok(Date.now() - checkpoint >= 500) + counter++ + return + default: + t.fail('unexpected request') + } + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: { + onConnect () { + t.pass() + }, + onBodySent () { + t.pass() + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.equal(status, 200) + return true + }, + onData (chunk) { + chunks.push(chunk) + return true + }, + onComplete () { + 
t.equal(Buffer.concat(chunks).toString('utf-8'), 'hello world!') + }, + onError (err) { + t.error(err) + } + } + }) + + t.teardown(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + { + method: 'PUT', + path: '/', + headers: { + 'content-type': 'application/json' + } + }, + handler + ) + }) +}) + +tap.test('Should use retry-after header for retries (date)', t => { + let counter = 0 + const chunks = [] + const server = createServer() + let checkpoint + const dispatchOptions = { + method: 'PUT', + path: '/', + headers: { + 'content-type': 'application/json' + } + } + + t.plan(4) + + server.on('request', (req, res) => { + switch (counter) { + case 0: + res.writeHead(429, { + 'retry-after': new Date( + new Date().setSeconds(new Date().getSeconds() + 1) + ).toUTCString() + }) + res.end('rate limit') + checkpoint = Date.now() + counter++ + return + case 1: + res.writeHead(200) + res.end('hello world!') + t.ok(Date.now() - checkpoint >= 1) + counter++ + return + default: + t.fail('unexpected request') + } + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: { + onConnect () { + t.pass() + }, + onBodySent () { + t.pass() + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.equal(status, 200) + return true + }, + onData (chunk) { + chunks.push(chunk) + return true + }, + onComplete () { + t.equal(Buffer.concat(chunks).toString('utf-8'), 'hello world!') + }, + onError (err) { + t.error(err) + } + } + }) + + t.teardown(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + { + method: 'PUT', + path: '/', + headers: { + 'content-type': 'application/json' + } + }, + handler + ) + }) +}) + +tap.test('Should retry with defaults', t => { + let counter = 0 + const chunks = [] + const server = createServer() + const dispatchOptions = { + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + } + + server.on('request', (req, res) => { + switch (counter) { + case 0: + req.destroy() + counter++ + return + case 1: + res.writeHead(500) + res.end('failed') + counter++ + return + case 2: + res.writeHead(200) + res.end('hello world!') + counter++ + return + default: + t.fail() + } + }) + + t.plan(3) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: { + onConnect () { + t.pass() + }, + onBodySent () { + t.pass() + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.equal(status, 200) + return true + }, + onData (chunk) { + chunks.push(chunk) + return true + }, + onComplete () { + t.equal(Buffer.concat(chunks).toString('utf-8'), 'hello world!') + }, + onError (err) { + t.error(err) + } + } + }) + + t.teardown(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + { + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + }, + handler + ) + }) +}) + +tap.test('Should handle 206 partial content', t => { + const chunks = [] + let counter = 0 + + // Took from: https://github.com/nxtedition/nxt-lib/blob/4b001ebc2f22cf735a398f35ff800dd553fe5933/test/undici/retry.js#L47 + let x = 0 + const server = createServer((req, res) => { + if (x === 0) { + 
t.pass() + res.setHeader('etag', 'asd') + res.write('abc') + setTimeout(() => { + res.destroy() + }, 1e2) + } else if (x === 1) { + t.same(req.headers.range, 'bytes=3-') + res.setHeader('content-range', 'bytes 3-6/6') + res.setHeader('etag', 'asd') + res.statusCode = 206 + res.end('def') + } + x++ + }) + + const dispatchOptions = { + retryOptions: { + retry: function (err, _, done) { + counter++ + + if (err.code && err.code === 'UND_ERR_DESTROYED') { + return done(false) + } + + if (err.statusCode === 206) return done(err) + + setTimeout(done, 800) + } + }, + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + } + + t.plan(8) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: (...args) => { + return client.dispatch(...args) + }, + handler: { + onRequestSent () { + t.pass() + }, + onConnect () { + t.pass() + }, + onBodySent () { + t.pass() + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.equal(status, 200) + return true + }, + onData (chunk) { + chunks.push(chunk) + return true + }, + onComplete () { + t.equal(Buffer.concat(chunks).toString('utf-8'), 'abcdef') + t.equal(counter, 1) + }, + onError () { + t.fail() + } + } + }) + + client.dispatch( + { + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + }, + handler + ) + + t.teardown(async () => { + await client.close() + + server.close() + await once(server, 'close') + }) + }) +}) + +tap.test('Should handle 206 partial content - bad-etag', t => { + const chunks = [] + + // Took from: https://github.com/nxtedition/nxt-lib/blob/4b001ebc2f22cf735a398f35ff800dd553fe5933/test/undici/retry.js#L47 + let x = 0 + const server = createServer((req, res) => { + if (x === 0) { + t.pass() + res.setHeader('etag', 'asd') + res.write('abc') + setTimeout(() => { + res.destroy() + }, 1e2) + } else if (x === 1) { + t.same(req.headers.range, 'bytes=3-') + res.setHeader('content-range', 'bytes 3-6/6') + res.setHeader('etag', 'erwsd') + res.statusCode = 206 + res.end('def') + } + x++ + }) + + const dispatchOptions = { + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + } + + t.plan(6) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler( + dispatchOptions, + { + dispatch: (...args) => { + return client.dispatch(...args) + }, + handler: { + onConnect () { + t.pass() + }, + onBodySent () { + t.pass() + }, + onHeaders (status, _rawHeaders, resume, _statusMessage) { + t.pass() + return true + }, + onData (chunk) { + chunks.push(chunk) + return true + }, + onComplete () { + t.error('should not complete') + }, + onError (err) { + t.equal(Buffer.concat(chunks).toString('utf-8'), 'abc') + t.equal(err.code, 'UND_ERR_REQ_RETRY') + } + } + } + ) + + client.dispatch( + { + method: 'GET', + path: '/', + headers: { + 'content-type': 'application/json' + } + }, + handler + ) + + t.teardown(async () => { + await client.close() + + server.close() + await once(server, 'close') + }) + }) +}) + +tap.test('retrying a request with a body', t => { + let counter = 0 + const server = createServer() + const dispatchOptions = { + retryOptions: { + retry: (err, { state, opts }, done) => { + counter++ + + if ( + err.statusCode === 500 || + err.message.includes('other side closed') + ) { + setTimeout(done, 500) + return + } + + return done(err) + } + }, + method: 'POST', + path: 
'/', + headers: { + 'content-type': 'application/json' + }, + body: JSON.stringify({ hello: 'world' }) + } + + t.plan(1) + + server.on('request', (req, res) => { + switch (counter) { + case 0: + req.destroy() + return + case 1: + res.writeHead(500) + res.end('failed') + return + case 2: + res.writeHead(200) + res.end('hello world!') + return + default: + t.fail() + } + }) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + const handler = new RetryHandler(dispatchOptions, { + dispatch: client.dispatch.bind(client), + handler: new RequestHandler(dispatchOptions, (err, data) => { + t.error(err) + }) + }) + + t.teardown(async () => { + await client.close() + server.close() + + await once(server, 'close') + }) + + client.dispatch( + { + method: 'POST', + path: '/', + headers: { + 'content-type': 'application/json' + }, + body: JSON.stringify({ hello: 'world' }) + }, + handler + ) + }) +}) diff --git a/test/socket-back-pressure.js b/test/socket-back-pressure.js new file mode 100644 index 0000000..9e774b3 --- /dev/null +++ b/test/socket-back-pressure.js @@ -0,0 +1,54 @@ +'use strict' + +const { Client } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') +const { test } = require('tap') + +test('socket back-pressure', (t) => { + t.plan(3) + + const server = createServer() + let bytesWritten = 0 + + const buf = Buffer.allocUnsafe(16384) + const src = new Readable({ + read () { + bytesWritten += buf.length + this.push(buf) + if (bytesWritten >= 1e6) { + this.push(null) + } + } + }) + + server.on('request', (req, res) => { + src.pipe(res) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 1 + }) + t.teardown(client.destroy.bind(client)) + + client.request({ path: '/', method: 'GET', opaque: 'asd' }, (err, data) => { + t.error(err) + data.body + .resume() + .once('data', () => { + data.body.pause() + // TODO: Try to avoid timeout. 
+ setTimeout(() => { + t.ok(data.body._readableState.length < bytesWritten - data.body._readableState.highWaterMark) + src.push(null) + data.body.resume() + }, 1e3) + }) + .on('end', () => { + t.pass() + }) + }) + }) +}) diff --git a/test/socket-timeout.js b/test/socket-timeout.js new file mode 100644 index 0000000..8019c74 --- /dev/null +++ b/test/socket-timeout.js @@ -0,0 +1,100 @@ +'use strict' + +const { test } = require('tap') +const { Client, errors } = require('..') +const timers = require('../lib/timers') +const { createServer } = require('http') +const FakeTimers = require('@sinonjs/fake-timers') + +test('timeout with pipelining 1', (t) => { + t.plan(9) + + const server = createServer() + + server.once('request', (req, res) => { + t.pass('first request received, we are letting this timeout on the client') + + server.once('request', (req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + pipelining: 1, + headersTimeout: 500, + bodyTimeout: 500 + }) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + opaque: 'asd' + }, (err, data) => { + t.type(err, errors.HeadersTimeoutError) // we are expecting an error + t.equal(data.opaque, 'asd') + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, { statusCode, headers, body }) => { + t.error(err) + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('Disable socket timeout', (t) => { + t.plan(2) + + const server = createServer() + const clock = FakeTimers.install() + t.teardown(clock.uninstall.bind(clock)) + + const orgTimers = { ...timers } + Object.assign(timers, { setTimeout, clearTimeout }) + t.teardown(() => { + Object.assign(timers, orgTimers) + }) + + server.once('request', (req, res) => { + setTimeout(() => { + res.end('hello') + }, 31e3) + clock.tick(32e3) + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`, { + bodyTimeout: 0, + headersTimeout: 0 + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, result) => { + t.error(err) + const bufs = [] + result.body.on('data', (buf) => { + bufs.push(buf) + }) + result.body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) diff --git a/test/stream-compat.js b/test/stream-compat.js new file mode 100644 index 0000000..71d2410 --- /dev/null +++ b/test/stream-compat.js @@ -0,0 +1,75 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') +const { Readable } = require('stream') +const EE = require('events') + +test('stream body without destroy', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const signal = new EE() + const body = new Readable({ read () {} }) + body.destroy = undefined + body.on('error', (err) => { + t.ok(err) + }) + 
client.request({ + path: '/', + method: 'PUT', + signal, + body + }, (err, data) => { + t.ok(err) + }) + signal.emit('abort') + }) +}) + +test('IncomingMessage', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.end() + }) + t.teardown(server.close.bind(server)) + + server.listen(0, () => { + const proxyClient = new Client(`http://localhost:${server.address().port}`) + t.teardown(proxyClient.destroy.bind(proxyClient)) + + const proxy = createServer((req, res) => { + proxyClient.request({ + path: '/', + method: 'PUT', + body: req + }, (err, data) => { + t.error(err) + data.body.pipe(res) + }) + }) + t.teardown(proxy.close.bind(proxy)) + + proxy.listen(0, () => { + const client = new Client(`http://localhost:${proxy.address().port}`) + t.teardown(client.destroy.bind(client)) + + client.request({ + path: '/', + method: 'PUT', + body: 'hello world' + }, (err, data) => { + t.error(err) + }) + }) + }) +}) diff --git a/test/tls-client-cert.js b/test/tls-client-cert.js new file mode 100644 index 0000000..8ae301d --- /dev/null +++ b/test/tls-client-cert.js @@ -0,0 +1,70 @@ +'use strict' + +const { readFileSync } = require('fs') +const { join } = require('path') +const https = require('https') +const { test } = require('tap') +const { Client } = require('..') +const { kSocket } = require('../lib/core/symbols') +const { nodeMajor } = require('../lib/core/util') + +const serverOptions = { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'client-ca-crt.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8'), + requestCert: true, + rejectUnauthorized: false +} + +test('Client using valid client certificate', { skip: nodeMajor > 16 }, t => { + t.plan(5) + + const server = https.createServer(serverOptions, (req, res) => { + const authorized = req.client.authorized + t.ok(authorized) + + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + + server.listen(0, function () { + const tls = { + ca: [ + readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + ], + key: readFileSync(join(__dirname, 'fixtures', 'client-key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'client-crt.pem'), 'utf8'), + rejectUnauthorized: false, + servername: 'agent1' + } + const client = new Client(`https://localhost:${server.address().port}`, { + connect: tls + }) + + t.teardown(() => { + client.close() + server.close() + }) + + client.request({ + path: '/', + method: 'GET' + }, (err, { statusCode, body }) => { + t.error(err) + t.equal(statusCode, 200) + + const authorized = client[kSocket].authorized + t.ok(authorized) + + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) diff --git a/test/tls-session-reuse.js b/test/tls-session-reuse.js new file mode 100644 index 0000000..ab012f1 --- /dev/null +++ b/test/tls-session-reuse.js @@ -0,0 +1,185 @@ +'use strict' + +const { readFileSync } = require('fs') +const { join } = require('path') +const https = require('https') +const crypto = require('crypto') +const { test, teardown } = require('tap') +const { Client, Pool } = require('..') +const { kSocket } = require('../lib/core/symbols') +const { nodeMajor } = require('../lib/core/util') + +const options = { + key: readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8'), + cert: readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8') +} +const ca 
= readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8') + +test('A client should disable session caching', { + skip: nodeMajor < 11 // tls socket session event has been added in Node 11. Cf. https://nodejs.org/api/tls.html#tls_event_session +}, t => { + const clientSessions = {} + let serverRequests = 0 + + t.test('Prepare request', t => { + t.plan(3) + const server = https.createServer(options, (req, res) => { + if (req.url === '/drop-key') { + server.setTicketKeys(crypto.randomBytes(48)) + } + serverRequests++ + res.end() + }) + + server.listen(0, function () { + const tls = { + ca, + rejectUnauthorized: false, + servername: 'agent1' + } + const client = new Client(`https://localhost:${server.address().port}`, { + pipelining: 0, + tls, + maxCachedSessions: 0 + }) + + t.teardown(() => { + client.close() + server.close() + }) + + const queue = [{ + name: 'first', + method: 'GET', + path: '/' + }, { + name: 'second', + method: 'GET', + path: '/' + }] + + function request () { + const options = queue.shift() + if (options.ciphers) { + // Choose different cipher to use different cache entry + tls.ciphers = options.ciphers + } else { + delete tls.ciphers + } + client.request(options, (err, data) => { + t.error(err) + clientSessions[options.name] = client[kSocket].getSession() + data.body.resume().on('end', () => { + if (queue.length !== 0) { + return request() + } + t.pass() + }) + }) + } + request() + }) + }) + + t.test('Verify cached sessions', t => { + t.plan(2) + t.equal(serverRequests, 2) + t.not( + clientSessions.first.toString('hex'), + clientSessions.second.toString('hex') + ) + }) + + t.end() +}) + +test('A pool should be able to reuse TLS sessions between clients', { + skip: nodeMajor < 11 // tls socket session event has been added in Node 11. Cf. https://nodejs.org/api/tls.html#tls_event_session +}, t => { + let serverRequests = 0 + + const REQ_COUNT = 10 + const ASSERT_PERFORMANCE_GAIN = false + + t.test('Prepare request', t => { + t.plan(2 + 1 + (ASSERT_PERFORMANCE_GAIN ? 
1 : 0)) + const server = https.createServer(options, (req, res) => { + serverRequests++ + res.end() + }) + + let numSessions = 0 + const sessions = [] + + server.listen(0, async () => { + const poolWithSessionReuse = new Pool(`https://localhost:${server.address().port}`, { + pipelining: 0, + connections: 100, + maxCachedSessions: 1, + tls: { + ca, + rejectUnauthorized: false, + servername: 'agent1' + } + }) + const poolWithoutSessionReuse = new Pool(`https://localhost:${server.address().port}`, { + pipelining: 0, + connections: 100, + maxCachedSessions: 0, + tls: { + ca, + rejectUnauthorized: false, + servername: 'agent1' + } + }) + + poolWithSessionReuse.on('connect', (url, targets) => { + const y = targets[1][kSocket].getSession() + if (sessions.some(x => x.equals(y))) { + return + } + sessions.push(y) + numSessions++ + }) + + t.teardown(() => { + poolWithSessionReuse.close() + poolWithoutSessionReuse.close() + server.close() + }) + + function request (pool, expectTLSSessionCache) { + return new Promise((resolve, reject) => { + pool.request({ + method: 'GET', + path: '/' + }, (err, data) => { + if (err) return reject(err) + data.body.resume().on('end', resolve) + }) + }) + } + + async function runRequests (pool, numIterations, expectTLSSessionCache) { + const requests = [] + // For the session reuse, we first need one client to connect to receive a valid tls session to reuse + await request(pool, false) + while (numIterations--) { + requests.push(request(pool, expectTLSSessionCache)) + } + return await Promise.all(requests) + } + + await runRequests(poolWithoutSessionReuse, REQ_COUNT, false) + await runRequests(poolWithSessionReuse, REQ_COUNT, true) + + t.equal(numSessions, 2) + t.equal(serverRequests, 2 + REQ_COUNT * 2) + t.pass() + }) + }) + + t.end() +}) + +teardown(() => process.exit()) diff --git a/test/tls.js b/test/tls.js new file mode 100644 index 0000000..fbe07b0 --- /dev/null +++ b/test/tls.js @@ -0,0 +1,188 @@ +'use strict' + +// TODO: Don't depend on external URLs. 
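// A minimal self-contained sketch of the same kind of check, assuming the
// key.pem/cert.pem/ca.pem fixtures and the 'agent1' servername already used by
// test/tls-client-cert.js and test/tls-session-reuse.js; it trades the external
// origins below for a local HTTPS server so the assertions do not depend on
// network access.

const { test } = require('tap')
const https = require('https')
const { readFileSync } = require('fs')
const { join } = require('path')
const { Client } = require('..')

test('tls get against a local https server', (t) => {
  t.plan(3)

  const server = https.createServer({
    key: readFileSync(join(__dirname, 'fixtures', 'key.pem'), 'utf8'),
    cert: readFileSync(join(__dirname, 'fixtures', 'cert.pem'), 'utf8')
  }, (req, res) => {
    res.end('hello')
  })
  t.teardown(server.close.bind(server))

  server.listen(0, () => {
    const client = new Client(`https://localhost:${server.address().port}`, {
      connect: {
        ca: [readFileSync(join(__dirname, 'fixtures', 'ca.pem'), 'utf8')],
        rejectUnauthorized: false,
        servername: 'agent1'
      }
    })
    t.teardown(client.close.bind(client))

    client.request({ path: '/', method: 'GET' }, (err, data) => {
      t.error(err)
      t.equal(data.statusCode, 200)
      data.body.resume().on('end', () => {
        t.pass()
      })
    })
  })
})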
+ +// const { test } = require('tap') +// const { Client } = require('..') +// const { kSocket } = require('../lib/core/symbols') +// const { Readable } = require('stream') +// const { kRunning } = require('../lib/core/symbols') + +// test('tls get 1', (t) => { +// t.plan(4) + +// const client = new Client('https://www.github.com') +// t.teardown(client.close.bind(client)) + +// client.request({ method: 'GET', path: '/' }, (err, data) => { +// t.error(err) +// t.equal(data.statusCode, 301) +// t.equal(client[kSocket].authorized, true) + +// data.body +// .resume() +// .on('end', () => { +// t.pass() +// }) +// }) +// }) + +// test('tls get 2', (t) => { +// t.plan(4) + +// const client = new Client('https://140.82.112.4', { +// tls: { +// servername: 'www.github.com' +// } +// }) +// t.teardown(client.close.bind(client)) + +// client.request({ method: 'GET', path: '/' }, (err, data) => { +// t.error(err) +// t.equal(data.statusCode, 301) +// t.equal(client[kSocket].authorized, true) + +// data.body +// .resume() +// .on('end', () => { +// t.pass() +// }) +// }) +// }) + +// test('tls get 3', (t) => { +// t.plan(8) + +// const client = new Client('https://140.82.112.4') +// t.teardown(client.destroy.bind(client)) + +// let didDisconnect = false +// client.request({ +// method: 'GET', +// path: '/', +// headers: { +// host: 'www.github.com' +// } +// }, (err, data) => { +// t.error(err) +// t.equal(data.statusCode, 301) +// t.equal(client[kSocket].authorized, true) + +// data.body +// .resume() +// .on('end', () => { +// t.pass() +// }) +// client.once('disconnect', () => { +// t.pass() +// didDisconnect = true +// }) +// }) + +// const body = new Readable({ read () {} }) +// body.on('error', (err) => { +// t.ok(err) +// }) +// client.request({ +// method: 'POST', +// path: '/', +// body, +// headers: { +// host: 'www.asd.com' +// } +// }, (err, data) => { +// t.equal(didDisconnect, true) +// t.ok(err) +// }) +// }) + +// test('tls get 4', (t) => { +// t.plan(9) + +// const client = new Client('https://140.82.112.4', { +// tls: { +// servername: 'www.github.com' +// }, +// pipelining: 2 +// }) +// t.teardown(client.close.bind(client)) + +// client.request({ +// method: 'GET', +// path: '/', +// headers: { +// host: '140.82.112.4' +// } +// }, (err, data) => { +// t.error(err) +// t.equal(client[kRunning], 1) +// t.equal(data.statusCode, 301) +// t.equal(client[kSocket].authorized, true) + +// client.request({ +// method: 'GET', +// path: '/', +// headers: { +// host: 'www.github.com' +// } +// }, (err, data) => { +// t.error(err) +// t.equal(data.statusCode, 301) +// t.equal(client[kSocket].authorized, true) + +// data.body +// .resume() +// .on('end', () => { +// t.pass() +// }) +// }) + +// data.body +// .resume() +// .on('end', () => { +// t.pass() +// }) +// }) +// }) + +// test('tls get 5', (t) => { +// t.plan(7) + +// const client = new Client('https://140.82.112.4') +// t.teardown(client.destroy.bind(client)) + +// let didDisconnect = false +// client.request({ +// method: 'GET', +// path: '/', +// headers: { +// host: 'www.github.com' +// } +// }, (err, data) => { +// t.error(err) +// t.equal(data.statusCode, 301) +// t.equal(client[kSocket].authorized, true) + +// data.body +// .resume() +// .on('end', () => { +// t.pass() +// }) +// client.once('disconnect', () => { +// t.pass() +// didDisconnect = true +// }) +// }) + +// client.request({ +// method: 'POST', +// path: '/', +// body: [], +// headers: { +// host: 'www.asd.com' +// } +// }, (err, data) => { +// t.equal(didDisconnect, 
true) +// t.ok(err) +// }) +// }) diff --git a/test/trailers.js b/test/trailers.js new file mode 100644 index 0000000..ca56de2 --- /dev/null +++ b/test/trailers.js @@ -0,0 +1,57 @@ +'use strict' + +const { test } = require('tap') +const { Client } = require('..') +const { createServer } = require('http') + +test('response trailers missing is OK', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.writeHead(200, { + Trailer: 'content-length' + }) + res.end('response') + }) + t.teardown(server.close.bind(server)) + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body } = await client.request({ + path: '/', + method: 'GET', + body: 'asd' + }) + + t.equal(await body.text(), 'response') + }) +}) + +test('response trailers missing w trailers is OK', (t) => { + t.plan(2) + + const server = createServer((req, res) => { + res.writeHead(200, { + Trailer: 'content-length' + }) + res.addTrailers({ + asd: 'foo' + }) + res.end('response') + }) + t.teardown(server.close.bind(server)) + server.listen(0, async () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.destroy.bind(client)) + + const { body, trailers } = await client.request({ + path: '/', + method: 'GET', + body: 'asd' + }) + + t.equal(await body.text(), 'response') + t.same(trailers, { asd: 'foo' }) + }) +}) diff --git a/test/types/agent.test-d.ts b/test/types/agent.test-d.ts new file mode 100644 index 0000000..5e5275f --- /dev/null +++ b/test/types/agent.test-d.ts @@ -0,0 +1,110 @@ +import { Duplex, Readable, Writable } from 'stream' +import { expectAssignable } from 'tsd' +import { Agent, Dispatcher } from '../..' +import { URL } from 'url' + +expectAssignable(new Agent()) +expectAssignable(new Agent({})) +expectAssignable(new Agent({ maxRedirections: 1 })) +expectAssignable(new Agent({ factory: () => new Dispatcher() })) + +{ + const agent = new Agent() + + // properties + expectAssignable(agent.closed) + expectAssignable(agent.destroyed) + + // request + expectAssignable>(agent.request({ origin: '', path: '', method: 'GET' })) + expectAssignable>(agent.request({ origin: '', path: '', method: 'GET', onInfo: ((info) => {}) })) + expectAssignable>(agent.request({ origin: new URL('http://localhost'), path: '', method: 'GET' })) + expectAssignable(agent.request({ origin: '', path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + expectAssignable(agent.request({ origin: new URL('http://localhost'), path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // stream + expectAssignable>(agent.stream({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable>(agent.stream({ origin: '', path: '', method: 'GET', onInfo: ((info) => {}) }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable>(agent.stream({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable(agent.stream( + { origin: '', path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + expectAssignable(agent.stream( + { origin: new URL('http://localhost'), path: '', method: 'GET' }, + data => { + expectAssignable(data) + 
return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + + // pipeline + expectAssignable(agent.pipeline({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + expectAssignable(agent.pipeline({ origin: '', path: '', method: 'GET', onInfo: ((info) => {}) }, data => { + expectAssignable(data) + return new Readable() + })) + expectAssignable(agent.pipeline({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + + // upgrade + expectAssignable>(agent.upgrade({ path: '' })) + expectAssignable(agent.upgrade({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // connect + expectAssignable>(agent.connect({ path: '' })) + expectAssignable(agent.connect({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // dispatch + expectAssignable(agent.dispatch({ origin: '', path: '', method: 'GET' }, {})) + expectAssignable(agent.dispatch({ origin: '', path: '', method: 'GET', maxRedirections: 1 }, {})) + + // close + expectAssignable>(agent.close()) + expectAssignable(agent.close(() => {})) + + // destroy + expectAssignable>(agent.destroy()) + expectAssignable>(agent.destroy(new Error())) + expectAssignable>(agent.destroy(null)) + expectAssignable(agent.destroy(() => {})) + expectAssignable(agent.destroy(new Error(), () => {})) + expectAssignable(agent.destroy(null, () => {})) +} diff --git a/test/types/api.test-d.ts b/test/types/api.test-d.ts new file mode 100644 index 0000000..c64b131 --- /dev/null +++ b/test/types/api.test-d.ts @@ -0,0 +1,28 @@ +import { Duplex, Readable, Writable } from 'stream' +import { expectAssignable } from 'tsd' +import { Dispatcher, request, stream, pipeline, connect, upgrade } from '../..' + +// request +expectAssignable>(request('')) +expectAssignable>(request('', { })) +expectAssignable>(request('', { method: 'GET', reset: false })) + +// stream +expectAssignable>(stream('', { method: 'GET' }, data => { + expectAssignable(data) + return new Writable() +})) + +// pipeline +expectAssignable(pipeline('', { method: 'GET' }, data => { + expectAssignable(data) + return new Readable() +})) + +// connect +expectAssignable>(connect('')) +expectAssignable>(connect('', {})) + +// upgrade +expectAssignable>(upgrade('')) +expectAssignable>(upgrade('', {})) diff --git a/test/types/balanced-pool.test-d.ts b/test/types/balanced-pool.test-d.ts new file mode 100644 index 0000000..d7ccf7b --- /dev/null +++ b/test/types/balanced-pool.test-d.ts @@ -0,0 +1,113 @@ +import { Duplex, Readable, Writable } from 'stream' +import { expectAssignable } from 'tsd' +import { Dispatcher, BalancedPool, Client } from '../..' 
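// A brief runtime sketch of the upstream management and request flow whose
// typings are asserted below; the 127.0.0.1 origins are placeholders and the
// call shapes mirror the assertions in this file.
async function balancedPoolSketch (): Promise<void> {
  const pool = new BalancedPool(['http://127.0.0.1:3001', 'http://127.0.0.1:3002'])
  pool.addUpstream('http://127.0.0.1:3003')
  const { statusCode, body } = await pool.request({ path: '/', method: 'GET' })
  console.log(statusCode, await body.text())
  pool.removeUpstream('http://127.0.0.1:3003')
  await pool.close()
}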
+import { URL } from 'url' + +expectAssignable(new BalancedPool('')) +expectAssignable(new BalancedPool('', {})) +expectAssignable(new BalancedPool(new URL('http://localhost'), {})) +expectAssignable(new BalancedPool('', { factory: () => new Dispatcher() })) +expectAssignable(new BalancedPool('', { factory: (origin, opts) => new Client(origin, opts) })) +expectAssignable(new BalancedPool('', { connections: 1 })) +expectAssignable(new BalancedPool(['http://localhost:4242', 'http://www.nodejs.org'])) +expectAssignable(new BalancedPool([new URL('http://localhost:4242'),new URL('http://www.nodejs.org')], {})) + +{ + const pool = new BalancedPool('', {}) + + // properties + expectAssignable(pool.closed) + expectAssignable(pool.destroyed) + + // upstreams + expectAssignable(pool.addUpstream('http://www.nodejs.org')) + expectAssignable(pool.removeUpstream('http://www.nodejs.org')) + expectAssignable(pool.addUpstream(new URL('http://www.nodejs.org'))) + expectAssignable(pool.removeUpstream(new URL('http://www.nodejs.org'))) + expectAssignable(pool.upstreams) + + + // request + expectAssignable>(pool.request({ origin: '', path: '', method: 'GET' })) + expectAssignable>(pool.request({ origin: new URL('http://localhost'), path: '', method: 'GET' })) + expectAssignable(pool.request({ origin: '', path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + expectAssignable(pool.request({ origin: new URL('http://localhost'), path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // stream + expectAssignable>(pool.stream({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable>(pool.stream({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable(pool.stream( + { origin: '', path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + expectAssignable(pool.stream( + { origin: new URL('http://localhost'), path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + + // pipeline + expectAssignable(pool.pipeline({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + expectAssignable(pool.pipeline({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + + // upgrade + expectAssignable>(pool.upgrade({ path: '' })) + expectAssignable(pool.upgrade({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // connect + expectAssignable>(pool.connect({ path: '' })) + expectAssignable(pool.connect({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // dispatch + expectAssignable(pool.dispatch({ origin: '', path: '', method: 'GET' }, {})) + expectAssignable(pool.dispatch({ origin: new URL('http://localhost'), path: '', method: 'GET' }, {})) + + // close + expectAssignable>(pool.close()) + expectAssignable(pool.close(() => {})) + + // destroy + expectAssignable>(pool.destroy()) + expectAssignable>(pool.destroy(new Error())) + expectAssignable>(pool.destroy(null)) + expectAssignable(pool.destroy(() => {})) + expectAssignable(pool.destroy(new Error(), () => {})) 
+ expectAssignable(pool.destroy(null, () => {})) +} diff --git a/test/types/cache-storage.test-d.ts b/test/types/cache-storage.test-d.ts new file mode 100644 index 0000000..c21efbd --- /dev/null +++ b/test/types/cache-storage.test-d.ts @@ -0,0 +1,39 @@ +import { expectAssignable } from 'tsd' +import { + caches, + CacheStorage, + Cache, + CacheQueryOptions, + MultiCacheQueryOptions, + RequestInfo, + Request, + Response +} from '../..' + +declare const response: Response +declare const request: Request +declare const options: RequestInfo +declare const cache: Cache + +expectAssignable(caches) +expectAssignable({}) +expectAssignable({ cacheName: 'v1' }) +expectAssignable({ ignoreMethod: false, ignoreSearch: true }) + +expectAssignable({}) +expectAssignable({ ignoreVary: false, ignoreMethod: true, ignoreSearch: true }) + +expectAssignable>(caches.open('v1')) +expectAssignable>(caches.match(options)) +expectAssignable>(caches.match(request)) +expectAssignable>(caches.has('v1')) +expectAssignable>(caches.delete('v1')) +expectAssignable>(caches.keys()) + +expectAssignable>(cache.match(options)) +expectAssignable>(cache.matchAll('v1')) +expectAssignable>(cache.delete('v1')) +expectAssignable>(cache.keys()) +expectAssignable>(cache.add(options)) +expectAssignable>(cache.addAll([options])) +expectAssignable>(cache.put(options, response)) diff --git a/test/types/client.test-d.ts b/test/types/client.test-d.ts new file mode 100644 index 0000000..c416d77 --- /dev/null +++ b/test/types/client.test-d.ts @@ -0,0 +1,185 @@ +import { Duplex, Readable, Writable } from 'stream' +import { expectAssignable } from 'tsd' +import { Client, Dispatcher } from '../..' +import { URL } from 'url' + +expectAssignable(new Client('')) +expectAssignable(new Client('', {})) +expectAssignable(new Client('', { + maxRequestsPerClient: 10 +})) +expectAssignable(new Client('', { + connect: { rejectUnauthorized: false } +})) +expectAssignable(new Client(new URL('http://localhost'), {})) + +/** + * Tests for Client.Options: + */ +{ + expectAssignable(new Client('', { + maxHeaderSize: 16384 + })) + expectAssignable(new Client('', { + headersTimeout: 300e3 + })) + expectAssignable(new Client('', { + connectTimeout: 300e3 + })) + expectAssignable(new Client('', { + bodyTimeout: 300e3 + })) + expectAssignable(new Client('', { + keepAliveTimeout: 4e3 + })) + expectAssignable(new Client('', { + keepAliveMaxTimeout: 600e3 + })) + expectAssignable(new Client('', { + keepAliveTimeoutThreshold: 1e3 + })) + expectAssignable(new Client('', { + socketPath: '/var/run/docker.sock' + })) + expectAssignable(new Client('', { + pipelining: 1 + })) + expectAssignable(new Client('', { + strictContentLength: true + })) + expectAssignable(new Client('', { + maxCachedSessions: 1 + })) + expectAssignable(new Client('', { + maxRedirections: 1 + })) + expectAssignable(new Client('', { + maxRequestsPerClient: 1 + })) + expectAssignable(new Client('', { + localAddress: '127.0.0.1' + })) + expectAssignable(new Client('', { + maxResponseSize: -1 + })) + expectAssignable(new Client('', { + autoSelectFamily: true + })) + expectAssignable(new Client('', { + autoSelectFamilyAttemptTimeout: 300e3 + })) + expectAssignable(new Client('', { + interceptors: { + Client: [(dispatcher) => { + expectAssignable(dispatcher); + return (opts, handlers) => { + expectAssignable(opts); + expectAssignable(handlers); + return dispatcher(opts, handlers) + } + }] + } + })) +} + +{ + const client = new Client('') + + // properties + expectAssignable(client.pipelining) + 
expectAssignable(client.closed) + expectAssignable(client.destroyed) + + // request + expectAssignable>(client.request({ origin: '', path: '', method: 'GET' })) + expectAssignable>(client.request({ origin: new URL('http://localhost:3000'), path: '', method: 'GET' })) + expectAssignable(client.request({ origin: '', path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + expectAssignable(client.request({ origin: new URL('http://localhost:3000'), path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // stream + expectAssignable>(client.stream({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable>(client.stream({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable(client.stream( + { origin: '', path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + expectAssignable(client.stream( + { origin: new URL('http://localhost'), path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + + // pipeline + expectAssignable(client.pipeline({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + expectAssignable(client.pipeline({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + + // upgrade + expectAssignable>(client.upgrade({ path: '' })) + expectAssignable>(client.upgrade({ path: '', headers: [] })) + expectAssignable>(client.upgrade({ path: '', headers: {} })) + expectAssignable>(client.upgrade({ path: '', headers: null })) + expectAssignable(client.upgrade({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // connect + expectAssignable>(client.connect({ path: '' })) + expectAssignable>(client.connect({ path: '', headers: [] })) + expectAssignable>(client.connect({ path: '', headers: {} })) + expectAssignable>(client.connect({ path: '', headers: null })) + expectAssignable(client.connect({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // dispatch + expectAssignable(client.dispatch({ origin: '', path: '', method: 'GET' }, {})) + expectAssignable(client.dispatch({ origin: '', path: '', method: 'GET', headers: [] }, {})) + expectAssignable(client.dispatch({ origin: '', path: '', method: 'GET', headers: {} }, {})) + expectAssignable(client.dispatch({ origin: '', path: '', method: 'GET', headers: null }, {})) + expectAssignable(client.dispatch({ origin: new URL('http://localhost'), path: '', method: 'GET' }, {})) + + // close + expectAssignable>(client.close()) + expectAssignable(client.close(() => {})) + + // destroy + expectAssignable>(client.destroy()) + expectAssignable>(client.destroy(new Error())) + expectAssignable>(client.destroy(null)) + expectAssignable(client.destroy(() => {})) + expectAssignable(client.destroy(new Error(), () => {})) + expectAssignable(client.destroy(null, () => {})) +} diff --git a/test/types/connector.test-d.ts b/test/types/connector.test-d.ts new file mode 100644 index 0000000..9236569 --- /dev/null +++ b/test/types/connector.test-d.ts @@ -0,0 +1,38 @@ +import {expectAssignable} from 
'tsd' +import { Client, buildConnector } from '../..' +import {ConnectionOptions, TLSSocket} from 'tls' +import {Socket} from 'net' +import {IpcNetConnectOpts, NetConnectOpts, TcpNetConnectOpts} from "net"; + +const connector = buildConnector({ rejectUnauthorized: false, allowH2: false }) +expectAssignable(new Client('', { + connect (opts: buildConnector.Options, cb: buildConnector.Callback) { + connector(opts, (...args) => { + if (args[0]) { + return cb(args[0], null) + } + if (args[1] instanceof TLSSocket) { + if (args[1].getPeerCertificate().fingerprint256 !== 'FO:OB:AR') { + args[1].destroy() + return cb(new Error('Fingerprint does not match'), null) + } + } + return cb(null, args[1]) + }) + } +})) + +expectAssignable({ + checkServerIdentity: () => undefined, // Test if ConnectionOptions is assignable + localPort: 1234, // Test if TcpNetConnectOpts is assignable + keepAlive: true, + keepAliveInitialDelay: 12345, +}); + +expectAssignable({ + protocol: "http", + hostname: "example.com", + port: "", + localAddress: "127.0.0.1", + httpSocket: new Socket(), +}); diff --git a/test/types/diagnostics-channel.test-d.ts b/test/types/diagnostics-channel.test-d.ts new file mode 100644 index 0000000..334404c --- /dev/null +++ b/test/types/diagnostics-channel.test-d.ts @@ -0,0 +1,72 @@ +import { Socket } from "net"; +import { expectAssignable } from "tsd"; +import { DiagnosticsChannel, buildConnector } from "../.."; + +const request = { + origin: "", + completed: true, + method: "GET" as const, + path: "", + headers: "", + addHeader: (key: string, value: string) => { + return request; + }, +}; + +const response = { + statusCode: 200, + statusText: "OK", + headers: [Buffer.from(""), Buffer.from("")], +}; + +const connectParams = { + host: "", + hostname: "", + protocol: "", + port: "", + servername: "", +}; + +expectAssignable({ request }); +expectAssignable({ request }); +expectAssignable({ + request, + response, +}); +expectAssignable({ + request, + trailers: [Buffer.from(""), Buffer.from("")], +}); +expectAssignable({ + request, + error: new Error("Error"), +}); +expectAssignable({ + request, + headers: "", + socket: new Socket(), +}); +expectAssignable({ + connectParams, + connector: ( + options: buildConnector.Options, + callback: buildConnector.Callback + ) => new Socket(), +}); +expectAssignable({ + socket: new Socket(), + connectParams, + connector: ( + options: buildConnector.Options, + callback: buildConnector.Callback + ) => new Socket(), +}); +expectAssignable({ + error: new Error("Error"), + socket: new Socket(), + connectParams, + connector: ( + options: buildConnector.Options, + callback: buildConnector.Callback + ) => new Socket(), +}); diff --git a/test/types/dispatcher.events.test-d.ts b/test/types/dispatcher.events.test-d.ts new file mode 100644 index 0000000..71057e7 --- /dev/null +++ b/test/types/dispatcher.events.test-d.ts @@ -0,0 +1,45 @@ +import { Dispatcher } from '../..' 
+import {expectAssignable} from "tsd"; +import {URL} from "url"; +import Errors from "../../types/errors"; + +interface EventHandler { + connect(origin: URL, targets: readonly Dispatcher[]): void + disconnect(origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError): void + connectionError(origin: URL, targets: readonly Dispatcher[], error: Errors.UndiciError): void + drain(origin: URL): void +} + +{ + const dispatcher = new Dispatcher() + const eventHandler: EventHandler = {} as EventHandler + + expectAssignable(dispatcher.rawListeners('connect')) + expectAssignable(dispatcher.rawListeners('disconnect')) + expectAssignable(dispatcher.rawListeners('connectionError')) + expectAssignable(dispatcher.rawListeners('drain')) + + expectAssignable(dispatcher.listeners('connect')) + expectAssignable(dispatcher.listeners('disconnect')) + expectAssignable(dispatcher.listeners('connectionError')) + expectAssignable(dispatcher.listeners('drain')) + + const eventHandlerMethods: ['on', 'once', 'off', 'addListener', "removeListener", "prependListener", "prependOnceListener"] + = ['on', 'once', 'off', 'addListener', "removeListener", "prependListener", "prependOnceListener"] + + for (const method of eventHandlerMethods) { + expectAssignable(dispatcher[method]('connect', eventHandler["connect"])) + expectAssignable(dispatcher[method]('disconnect', eventHandler["disconnect"])) + expectAssignable(dispatcher[method]('connectionError', eventHandler["connectionError"])) + expectAssignable(dispatcher[method]('drain', eventHandler["drain"])) + } + + const origin = new URL('') + const targets = new Array() + const error = new Errors.UndiciError() + expectAssignable(dispatcher.emit('connect', origin, targets)) + expectAssignable(dispatcher.emit('disconnect', origin, targets, error)) + expectAssignable(dispatcher.emit('connectionError', origin, targets, error)) + expectAssignable(dispatcher.emit('drain', origin)) +} + diff --git a/test/types/dispatcher.test-d.ts b/test/types/dispatcher.test-d.ts new file mode 100644 index 0000000..cd4ebfd --- /dev/null +++ b/test/types/dispatcher.test-d.ts @@ -0,0 +1,123 @@ +import { IncomingHttpHeaders } from 'http' +import { Duplex, Readable, Writable } from 'stream' +import { expectAssignable, expectType } from 'tsd' +import { Dispatcher } from '../..' 
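// A short runtime sketch of the request/body-mixin flow exercised by the
// assertions below, assuming some dispatcher instance and a placeholder
// http://localhost:3000 origin.
async function dispatcherSketch (dispatcher: Dispatcher): Promise<unknown> {
  const { statusCode, body } = await dispatcher.request({
    origin: 'http://localhost:3000',
    path: '/resource',
    method: 'GET'
  })
  if (statusCode !== 200) {
    throw new Error(`unexpected status code ${statusCode}`)
  }
  return body.json()
}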
+import { URL } from 'url' +import { Blob } from 'buffer' + +expectAssignable(new Dispatcher()) + +{ + const dispatcher = new Dispatcher() + + const nodeCoreHeaders = { + authorization: undefined, + ['content-type']: 'application/json' + } satisfies IncomingHttpHeaders; + + // dispatch + expectAssignable(dispatcher.dispatch({ path: '', method: 'GET' }, {})) + expectAssignable(dispatcher.dispatch({ origin: '', path: '', method: 'GET' }, {})) + expectAssignable(dispatcher.dispatch({ origin: '', path: '', method: 'GET', headers: { authorization: undefined } }, {})) + expectAssignable(dispatcher.dispatch({ origin: '', path: '', method: 'GET', headers: [] }, {})) + expectAssignable(dispatcher.dispatch({ origin: '', path: '', method: 'GET', headers: {} }, {})) + expectAssignable(dispatcher.dispatch({ origin: '', path: '', method: 'GET', headers: nodeCoreHeaders }, {})) + expectAssignable(dispatcher.dispatch({ origin: '', path: '', method: 'GET', headers: null, reset: true }, {})) + expectAssignable(dispatcher.dispatch({ origin: new URL('http://localhost'), path: '', method: 'GET' }, {})) + + // connect + expectAssignable>(dispatcher.connect({ path: '', maxRedirections: 0 })) + expectAssignable(dispatcher.connect({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // request + expectAssignable>(dispatcher.request({ origin: '', path: '', method: 'GET', maxRedirections: 0 })) + expectAssignable>(dispatcher.request({ origin: '', path: '', method: 'GET', maxRedirections: 0, query: {} })) + expectAssignable>(dispatcher.request({ origin: '', path: '', method: 'GET', maxRedirections: 0, query: { pageNum: 1, id: 'abc' } })) + expectAssignable>(dispatcher.request({ origin: '', path: '', method: 'GET', maxRedirections: 0, throwOnError: true })) + expectAssignable>(dispatcher.request({ origin: new URL('http://localhost'), path: '', method: 'GET' })) + expectAssignable(dispatcher.request({ origin: '', path: '', method: 'GET', reset: true }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + expectAssignable(dispatcher.request({ origin: new URL('http://localhost'), path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // pipeline + expectAssignable(dispatcher.pipeline({ origin: '', path: '', method: 'GET', maxRedirections: 0 }, data => { + expectAssignable(data) + return new Readable() + })) + expectAssignable(dispatcher.pipeline({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + + // stream + expectAssignable>(dispatcher.stream({ origin: '', path: '', method: 'GET', maxRedirections: 0 }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable>(dispatcher.stream({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable(dispatcher.stream( + { origin: '', path: '', method: 'GET', reset: false }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + expectAssignable(dispatcher.stream( + { origin: new URL('http://localhost'), path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + + // upgrade + expectAssignable>(dispatcher.upgrade({ path: '', maxRedirections: 0 })) + 
expectAssignable(dispatcher.upgrade({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // close + expectAssignable>(dispatcher.close()) + expectAssignable(dispatcher.close(() => {})) + + // destroy + expectAssignable>(dispatcher.destroy()) + expectAssignable>(dispatcher.destroy(new Error())) + expectAssignable>(dispatcher.destroy(null)) + expectAssignable(dispatcher.destroy(() => {})) + expectAssignable(dispatcher.destroy(new Error(), () => {})) + expectAssignable(dispatcher.destroy(null, () => {})) +} + +declare const { body }: Dispatcher.ResponseData; + +{ + // body mixin tests + expectType(body.body) + expectType(body.bodyUsed) + expectType>(body.arrayBuffer()) + expectType>(body.blob()) + expectType>(body.formData()) + expectType>(body.text()) + expectType>(body.json()) +} diff --git a/test/types/errors.test-d.ts b/test/types/errors.test-d.ts new file mode 100644 index 0000000..837dbf8 --- /dev/null +++ b/test/types/errors.test-d.ts @@ -0,0 +1,115 @@ +import { expectAssignable } from 'tsd' +import { errors } from '../..' +import Client from '../../types/client' +import { IncomingHttpHeaders } from "../../types/header"; + +expectAssignable(new errors.UndiciError()) +expectAssignable(new errors.UndiciError().name) +expectAssignable(new errors.UndiciError().code) + +expectAssignable(new errors.ConnectTimeoutError()) +expectAssignable(new errors.ConnectTimeoutError()) +expectAssignable<'ConnectTimeoutError'>(new errors.ConnectTimeoutError().name) +expectAssignable<'UND_ERR_CONNECT_TIMEOUT'>(new errors.ConnectTimeoutError().code) + +expectAssignable(new errors.HeadersTimeoutError()) +expectAssignable(new errors.HeadersTimeoutError()) +expectAssignable<'HeadersTimeoutError'>(new errors.HeadersTimeoutError().name) +expectAssignable<'UND_ERR_HEADERS_TIMEOUT'>(new errors.HeadersTimeoutError().code) + +expectAssignable(new errors.HeadersOverflowError()) +expectAssignable(new errors.HeadersOverflowError()) +expectAssignable<'HeadersOverflowError'>(new errors.HeadersOverflowError().name) +expectAssignable<'UND_ERR_HEADERS_OVERFLOW'>(new errors.HeadersOverflowError().code) + +expectAssignable(new errors.BodyTimeoutError()) +expectAssignable(new errors.BodyTimeoutError()) +expectAssignable<'BodyTimeoutError'>(new errors.BodyTimeoutError().name) +expectAssignable<'UND_ERR_BODY_TIMEOUT'>(new errors.BodyTimeoutError().code) + +expectAssignable(new errors.ResponseStatusCodeError()) +expectAssignable(new errors.ResponseStatusCodeError()) +expectAssignable<'ResponseStatusCodeError'>(new errors.ResponseStatusCodeError().name) +expectAssignable<'UND_ERR_RESPONSE_STATUS_CODE'>(new errors.ResponseStatusCodeError().code) +expectAssignable(new errors.ResponseStatusCodeError().status) +expectAssignable(new errors.ResponseStatusCodeError().statusCode) +expectAssignable(new errors.ResponseStatusCodeError().headers) +expectAssignable | string>(new errors.ResponseStatusCodeError().body) + +expectAssignable(new errors.InvalidArgumentError()) +expectAssignable(new errors.InvalidArgumentError()) +expectAssignable<'InvalidArgumentError'>(new errors.InvalidArgumentError().name) +expectAssignable<'UND_ERR_INVALID_ARG'>(new errors.InvalidArgumentError().code) + +expectAssignable(new errors.InvalidReturnValueError()) +expectAssignable(new errors.InvalidReturnValueError()) +expectAssignable<'InvalidReturnValueError'>(new errors.InvalidReturnValueError().name) +expectAssignable<'UND_ERR_INVALID_RETURN_VALUE'>(new errors.InvalidReturnValueError().code) + +expectAssignable(new 
errors.RequestAbortedError()) +expectAssignable(new errors.RequestAbortedError()) +expectAssignable<'AbortError'>(new errors.RequestAbortedError().name) +expectAssignable<'UND_ERR_ABORTED'>(new errors.RequestAbortedError().code) + +expectAssignable(new errors.InformationalError()) +expectAssignable(new errors.InformationalError()) +expectAssignable<'InformationalError'>(new errors.InformationalError().name) +expectAssignable<'UND_ERR_INFO'>(new errors.InformationalError().code) + +expectAssignable(new errors.RequestContentLengthMismatchError()) +expectAssignable(new errors.RequestContentLengthMismatchError()) +expectAssignable<'RequestContentLengthMismatchError'>(new errors.RequestContentLengthMismatchError().name) +expectAssignable<'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH'>(new errors.RequestContentLengthMismatchError().code) + +expectAssignable(new errors.ResponseContentLengthMismatchError()) +expectAssignable(new errors.ResponseContentLengthMismatchError()) +expectAssignable<'ResponseContentLengthMismatchError'>(new errors.ResponseContentLengthMismatchError().name) +expectAssignable<'UND_ERR_RES_CONTENT_LENGTH_MISMATCH'>(new errors.ResponseContentLengthMismatchError().code) + +expectAssignable(new errors.ClientDestroyedError()) +expectAssignable(new errors.ClientDestroyedError()) +expectAssignable<'ClientDestroyedError'>(new errors.ClientDestroyedError().name) +expectAssignable<'UND_ERR_DESTROYED'>(new errors.ClientDestroyedError().code) + +expectAssignable(new errors.ClientClosedError()) +expectAssignable(new errors.ClientClosedError()) +expectAssignable<'ClientClosedError'>(new errors.ClientClosedError().name) +expectAssignable<'UND_ERR_CLOSED'>(new errors.ClientClosedError().code) + +expectAssignable(new errors.SocketError()) +expectAssignable(new errors.SocketError()) +expectAssignable<'SocketError'>(new errors.SocketError().name) +expectAssignable<'UND_ERR_SOCKET'>(new errors.SocketError().code) +expectAssignable(new errors.SocketError().socket) + +expectAssignable(new errors.NotSupportedError()) +expectAssignable(new errors.NotSupportedError()) +expectAssignable<'NotSupportedError'>(new errors.NotSupportedError().name) +expectAssignable<'UND_ERR_NOT_SUPPORTED'>(new errors.NotSupportedError().code) + +expectAssignable(new errors.BalancedPoolMissingUpstreamError()) +expectAssignable(new errors.BalancedPoolMissingUpstreamError()) +expectAssignable<'MissingUpstreamError'>(new errors.BalancedPoolMissingUpstreamError().name) +expectAssignable<'UND_ERR_BPL_MISSING_UPSTREAM'>(new errors.BalancedPoolMissingUpstreamError().code) + +expectAssignable(new errors.HTTPParserError()) +expectAssignable(new errors.HTTPParserError()) +expectAssignable<'HTTPParserError'>(new errors.HTTPParserError().name) + +expectAssignable(new errors.ResponseExceededMaxSizeError()) +expectAssignable(new errors.ResponseExceededMaxSizeError()) +expectAssignable<'ResponseExceededMaxSizeError'>(new errors.ResponseExceededMaxSizeError().name) +expectAssignable<'UND_ERR_RES_EXCEEDED_MAX_SIZE'>(new errors.ResponseExceededMaxSizeError().code) + +{ + // @ts-ignore + function f (): errors.HeadersTimeoutError | errors.ConnectTimeoutError { return } + + const e = f() + + if (e.code === 'UND_ERR_HEADERS_TIMEOUT') { + expectAssignable(e) + } else if (e.code === 'UND_ERR_CONNECT_TIMEOUT') { + expectAssignable(e) + } +} diff --git a/test/types/fetch.test-d.ts b/test/types/fetch.test-d.ts new file mode 100644 index 0000000..59fb49f --- /dev/null +++ b/test/types/fetch.test-d.ts @@ -0,0 +1,173 @@ +import { URL } from 'url' +import { 
Blob } from 'buffer' +import { ReadableStream } from 'stream/web' +import { expectType, expectError, expectAssignable, expectNotAssignable } from 'tsd' +import { + Agent, + BodyInit, + fetch, + FormData, + Headers, + HeadersInit, + SpecIterableIterator, + Request, + RequestCache, + RequestCredentials, + RequestDestination, + RequestInit, + RequestMode, + RequestRedirect, + Response, + ResponseInit, + ResponseType, + ReferrerPolicy +} from '../..' + +const requestInit: RequestInit = {} +const responseInit: ResponseInit = { status: 200, statusText: 'OK' } +const requestInit2: RequestInit = { + dispatcher: new Agent() +} +const requestInit3: RequestInit = {} +// Test assignment. See https://github.com/whatwg/fetch/issues/1445 +requestInit3.credentials = 'include' + +declare const request: Request +declare const headers: Headers +declare const response: Response + +expectType(requestInit.method) +expectType(requestInit.keepalive) +expectType(requestInit.headers) +expectType(requestInit.body) +expectType(requestInit.redirect) +expectType(requestInit.integrity) +expectType(requestInit.signal) +expectType(requestInit.credentials) +expectType(requestInit.mode) +expectType(requestInit.referrer); +expectType(requestInit.referrerPolicy) +expectType(requestInit.window) + +expectType(responseInit.status) +expectType(responseInit.statusText) +expectType(responseInit.headers) + +expectType(new Headers()) +expectType(new Headers({})) +expectType(new Headers([])) +expectType(new Headers(headers)) +expectType(new Headers(undefined)) + +expectType(new Request(request)) +expectType(new Request('https://example.com')) +expectType(new Request(new URL('https://example.com'))) +expectType(new Request(request, requestInit)) +expectType(new Request('https://example.com', requestInit)) +expectType(new Request(new URL('https://example.com'), requestInit)) + +expectType>(fetch(request)) +expectType>(fetch('https://example.com')) +expectType>(fetch(new URL('https://example.com'))) +expectType>(fetch(request, requestInit)) +expectType>(fetch('https://example.com', requestInit)) +expectType>(fetch(new URL('https://example.com'), requestInit)) + +expectType(new Response()) +expectType(new Response(null)) +expectType(new Response('string')) +expectType(new Response(new Blob([]))) +expectType(new Response(new FormData())) +expectType(new Response(new Int8Array())) +expectType(new Response(new Uint8Array())) +expectType(new Response(new Uint8ClampedArray())) +expectType(new Response(new Int16Array())) +expectType(new Response(new Uint16Array())) +expectType(new Response(new Int32Array())) +expectType(new Response(new Uint32Array())) +expectType(new Response(new Float32Array())) +expectType(new Response(new Float64Array())) +expectType(new Response(new BigInt64Array())) +expectType(new Response(new BigUint64Array())) +expectType(new Response(new ArrayBuffer(0))) +expectType(new Response(null, responseInit)) +expectType(new Response('string', responseInit)) +expectType(new Response(new Blob([]), responseInit)) +expectType(new Response(new FormData(), responseInit)) +expectType(new Response(new Int8Array(), responseInit)) +expectType(new Response(new Uint8Array(), responseInit)) +expectType(new Response(new Uint8ClampedArray(), responseInit)) +expectType(new Response(new Int16Array(), responseInit)) +expectType(new Response(new Uint16Array(), responseInit)) +expectType(new Response(new Int32Array(), responseInit)) +expectType(new Response(new Uint32Array(), responseInit)) +expectType(new Response(new Float32Array(), 
responseInit)) +expectType(new Response(new Float64Array(), responseInit)) +expectType(new Response(new BigInt64Array(), responseInit)) +expectType(new Response(new BigUint64Array(), responseInit)) +expectType(new Response(new ArrayBuffer(0), responseInit)) +expectType(Response.error()) +expectType(Response.json({ a: 'b' })) +expectType(Response.json({}, { status: 200 })) +expectType(Response.json({}, { statusText: 'OK' })) +expectType(Response.json({}, { headers: {} })) +expectType(Response.json(null)) +expectType(Response.redirect('https://example.com', 301)) +expectType(Response.redirect('https://example.com', 302)) +expectType(Response.redirect('https://example.com', 303)) +expectType(Response.redirect('https://example.com', 307)) +expectType(Response.redirect('https://example.com', 308)) +expectError(Response.redirect('https://example.com', NaN)) +expectError(Response.json()) +expectError(Response.json(null, 3)) + +expectType(headers.append('key', 'value')) +expectType(headers.delete('key')) +expectType(headers.get('key')) +expectType(headers.has('key')) +expectType(headers.set('key', 'value')) +expectType>(headers.keys()) +expectType>(headers.values()) +expectType>(headers.entries()) + +expectType(request.cache) +expectType(request.credentials) +expectType(request.destination) +expectType(request.headers) +expectType(request.integrity) +expectType(request.method) +expectType(request.mode) +expectType(request.redirect) +expectType(request.referrerPolicy) +expectType(request.url) +expectType(request.keepalive) +expectType(request.signal) +expectType(request.bodyUsed) +expectType>(request.arrayBuffer()) +expectType>(request.blob()) +expectType>(request.formData()) +expectType>(request.json()) +expectType>(request.text()) +expectType(request.clone()) + +expectType(response.headers) +expectType(response.ok) +expectType(response.status) +expectType(response.statusText) +expectType(response.type) +expectType(response.url) +expectType(response.redirected) +expectType(response.body) +expectType(response.bodyUsed) +expectType>(response.arrayBuffer()) +expectType>(response.blob()) +expectType>(response.formData()) +expectType>(response.json()) +expectType>(response.text()) +expectType(response.clone()) + +expectType(new Request('https://example.com', { body: 'Hello, world', duplex: 'half' })) +expectAssignable({ duplex: 'half' }) +expectNotAssignable({ duplex: 'not valid' }) + +expectType(headers.getSetCookie()) diff --git a/test/types/formdata.test-d.ts b/test/types/formdata.test-d.ts new file mode 100644 index 0000000..79058c4 --- /dev/null +++ b/test/types/formdata.test-d.ts @@ -0,0 +1,27 @@ +import { Blob } from 'buffer' +import { Readable } from 'stream' +import { expectAssignable, expectType } from 'tsd' +import { File, FormData, SpecIterableIterator } from '../..' 
+import Dispatcher from '../../types/dispatcher' + +declare const dispatcherOptions: Dispatcher.DispatchOptions + +declare const blob: Blob +const formData = new FormData() +expectType(formData) + +expectType(formData.append('key', 'value')) +expectType(formData.append('key', blob)) +expectType(formData.set('key', 'value')) +expectType(formData.set('key', blob)) +expectType(formData.get('key')) +expectType(formData.get('key')) +expectType>(formData.getAll('key')) +expectType>(formData.getAll('key')) +expectType(formData.has('key')) +expectType(formData.delete('key')) +expectAssignable>(formData.keys()) +expectAssignable>(formData.values()) +expectAssignable>(formData.entries()) +expectAssignable>(formData[Symbol.iterator]()) +expectAssignable(dispatcherOptions.body) diff --git a/test/types/global-dispatcher.test-d.ts b/test/types/global-dispatcher.test-d.ts new file mode 100644 index 0000000..428b809 --- /dev/null +++ b/test/types/global-dispatcher.test-d.ts @@ -0,0 +1,12 @@ +import { expectAssignable } from 'tsd' +import { setGlobalDispatcher, Dispatcher, getGlobalDispatcher } from '../..' + +{ + expectAssignable(setGlobalDispatcher(new Dispatcher())) + class CustomDispatcher extends Dispatcher {} + expectAssignable(setGlobalDispatcher(new CustomDispatcher())) +} + +{ + expectAssignable(getGlobalDispatcher()) +} diff --git a/test/types/header.test-d.ts b/test/types/header.test-d.ts new file mode 100644 index 0000000..38ac9f6 --- /dev/null +++ b/test/types/header.test-d.ts @@ -0,0 +1,16 @@ +import { IncomingHttpHeaders as CoreIncomingHttpHeaders } from "http"; +import { expectAssignable, expectNotAssignable } from "tsd"; +import { IncomingHttpHeaders } from "../../types/header"; + +const headers = { + authorization: undefined, + ["content-type"]: "application/json", +} satisfies CoreIncomingHttpHeaders; + +expectAssignable(headers); + +// It is why we do not need to add ` | null` to `IncomingHttpHeaders`: +expectNotAssignable({ + authorization: null, + ["content-type"]: "application/json", +}); diff --git a/test/types/index.test-d.ts b/test/types/index.test-d.ts new file mode 100644 index 0000000..3827e61 --- /dev/null +++ b/test/types/index.test-d.ts @@ -0,0 +1,23 @@ +import { expectAssignable } from 'tsd' +import Undici, {Pool, Client, errors, fetch, Interceptable, RedirectHandler, DecoratorHandler, Headers, Response, Request, FormData, File, FileReader} from '../..' 
+import Dispatcher from "../../types/dispatcher"; + +expectAssignable(new Undici.Pool('', {})) +expectAssignable(new Undici.Client('', {})) +expectAssignable(new Undici.MockAgent().get('')) +expectAssignable(Undici.errors) +expectAssignable(Undici.fetch) +expectAssignable(Undici.Headers) +expectAssignable(Undici.Response) +expectAssignable(Undici.Request) +expectAssignable(Undici.FormData) +expectAssignable(Undici.File) +expectAssignable(Undici.FileReader) + +const client = new Undici.Client('', {}) +const handler: Dispatcher.DispatchHandlers = {} + +expectAssignable(new Undici.RedirectHandler(client, 10, { + path: '/', method: 'GET' +}, handler)) +expectAssignable(new Undici.DecoratorHandler(handler)) diff --git a/test/types/interceptor.test-d.ts b/test/types/interceptor.test-d.ts new file mode 100644 index 0000000..ba242bf --- /dev/null +++ b/test/types/interceptor.test-d.ts @@ -0,0 +1,5 @@ +import {expectAssignable} from "tsd"; +import Undici from "../.."; +import Dispatcher from "../../types/dispatcher"; + +expectAssignable(Undici.createRedirectInterceptor({ maxRedirections: 3 })) diff --git a/test/types/mock-agent.test-d.ts b/test/types/mock-agent.test-d.ts new file mode 100644 index 0000000..5f7f968 --- /dev/null +++ b/test/types/mock-agent.test-d.ts @@ -0,0 +1,75 @@ +import {expectAssignable, expectType} from 'tsd' +import {Agent, Dispatcher, MockAgent, MockClient, MockPool, setGlobalDispatcher} from '../..' +import {MockInterceptor} from '../../types/mock-interceptor' +import MockDispatch = MockInterceptor.MockDispatch; + +expectAssignable(new MockAgent()) +expectAssignable(new MockAgent({})) + +{ + const mockAgent = new MockAgent() + expectAssignable(setGlobalDispatcher(mockAgent)) + + // get + expectAssignable(mockAgent.get('')) + expectAssignable(mockAgent.get(new RegExp(''))) + expectAssignable(mockAgent.get((origin) => { + expectAssignable(origin) + return true + })) + expectAssignable(mockAgent.get('')) + + // close + expectAssignable>(mockAgent.close()) + + // deactivate + expectAssignable(mockAgent.deactivate()) + + // activate + expectAssignable(mockAgent.activate()) + + // enableNetConnect + expectAssignable(mockAgent.enableNetConnect()) + expectAssignable(mockAgent.enableNetConnect('')) + expectAssignable(mockAgent.enableNetConnect(new RegExp(''))) + expectAssignable(mockAgent.enableNetConnect((host) => { + expectAssignable(host) + return true + })) + + // disableNetConnect + expectAssignable(mockAgent.disableNetConnect()) + + // dispatch + expectAssignable(mockAgent.dispatch({origin: '', path: '', method: 'GET'}, {})) + + // intercept + expectAssignable((mockAgent.get('foo')).intercept({path: '', method: 'GET'})) +} + +{ + const mockAgent = new MockAgent({connections: 1}) + expectAssignable(setGlobalDispatcher(mockAgent)) + expectAssignable(mockAgent.get('')) +} + +{ + const agent = new Agent() + const mockAgent = new MockAgent({agent}) + expectAssignable(setGlobalDispatcher(mockAgent)) + expectAssignable(mockAgent.get('')) +} + +{ + interface PendingInterceptor extends MockDispatch { + origin: string; + } + + const agent = new MockAgent({agent: new Agent()}) + expectType<() => PendingInterceptor[]>(agent.pendingInterceptors) + expectType<(options?: { + pendingInterceptorsFormatter?: { + format(pendingInterceptors: readonly PendingInterceptor[]): string; + } + }) => void>(agent.assertNoPendingInterceptors) +} diff --git a/test/types/mock-client.test-d.ts b/test/types/mock-client.test-d.ts new file mode 100644 index 0000000..9e92b8e --- /dev/null +++ 
b/test/types/mock-client.test-d.ts @@ -0,0 +1,43 @@ +import { expectAssignable } from 'tsd' +import { MockAgent, MockClient } from '../..' +import { MockInterceptor } from '../../types/mock-interceptor' + +{ + const mockClient: MockClient = new MockAgent({ connections: 1 }).get('') + + // intercept + expectAssignable(mockClient.intercept({ path: '', method: 'GET' })) + expectAssignable(mockClient.intercept({ path: '', method: 'GET', body: '', headers: { 'User-Agent': '' } })) + expectAssignable(mockClient.intercept({ path: '', method: 'GET', query: { id: 1 } })) + expectAssignable(mockClient.intercept({ path: new RegExp(''), method: new RegExp(''), body: new RegExp(''), headers: { 'User-Agent': new RegExp('') } })) + expectAssignable(mockClient.intercept({ + path: (path) => { + expectAssignable(path) + return true + }, + method: (method) => { + expectAssignable(method) + return true + }, + body: (body) => { + expectAssignable(body) + return true + }, + headers: { + 'User-Agent': (header) => { + expectAssignable(header) + return true + } + } + })) + + // dispatch + expectAssignable(mockClient.dispatch({ origin: '', path: '', method: 'GET' }, {})) + + // close + expectAssignable>(mockClient.close()) +} + +{ + expectAssignable(new MockClient('', {agent: new MockAgent({ connections: 1})})) +} diff --git a/test/types/mock-errors.test-d.ts b/test/types/mock-errors.test-d.ts new file mode 100644 index 0000000..2cf3e5e --- /dev/null +++ b/test/types/mock-errors.test-d.ts @@ -0,0 +1,19 @@ +import { expectAssignable } from 'tsd' +import { mockErrors, errors } from '../..' + +expectAssignable(new mockErrors.MockNotMatchedError()) +expectAssignable(new mockErrors.MockNotMatchedError()) +expectAssignable(new mockErrors.MockNotMatchedError('kaboom')) +expectAssignable<'MockNotMatchedError'>(new mockErrors.MockNotMatchedError().name) +expectAssignable<'UND_MOCK_ERR_MOCK_NOT_MATCHED'>(new mockErrors.MockNotMatchedError().code) + +{ + // @ts-ignore + function f (): mockErrors.MockNotMatchedError { return } + + const e = f() + + if (e.code === 'UND_MOCK_ERR_MOCK_NOT_MATCHED') { + expectAssignable(e) + } +} diff --git a/test/types/mock-interceptor.test-d.ts b/test/types/mock-interceptor.test-d.ts new file mode 100644 index 0000000..24d29e1 --- /dev/null +++ b/test/types/mock-interceptor.test-d.ts @@ -0,0 +1,80 @@ +import { expectAssignable } from 'tsd' +import { MockAgent, MockPool, BodyInit, Dispatcher } from '../..' 
+import { MockInterceptor, MockScope } from '../../types/mock-interceptor' + +declare const mockResponseCallbackOptions: MockInterceptor.MockResponseCallbackOptions; + +expectAssignable(mockResponseCallbackOptions.body) + +{ + const mockPool: MockPool = new MockAgent().get('') + const mockInterceptor = mockPool.intercept({ path: '', method: 'GET' }) + const mockInterceptorDefaultMethod = mockPool.intercept({ path: '' }) + + // reply + expectAssignable(mockInterceptor.reply(200)) + expectAssignable(mockInterceptor.reply(200, '')) + expectAssignable(mockInterceptor.reply(200, Buffer)) + expectAssignable(mockInterceptor.reply(200, {})) + expectAssignable(mockInterceptor.reply(200, () => ({}))) + expectAssignable(mockInterceptor.reply(200, {}, {})) + expectAssignable(mockInterceptor.reply(200, () => ({}), {})) + expectAssignable(mockInterceptor.reply(200, {}, { headers: { foo: 'bar' }})) + expectAssignable(mockInterceptor.reply(200, () => ({}), { headers: { foo: 'bar' }})) + expectAssignable(mockInterceptor.reply(200, {}, { trailers: { foo: 'bar' }})) + expectAssignable(mockInterceptor.reply(200, () => ({}), { trailers: { foo: 'bar' }})) + expectAssignable>(mockInterceptor.reply<{ foo: string }>(200, { foo: 'bar' })) + expectAssignable>(mockInterceptor.reply<{ foo: string }>(200, () => ({ foo: 'bar' }))) + expectAssignable(mockInterceptor.reply(() => ({ statusCode: 200, data: { foo: 'bar' }}))) + expectAssignable(mockInterceptor.reply(() => ({ statusCode: 200, data: { foo: 'bar' }, responseOptions: { + headers: { foo: 'bar' } + }}))) + expectAssignable(mockInterceptor.reply((options) => { + expectAssignable(options); + return { statusCode: 200, data: { foo: 'bar'} + }})) + expectAssignable(mockInterceptor.reply(() => ({ statusCode: 200, data: { foo: 'bar' }, responseOptions: { + trailers: { foo: 'bar' } + }}))) + mockInterceptor.reply((options) => { + expectAssignable(options.headers); + return { statusCode: 200, data: { foo: 'bar' } } + }) + + // replyWithError + class CustomError extends Error { + hello(): void {} + } + expectAssignable(mockInterceptor.replyWithError(new Error(''))) + expectAssignable(mockInterceptor.replyWithError(new CustomError(''))) + + // defaultReplyHeaders + expectAssignable(mockInterceptor.defaultReplyHeaders({ foo: 'bar' })) + + // defaultReplyTrailers + expectAssignable(mockInterceptor.defaultReplyTrailers({ foo: 'bar' })) + + // replyContentLength + expectAssignable(mockInterceptor.replyContentLength()) +} + +{ + const mockPool: MockPool = new MockAgent().get('') + const mockScope = mockPool.intercept({ path: '', method: 'GET' }).reply(200) + + // delay + expectAssignable(mockScope.delay(1)) + + // persist + expectAssignable(mockScope.persist()) + + // times + expectAssignable(mockScope.times(2)) +} + +{ + const mockPool: MockPool = new MockAgent().get('') + mockPool.intercept({ path: '', method: 'GET', headers: () => true }) + mockPool.intercept({ path: '', method: 'GET', headers: () => false }) + mockPool.intercept({ path: '', method: 'GET', headers: (headers) => Object.keys(headers).includes('authorization') }) +} diff --git a/test/types/mock-pool.test-d.ts b/test/types/mock-pool.test-d.ts new file mode 100644 index 0000000..b51779b --- /dev/null +++ b/test/types/mock-pool.test-d.ts @@ -0,0 +1,42 @@ +import { expectAssignable } from 'tsd' +import { MockAgent, MockPool } from '../..' 
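// A compact runtime sketch of the interception flow whose typings are checked
// below; the http://localhost:3000 origin and /users/1 route are placeholders.
function mockPoolSketch (): MockPool {
  const mockAgent = new MockAgent()
  mockAgent.disableNetConnect()
  const mockPool: MockPool = mockAgent.get('http://localhost:3000')
  mockPool.intercept({ path: '/users/1', method: 'GET' }).reply(200, { id: 1 })
  return mockPool
}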
+import { MockInterceptor } from '../../types/mock-interceptor' + +{ + const mockPool: MockPool = new MockAgent({ connections: 1 }).get('') + + // intercept + expectAssignable(mockPool.intercept({ path: '', method: 'GET' })) + expectAssignable(mockPool.intercept({ path: '', method: 'GET', body: '', headers: { 'User-Agent': '' } })) + expectAssignable(mockPool.intercept({ path: new RegExp(''), method: new RegExp(''), body: new RegExp(''), headers: { 'User-Agent': new RegExp('') } })) + expectAssignable(mockPool.intercept({ + path: (path) => { + expectAssignable(path) + return true + }, + method: (method) => { + expectAssignable(method) + return true + }, + body: (body) => { + expectAssignable(body) + return true + }, + headers: { + 'User-Agent': (header) => { + expectAssignable(header) + return true + } + } + })) + + // dispatch + expectAssignable(mockPool.dispatch({ origin: '', path: '', method: 'GET' }, {})) + + // close + expectAssignable>(mockPool.close()) +} + +{ + expectAssignable(new MockPool('', {agent: new MockAgent({ connections: 1})})) +} diff --git a/test/types/pool.test-d.ts b/test/types/pool.test-d.ts new file mode 100644 index 0000000..c237468 --- /dev/null +++ b/test/types/pool.test-d.ts @@ -0,0 +1,112 @@ +import { Duplex, Readable, Writable } from 'stream' +import { expectAssignable, expectType } from 'tsd' +import { Dispatcher, Pool, Client } from '../..' +import { URL } from 'url' + +expectAssignable(new Pool('')) +expectAssignable(new Pool('', {})) +expectAssignable(new Pool(new URL('http://localhost'), {})) +expectAssignable(new Pool('', { factory: () => new Dispatcher() })) +expectAssignable(new Pool('', { factory: (origin, opts) => new Client(origin, opts) })) +expectAssignable(new Pool('', { connections: 1 })) + +{ + const pool = new Pool('', {}) + + // properties + expectAssignable(pool.closed) + expectAssignable(pool.destroyed) + expectAssignable(pool.stats) + + // request + expectAssignable>(pool.request({ origin: '', path: '', method: 'GET' })) + expectAssignable>(pool.request({ origin: new URL('http://localhost'), path: '', method: 'GET' })) + expectAssignable(pool.request({ origin: '', path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + expectAssignable(pool.request({ origin: new URL('http://localhost'), path: '', method: 'GET' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // stream + expectAssignable>(pool.stream({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable>(pool.stream({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Writable() + })) + expectAssignable(pool.stream( + { origin: '', path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + expectAssignable(pool.stream( + { origin: new URL('http://localhost'), path: '', method: 'GET' }, + data => { + expectAssignable(data) + return new Writable() + }, + (err, data) => { + expectAssignable(err) + expectAssignable(data) + } + )) + + // pipeline + expectAssignable(pool.pipeline({ origin: '', path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + expectAssignable(pool.pipeline({ origin: new URL('http://localhost'), path: '', method: 'GET' }, data => { + expectAssignable(data) + return new Readable() + })) + + // upgrade + 
expectAssignable>(pool.upgrade({ path: '' })) + expectAssignable(pool.upgrade({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // connect + expectAssignable>(pool.connect({ path: '' })) + expectAssignable(pool.connect({ path: '' }, (err, data) => { + expectAssignable(err) + expectAssignable(data) + })) + + // dispatch + expectAssignable(pool.dispatch({ origin: '', path: '', method: 'GET' }, {})) + expectAssignable(pool.dispatch({ origin: new URL('http://localhost'), path: '', method: 'GET' }, {})) + + // close + expectAssignable>(pool.close()) + expectAssignable(pool.close(() => {})) + + // destroy + expectAssignable>(pool.destroy()) + expectAssignable>(pool.destroy(new Error())) + expectAssignable>(pool.destroy(null)) + expectAssignable(pool.destroy(() => {})) + expectAssignable(pool.destroy(new Error(), () => {})) + expectAssignable(pool.destroy(null, () => {})) + + // stats + expectType(pool.stats.connected) + expectType(pool.stats.free) + expectType(pool.stats.pending) + expectType(pool.stats.queued) + expectType(pool.stats.running) + expectType(pool.stats.size) +} diff --git a/test/types/proxy-agent.test-d.ts b/test/types/proxy-agent.test-d.ts new file mode 100644 index 0000000..7cc092b --- /dev/null +++ b/test/types/proxy-agent.test-d.ts @@ -0,0 +1,43 @@ +import { expectAssignable } from 'tsd' +import { URL } from 'url' +import { ProxyAgent, setGlobalDispatcher, getGlobalDispatcher, Agent, Pool } from '../..' + +expectAssignable(new ProxyAgent('')) +expectAssignable(new ProxyAgent({ uri: '' })) +expectAssignable( + new ProxyAgent({ + connections: 1, + uri: '', + auth: '', + token: '', + maxRedirections: 1, + factory: (_origin: URL, opts: Object) => new Agent(opts), + requestTls: { + ca: [''], + key: '', + cert: '', + servername: '', + timeout: 1 + }, + proxyTls: { + ca: [''], + key: '', + cert: '', + servername: '', + timeout: 1 + }, + clientFactory: (origin: URL, opts: object) => new Pool(origin, opts) + }) +) + +{ + const proxyAgent = new ProxyAgent('') + expectAssignable(setGlobalDispatcher(proxyAgent)) + expectAssignable(getGlobalDispatcher()) + + // close + expectAssignable>(proxyAgent.close()) + + // dispatch + expectAssignable(proxyAgent.dispatch({ origin: '', path: '', method: 'GET' }, {})) +} diff --git a/test/types/readable.test-d.ts b/test/types/readable.test-d.ts new file mode 100644 index 0000000..d004b70 --- /dev/null +++ b/test/types/readable.test-d.ts @@ -0,0 +1,34 @@ +import { expectAssignable } from 'tsd' +import BodyReadable from '../../types/readable' +import { Blob } from 'buffer' + +expectAssignable(new BodyReadable()) + +{ + const readable = new BodyReadable() + + // dump + expectAssignable>(readable.dump()) + expectAssignable>(readable.dump({ limit: 123 })) + + // text + expectAssignable>(readable.text()) + + // json + expectAssignable>(readable.json()) + + // blob + expectAssignable>(readable.blob()) + + // arrayBuffer + expectAssignable>(readable.arrayBuffer()) + + // formData + expectAssignable>(readable.formData()) + + // bodyUsed + expectAssignable(readable.bodyUsed) + + // body + expectAssignable(readable.body) +} diff --git a/test/unix.js b/test/unix.js new file mode 100644 index 0000000..019f654 --- /dev/null +++ b/test/unix.js @@ -0,0 +1,141 @@ +'use strict' + +const { test } = require('tap') +const { Client, Pool } = require('..') +const http = require('http') +const https = require('https') +const pem = require('https-pem') +const fs = require('fs') + +if (process.platform !== 'win32') { + test('http unix 
get', (t) => { + t.plan(7) + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal('localhost', req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + try { + fs.unlinkSync('/var/tmp/test3.sock') + } catch (err) { + + } + + server.listen('/var/tmp/test3.sock', () => { + const client = new Client({ + hostname: 'localhost', + protocol: 'http:' + }, { + socketPath: '/var/tmp/test3.sock' + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + const { statusCode, headers, body } = data + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + }) + + test('http unix get pool', (t) => { + t.plan(7) + + const server = http.createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal('localhost', req.headers.host) + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + try { + fs.unlinkSync('/var/tmp/test3.sock') + } catch (err) { + + } + + server.listen('/var/tmp/test3.sock', () => { + const client = new Pool({ + hostname: 'localhost', + protocol: 'http:' + }, { + socketPath: '/var/tmp/test3.sock' + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + const { statusCode, headers, body } = data + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + }) + + test('https get with tls opts', (t) => { + t.plan(6) + + const server = https.createServer(pem, (req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + res.setHeader('content-type', 'text/plain') + res.end('hello') + }) + t.teardown(server.close.bind(server)) + + try { + fs.unlinkSync('/var/tmp/test3.sock') + } catch (err) { + + } + + server.listen('/var/tmp/test8.sock', () => { + const client = new Client({ + hostname: 'localhost', + protocol: 'https:' + }, { + socketPath: '/var/tmp/test8.sock', + tls: { + rejectUnauthorized: false + } + }) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'GET' }, (err, data) => { + t.error(err) + const { statusCode, headers, body } = data + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) + }) +} diff --git a/test/util.js b/test/util.js new file mode 100644 index 0000000..794c68e --- /dev/null +++ b/test/util.js @@ -0,0 +1,123 @@ +'use strict' + +const t = require('tap') +const { test } = t +const { Stream } = require('stream') +const { EventEmitter } = require('events') + +const util = require('../lib/core/util') +const { InvalidArgumentError } = require('../lib/core/errors') + +test('isStream', (t) => { + t.plan(3) + + const stream = new Stream() + t.ok(util.isStream(stream)) + + const buffer = Buffer.alloc(0) + t.notOk(util.isStream(buffer)) + + const ee = new EventEmitter() + t.notOk(util.isStream(ee)) +}) + +test('getServerName', (t) => { + 
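+  // Note (not part of the upstream file, added for illustration): getServerName derives
+  // the TLS SNI value from a host string. RFC 6066 does not allow literal IPv4/IPv6
+  // addresses in server_name, so IP literals map to '' and a ':port' suffix is stripped
+  // from hostnames, e.g.:
+  //   util.getServerName('example.com:80')          // -> 'example.com'
+  //   util.getServerName('[2606:4700:4700::1111]')  // -> ''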
t.plan(6) + t.equal(util.getServerName('1.1.1.1'), '') + t.equal(util.getServerName('1.1.1.1:443'), '') + t.equal(util.getServerName('example.com'), 'example.com') + t.equal(util.getServerName('example.com:80'), 'example.com') + t.equal(util.getServerName('[2606:4700:4700::1111]'), '') + t.equal(util.getServerName('[2606:4700:4700::1111]:443'), '') +}) + +test('validateHandler', (t) => { + t.plan(9) + + t.throws(() => util.validateHandler(null), InvalidArgumentError, 'handler must be an object') + t.throws(() => util.validateHandler({ + onConnect: null + }), InvalidArgumentError, 'invalid onConnect method') + t.throws(() => util.validateHandler({ + onConnect: () => {}, + onError: null + }), InvalidArgumentError, 'invalid onError method') + t.throws(() => util.validateHandler({ + onConnect: () => {}, + onError: () => {}, + onBodySent: null + }), InvalidArgumentError, 'invalid onBodySent method') + t.throws(() => util.validateHandler({ + onConnect: () => {}, + onError: () => {}, + onBodySent: () => {}, + onHeaders: null + }), InvalidArgumentError, 'invalid onHeaders method') + t.throws(() => util.validateHandler({ + onConnect: () => {}, + onError: () => {}, + onBodySent: () => {}, + onHeaders: () => {}, + onData: null + }), InvalidArgumentError, 'invalid onData method') + t.throws(() => util.validateHandler({ + onConnect: () => {}, + onError: () => {}, + onBodySent: () => {}, + onHeaders: () => {}, + onData: () => {}, + onComplete: null + }), InvalidArgumentError, 'invalid onComplete method') + t.throws(() => util.validateHandler({ + onConnect: () => {}, + onError: () => {}, + onBodySent: () => {}, + onUpgrade: 'null' + }, 'CONNECT'), InvalidArgumentError, 'invalid onUpgrade method') + t.throws(() => util.validateHandler({ + onConnect: () => {}, + onError: () => {}, + onBodySent: () => {}, + onUpgrade: 'null' + }, 'CONNECT', () => {}), InvalidArgumentError, 'invalid onUpgrade method') +}) + +test('parseHeaders', (t) => { + t.plan(6) + t.same(util.parseHeaders(['key', 'value']), { key: 'value' }) + t.same(util.parseHeaders([Buffer.from('key'), Buffer.from('value')]), { key: 'value' }) + t.same(util.parseHeaders(['Key', 'Value']), { key: 'Value' }) + t.same(util.parseHeaders(['Key', 'value', 'key', 'Value']), { key: ['value', 'Value'] }) + t.same(util.parseHeaders(['key', ['value1', 'value2', 'value3']]), { key: ['value1', 'value2', 'value3'] }) + t.same(util.parseHeaders([Buffer.from('key'), [Buffer.from('value1'), Buffer.from('value2'), Buffer.from('value3')]]), { key: ['value1', 'value2', 'value3'] }) +}) + +test('parseRawHeaders', (t) => { + t.plan(1) + t.same(util.parseRawHeaders(['key', 'value', Buffer.from('key'), Buffer.from('value')]), ['key', 'value', 'key', 'value']) +}) + +test('buildURL', { skip: util.nodeMajor >= 12 }, (t) => { + const tests = [ + [{ id: BigInt(123456) }, 'id=123456'], + [{ date: new Date() }, 'date='], + [{ obj: { id: 1 } }, 'obj='], + [{ params: ['a', 'b', 'c'] }, 'params=a&params=b&params=c'], + [{ bool: true }, 'bool=true'], + [{ number: 123456 }, 'number=123456'], + [{ string: 'hello' }, 'string=hello'], + [{ null: null }, 'null='], + [{ void: undefined }, 'void='], + [{ fn: function () {} }, 'fn='], + [{}, ''] + ] + + const base = 'https://www.google.com' + + for (const [input, output] of tests) { + const expected = `${base}${output ? 
`?${output}` : output}` + t.equal(util.buildURL(base, input), expected) + } + + t.end() +}) diff --git a/test/utils/async-iterators.js b/test/utils/async-iterators.js new file mode 100644 index 0000000..da7e0a8 --- /dev/null +++ b/test/utils/async-iterators.js @@ -0,0 +1,25 @@ +'use strict' + +async function * wrapWithAsyncIterable (asyncIterable, indefinite = false) { + for await (const chunk of asyncIterable) { + yield chunk + } + if (indefinite) { + await new Promise(() => {}) + } +} + +const STREAM = 'stream' +const ASYNC_ITERATOR = 'async-iterator' +function maybeWrapStream (stream, type) { + if (type === STREAM) { + return stream + } + if (type === ASYNC_ITERATOR) { + return wrapWithAsyncIterable(stream) + } + + throw new Error(`bad input ${type} should be ${STREAM} or ${ASYNC_ITERATOR}`) +} + +module.exports = { wrapWithAsyncIterable, maybeWrapStream, consts: { STREAM, ASYNC_ITERATOR } } diff --git a/test/utils/esm-wrapper.mjs b/test/utils/esm-wrapper.mjs new file mode 100644 index 0000000..51f8572 --- /dev/null +++ b/test/utils/esm-wrapper.mjs @@ -0,0 +1,102 @@ +import { createServer } from 'http' +import tap from 'tap' +import { + Agent, + Client, + errors, + pipeline, + Pool, + request, + connect, + upgrade, + setGlobalDispatcher, + getGlobalDispatcher, + stream +} from '../../index.js' + +const { test } = tap + +test('imported Client works with basic GET', (t) => { + t.plan(10) + + const server = createServer((req, res) => { + t.equal('/', req.url) + t.equal('GET', req.method) + t.equal(`localhost:${server.address().port}`, req.headers.host) + t.equal(undefined, req.headers.foo) + t.equal('bar', req.headers.bar) + t.equal(undefined, req.headers['content-length']) + res.setHeader('Content-Type', 'text/plain') + res.end('hello') + }) + + t.teardown(server.close.bind(server)) + + const reqHeaders = { + foo: undefined, + bar: 'bar' + } + + server.listen(0, () => { + const client = new Client(`http://localhost:${server.address().port}`) + t.teardown(client.close.bind(client)) + + client.request({ + path: '/', + method: 'GET', + headers: reqHeaders + }, (err, data) => { + t.error(err) + const { statusCode, headers, body } = data + t.equal(statusCode, 200) + t.equal(headers['content-type'], 'text/plain') + const bufs = [] + body.on('data', (buf) => { + bufs.push(buf) + }) + body.on('end', () => { + t.equal('hello', Buffer.concat(bufs).toString('utf8')) + }) + }) + }) +}) + +test('imported errors work with request args validation', (t) => { + t.plan(2) + + const client = new Client('http://localhost:5000') + + client.request(null, (err) => { + t.type(err, errors.InvalidArgumentError) + }) + + try { + client.request(null, 'asd') + } catch (err) { + t.type(err, errors.InvalidArgumentError) + } +}) + +test('imported errors work with request args validation promise', (t) => { + t.plan(1) + + const client = new Client('http://localhost:5000') + + client.request(null).catch((err) => { + t.type(err, errors.InvalidArgumentError) + }) +}) + +test('named exports', (t) => { + t.equal(typeof Client, 'function') + t.equal(typeof Pool, 'function') + t.equal(typeof Agent, 'function') + t.equal(typeof request, 'function') + t.equal(typeof stream, 'function') + t.equal(typeof pipeline, 'function') + t.equal(typeof connect, 'function') + t.equal(typeof upgrade, 'function') + t.equal(typeof setGlobalDispatcher, 'function') + t.equal(typeof getGlobalDispatcher, 'function') + t.end() +}) diff --git a/test/utils/formdata.js b/test/utils/formdata.js new file mode 100644 index 0000000..edd8854 --- /dev/null 
+++ b/test/utils/formdata.js @@ -0,0 +1,49 @@ +const Busboy = require('@fastify/busboy') + +function parseFormDataString ( + body, + contentType +) { + const cache = { + fileMap: new Map(), + fields: [] + } + + const bb = new Busboy({ + headers: { + 'content-type': contentType + } + }) + + return new Promise((resolve, reject) => { + bb.on('file', (name, file, filename, encoding, mimeType) => { + cache.fileMap.set(name, { data: [], info: { filename, encoding, mimeType } }) + + file.on('data', (data) => { + const old = cache.fileMap.get(name) + + cache.fileMap.set(name, { + data: [...old.data, data], + info: old.info + }) + }).on('end', () => { + const old = cache.fileMap.get(name) + + cache.fileMap.set(name, { + data: Buffer.concat(old.data), + info: old.info + }) + }) + }) + + bb.on('field', (key, value) => cache.fields.push({ key, value })) + bb.on('finish', () => resolve(cache)) + bb.on('error', (e) => reject(e)) + + bb.end(body) + }) +} + +module.exports = { + parseFormDataString +} diff --git a/test/utils/redirecting-servers.js b/test/utils/redirecting-servers.js new file mode 100644 index 0000000..ad8aa58 --- /dev/null +++ b/test/utils/redirecting-servers.js @@ -0,0 +1,265 @@ +'use strict' + +const { createServer } = require('http') + +const isNode20 = process.version.startsWith('v20.') + +function close (server) { + return function () { + return new Promise(resolve => { + if (isNode20) { + server.closeAllConnections() + } + server.close(resolve) + }) + } +} + +function startServer (t, handler) { + return new Promise(resolve => { + const server = createServer(handler) + + server.listen(0, () => { + resolve(`localhost:${server.address().port}`) + }) + + t.teardown(close(server)) + }) +} + +async function startRedirectingServer (t) { + const server = await startServer(t, (req, res) => { + // Parse the path and normalize arguments + let [code, redirections, query] = req.url + .slice(1) + .split(/[/?]/) + + if (req.url.indexOf('?') !== -1 && !query) { + query = redirections + redirections = 0 + } + + code = parseInt(code, 10) + redirections = parseInt(redirections, 10) + + if (isNaN(code) || code < 0) { + code = 302 + } else if (code < 300) { + res.statusCode = code + redirections = 5 + } + + if (isNaN(redirections) || redirections < 0) { + redirections = 0 + } + + // On 303, the method must be GET or HEAD after the first redirect + if (code === 303 && redirections > 0 && req.method !== 'GET' && req.method !== 'HEAD') { + res.statusCode = 400 + res.setHeader('Connection', 'close') + res.end('Did not switch to GET') + return + } + + // End the chain at some point + if (redirections === 5) { + res.setHeader('Connection', 'close') + res.write( + `${req.method} /${redirections}${query ? ` ${query}` : ''} :: ${Object.entries(req.headers) + .map(([k, v]) => `${k}@${v}`) + .join(' ')}` + ) + + if (parseInt(req.headers['content-length']) > 0) { + res.write(' :: ') + req.pipe(res) + } else { + res.end('') + } + + return + } + + // Redirect by default + res.statusCode = code + res.setHeader('Connection', 'close') + res.setHeader('Location', `http://${server}/${code}/${++redirections}${query ? 
`?${query}` : ''}`) + res.end('') + }) + + return server +} + +async function startRedirectingWithBodyServer (t) { + const server = await startServer(t, (req, res) => { + if (req.url === '/') { + res.statusCode = 301 + res.setHeader('Connection', 'close') + res.setHeader('Location', `http://${server}/end`) + res.end('REDIRECT') + return + } + + res.setHeader('Connection', 'close') + res.end('FINAL') + }) + + return server +} + +function startRedirectingWithoutLocationServer (t) { + return startServer(t, (req, res) => { + // Parse the path and normalize arguments + let [code] = req.url + .slice(1) + .split('/') + .map(r => parseInt(r, 10)) + + if (isNaN(code) || code < 0) { + code = 302 + } + + res.statusCode = code + res.setHeader('Connection', 'close') + res.end('') + }) +} + +async function startRedirectingChainServers (t) { + const server1 = await startServer(t, (req, res) => { + if (req.url === '/') { + res.statusCode = 301 + res.setHeader('Connection', 'close') + res.setHeader('Location', `http://${server2}/`) + res.end('') + return + } + + res.setHeader('Connection', 'close') + res.end(req.method) + }) + + const server2 = await startServer(t, (req, res) => { + res.statusCode = 301 + res.setHeader('Connection', 'close') + + if (req.url === '/') { + res.setHeader('Location', `http://${server3}/`) + } else { + res.setHeader('Location', `http://${server3}/end`) + } + + res.end('') + }) + + const server3 = await startServer(t, (req, res) => { + res.statusCode = 301 + res.setHeader('Connection', 'close') + + if (req.url === '/') { + res.setHeader('Location', `http://${server2}/end`) + } else { + res.setHeader('Location', `http://${server1}/end`) + } + + res.end('') + }) + + return [server1, server2, server3] +} + +async function startRedirectingWithAuthorization (t, authorization) { + const server1 = await startServer(t, (req, res) => { + if (req.headers.authorization !== authorization) { + res.statusCode = 403 + res.setHeader('Connection', 'close') + res.end('') + return + } + + res.statusCode = 301 + res.setHeader('Connection', 'close') + + res.setHeader('Location', `http://${server2}`) + res.end('') + }) + + const server2 = await startServer(t, (req, res) => { + res.end(req.headers.authorization || '') + }) + + return [server1, server2] +} + +async function startRedirectingWithCookie (t, cookie) { + const server1 = await startServer(t, (req, res) => { + if (req.headers.cookie !== cookie) { + res.statusCode = 403 + res.setHeader('Connection', 'close') + res.end('') + return + } + + res.statusCode = 301 + res.setHeader('Connection', 'close') + + res.setHeader('Location', `http://${server2}`) + res.end('') + }) + + const server2 = await startServer(t, (req, res) => { + res.end(req.headers.cookie || '') + }) + + return [server1, server2] +} + +async function startRedirectingWithRelativePath (t) { + const server = await startServer(t, (req, res) => { + res.setHeader('Connection', 'close') + + if (req.url === '/') { + res.statusCode = 301 + res.setHeader('Location', '/absolute/a') + res.end('') + } else if (req.url === '/absolute/a') { + res.statusCode = 301 + res.setHeader('Location', 'b') + res.end('') + } else { + res.statusCode = 200 + res.end(req.url) + } + }) + + return server +} + +async function startRedirectingWithQueryParams (t) { + const server = await startServer(t, (req, res) => { + if (req.url === '/?param1=first') { + res.statusCode = 301 + res.setHeader('Connection', 'close') + res.setHeader('Location', `http://${server}/?param2=second`) + res.end('REDIRECT') + return + } + + 
res.setHeader('Connection', 'close') + res.end('') + }) + + return server +} + +module.exports = { + startServer, + startRedirectingServer, + startRedirectingWithBodyServer, + startRedirectingWithoutLocationServer, + startRedirectingChainServers, + startRedirectingWithAuthorization, + startRedirectingWithCookie, + startRedirectingWithRelativePath, + startRedirectingWithQueryParams +} diff --git a/test/utils/stream.js b/test/utils/stream.js new file mode 100644 index 0000000..b78ff5c --- /dev/null +++ b/test/utils/stream.js @@ -0,0 +1,48 @@ +'use strict' + +const { Readable, Writable } = require('stream') + +let ReadableStream + +function createReadable (data) { + return new Readable({ + read () { + this.push(Buffer.from(data)) + this.push(null) + } + }) +} + +function createWritable (target) { + return new Writable({ + write (chunk, _, callback) { + target.push(chunk.toString()) + callback() + }, + final (callback) { + callback() + } + }) +} + +class Source { + constructor (data) { + this.data = data + } + + async start (controller) { + this.controller = controller + } + + async pull (controller) { + controller.enqueue(this.data) + controller.close() + } +} + +function createReadableStream (data) { + ReadableStream = require('stream/web').ReadableStream + return new ReadableStream(new Source(data)) +} + +module.exports = { createReadableStream, createReadable, createWritable } diff --git a/test/validations.js b/test/validations.js new file mode 100644 index 0000000..d1b3409 --- /dev/null +++ b/test/validations.js @@ -0,0 +1,63 @@ +'use strict' + +const t = require('tap') +const { test } = t +const { createServer } = require('http') +const { Client, errors } = require('..') + +const server = createServer((req, res) => { + res.setHeader('content-type', 'text/plain') + res.end('hello') + t.fail('server should never be called') +}) +t.teardown(server.close.bind(server)) + +server.listen(0, () => { + const url = `http://localhost:${server.address().port}` + + test('path', (t) => { + t.plan(4) + + const client = new Client(url) + t.teardown(client.close.bind(client)) + + client.request({ path: null, method: 'GET' }, (err, res) => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'path must be a string') + }) + + client.request({ path: 'aaa', method: 'GET' }, (err, res) => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'path must be an absolute URL or start with a slash') + }) + }) + + test('method', (t) => { + t.plan(2) + + const client = new Client(url) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: null }, (err, res) => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'method must be a string') + }) + }) + + test('body', (t) => { + t.plan(4) + + const client = new Client(url) + t.teardown(client.close.bind(client)) + + client.request({ path: '/', method: 'POST', body: 42 }, (err, res) => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') + }) + + client.request({ path: '/', method: 'POST', body: { hello: 'world' } }, (err, res) => { + t.type(err, errors.InvalidArgumentError) + t.equal(err.message, 'body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') + }) + }) +}) diff --git a/test/webidl/converters.js b/test/webidl/converters.js new file mode 100644 index 0000000..95bea88 --- /dev/null +++ b/test/webidl/converters.js @@ -0,0 +1,202 @@ +'use strict' + +const { test 
} = require('tap') +const { webidl } = require('../../lib/fetch/webidl') + +test('sequence', (t) => { + const converter = webidl.sequenceConverter( + webidl.converters.DOMString + ) + + t.same(converter([1, 2, 3]), ['1', '2', '3']) + + t.throws(() => { + converter(3) + }, TypeError, 'disallows non-objects') + + t.throws(() => { + converter(null) + }, TypeError) + + t.throws(() => { + converter(undefined) + }, TypeError) + + t.throws(() => { + converter({}) + }, TypeError, 'no Symbol.iterator') + + t.throws(() => { + converter({ + [Symbol.iterator]: 42 + }) + }, TypeError, 'invalid Symbol.iterator') + + t.throws(() => { + converter(webidl.converters.sequence({ + [Symbol.iterator] () { + return { + next: 'never!' + } + } + })) + }, TypeError, 'invalid generator') + + t.end() +}) + +test('webidl.dictionaryConverter', (t) => { + t.test('arguments', (t) => { + const converter = webidl.dictionaryConverter([]) + + t.throws(() => { + converter(true) + }, TypeError) + + for (const value of [{}, undefined, null]) { + t.doesNotThrow(() => { + converter(value) + }) + } + + t.end() + }) + + t.test('required key', (t) => { + const converter = webidl.dictionaryConverter([ + { + converter: () => true, + key: 'Key', + required: true + } + ]) + + t.throws(() => { + converter({ wrongKey: 'key' }) + }, TypeError) + + t.doesNotThrow(() => { + converter({ Key: 'this key was required!' }) + }) + + t.end() + }) + + t.end() +}) + +test('ArrayBuffer', (t) => { + t.throws(() => { + webidl.converters.ArrayBuffer(true) + }, TypeError) + + t.throws(() => { + webidl.converters.ArrayBuffer({}) + }, TypeError) + + t.throws(() => { + const sab = new SharedArrayBuffer(1024) + webidl.converters.ArrayBuffer(sab, { allowShared: false }) + }, TypeError) + + t.doesNotThrow(() => { + const sab = new SharedArrayBuffer(1024) + webidl.converters.ArrayBuffer(sab) + }) + + t.doesNotThrow(() => { + const ab = new ArrayBuffer(8) + webidl.converters.ArrayBuffer(ab) + }) + + t.end() +}) + +test('TypedArray', (t) => { + t.throws(() => { + webidl.converters.TypedArray(3) + }, TypeError) + + t.throws(() => { + webidl.converters.TypedArray({}) + }, TypeError) + + t.throws(() => { + const uint8 = new Uint8Array([1, 2, 3]) + Object.defineProperty(uint8, 'buffer', { + get () { + return new SharedArrayBuffer(8) + } + }) + + webidl.converters.TypedArray(uint8, Uint8Array, { + allowShared: false + }) + }, TypeError) + + t.end() +}) + +test('DataView', (t) => { + t.throws(() => { + webidl.converters.DataView(3) + }, TypeError) + + t.throws(() => { + webidl.converters.DataView({}) + }, TypeError) + + t.throws(() => { + const buffer = new ArrayBuffer(16) + const view = new DataView(buffer, 0) + + Object.defineProperty(view, 'buffer', { + get () { + return new SharedArrayBuffer(8) + } + }) + + webidl.converters.DataView(view, { + allowShared: false + }) + }) + + const buffer = new ArrayBuffer(16) + const view = new DataView(buffer, 0) + + t.equal(webidl.converters.DataView(view), view) + + t.end() +}) + +test('BufferSource', (t) => { + t.doesNotThrow(() => { + const buffer = new ArrayBuffer(16) + const view = new DataView(buffer, 0) + + webidl.converters.BufferSource(view) + }) + + t.throws(() => { + webidl.converters.BufferSource(3) + }, TypeError) + + t.end() +}) + +test('ByteString', (t) => { + t.doesNotThrow(() => { + webidl.converters.ByteString('') + }) + + // https://github.com/nodejs/undici/issues/1590 + t.throws(() => { + const char = String.fromCharCode(256) + webidl.converters.ByteString(`invalid${char}char`) + }, { + message: 'Cannot 
convert argument to a ByteString because the character at ' + + 'index 7 has a value of 256 which is greater than 255.' + }) + + t.end() +}) diff --git a/test/webidl/helpers.js b/test/webidl/helpers.js new file mode 100644 index 0000000..f44d501 --- /dev/null +++ b/test/webidl/helpers.js @@ -0,0 +1,75 @@ +'use strict' + +const { test } = require('tap') +const { webidl } = require('../../lib/fetch/webidl') + +test('webidl.interfaceConverter', (t) => { + class A {} + class B {} + + const converter = webidl.interfaceConverter(A) + + t.throws(() => { + converter(new B()) + }, TypeError) + + t.doesNotThrow(() => { + converter(new A()) + }) + + t.end() +}) + +test('webidl.dictionaryConverter', (t) => { + t.test('extraneous keys are provided', (t) => { + const converter = webidl.dictionaryConverter([ + { + key: 'key', + converter: webidl.converters.USVString, + defaultValue: 420, + required: true + } + ]) + + t.same( + converter({ + a: 'b', + key: 'string', + c: 'd', + get value () { + return 6 + } + }), + { key: 'string' } + ) + + t.end() + }) + + t.test('defaultValue with key = null', (t) => { + const converter = webidl.dictionaryConverter([ + { + key: 'key', + converter: webidl.converters['unsigned short'], + defaultValue: 200 + } + ]) + + t.same(converter({ key: null }), { key: 0 }) + t.end() + }) + + t.test('no defaultValue and optional', (t) => { + const converter = webidl.dictionaryConverter([ + { + key: 'key', + converter: webidl.converters.ByteString + } + ]) + + t.same(converter({ a: 'b', c: 'd' }), {}) + t.end() + }) + + t.end() +}) diff --git a/test/webidl/util.js b/test/webidl/util.js new file mode 100644 index 0000000..c451590 --- /dev/null +++ b/test/webidl/util.js @@ -0,0 +1,106 @@ +'use strict' + +const { test } = require('tap') +const { webidl } = require('../../lib/fetch/webidl') + +test('Type(V)', (t) => { + const Type = webidl.util.Type + + t.equal(Type(undefined), 'Undefined') + t.equal(Type(null), 'Null') + t.equal(Type(true), 'Boolean') + t.equal(Type('string'), 'String') + t.equal(Type(Symbol('symbol')), 'Symbol') + t.equal(Type(1.23), 'Number') + t.equal(Type(1n), 'BigInt') + t.equal(Type({ a: 'b' }), 'Object') + + t.end() +}) + +test('ConvertToInt(V)', (t) => { + const ConvertToInt = webidl.util.ConvertToInt + + t.equal(ConvertToInt(63, 64, 'signed'), 63, 'odd int') + t.equal(ConvertToInt(64.49, 64, 'signed'), 64) + t.equal(ConvertToInt(64.51, 64, 'signed'), 64) + + const max = 2 ** 53 + t.equal(ConvertToInt(max + 1, 64, 'signed'), max, 'signed pos') + t.equal(ConvertToInt(-max - 1, 64, 'signed'), -max, 'signed neg') + + t.equal(ConvertToInt(max + 1, 64, 'unsigned'), max + 1, 'unsigned pos') + t.equal(ConvertToInt(-max - 1, 64, 'unsigned'), -max - 1, 'unsigned neg') + + for (const signedness of ['signed', 'unsigned']) { + t.equal(ConvertToInt(Infinity, 64, signedness), 0) + t.equal(ConvertToInt(-Infinity, 64, signedness), 0) + t.equal(ConvertToInt(NaN, 64, signedness), 0) + } + + for (const signedness of ['signed', 'unsigned']) { + t.throws(() => { + ConvertToInt(NaN, 64, signedness, { + enforceRange: true + }) + }, TypeError) + + t.throws(() => { + ConvertToInt(Infinity, 64, signedness, { + enforceRange: true + }) + }, TypeError) + + t.throws(() => { + ConvertToInt(-Infinity, 64, signedness, { + enforceRange: true + }) + }, TypeError) + + t.throws(() => { + ConvertToInt(2 ** 53 + 1, 32, 'signed', { + enforceRange: true + }) + }, TypeError) + + t.throws(() => { + ConvertToInt(-(2 ** 53 + 1), 32, 'unsigned', { + enforceRange: true + }) + }, TypeError) + + t.equal( + 
ConvertToInt(65.5, 64, signedness, { + enforceRange: true + }), + 65 + ) + } + + for (const signedness of ['signed', 'unsigned']) { + t.equal( + ConvertToInt(63.49, 64, signedness, { + clamp: true + }), + 64 + ) + + t.equal( + ConvertToInt(63.51, 64, signedness, { + clamp: true + }), + 64 + ) + + t.equal( + ConvertToInt(-0, 64, signedness, { + clamp: true + }), + 0 + ) + } + + t.equal(ConvertToInt(111, 2, 'signed'), -1) + + t.end() +}) diff --git a/test/websocket/close.js b/test/websocket/close.js new file mode 100644 index 0000000..4d314a4 --- /dev/null +++ b/test/websocket/close.js @@ -0,0 +1,130 @@ +'use strict' + +const { test } = require('tap') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') + +test('Close', (t) => { + t.plan(6) + + t.test('Close with code', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('close', (code) => { + t.equal(code, 1000) + }) + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + ws.addEventListener('open', () => ws.close(1000)) + }) + + t.test('Close with code and reason', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('close', (code, reason) => { + t.equal(code, 1000) + t.same(reason, Buffer.from('Goodbye')) + }) + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + ws.addEventListener('open', () => ws.close(1000, 'Goodbye')) + }) + + t.test('Close with invalid code', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + ws.addEventListener('open', () => { + t.throws( + () => ws.close(2999), + { + name: 'InvalidAccessError', + constructor: DOMException + } + ) + + t.throws( + () => ws.close(5000), + { + name: 'InvalidAccessError', + constructor: DOMException + } + ) + + ws.close() + }) + }) + + t.test('Close with invalid reason', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + t.teardown(server.close.bind(server)) + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + t.throws( + () => ws.close(1000, 'a'.repeat(124)), + { + name: 'SyntaxError', + constructor: DOMException + } + ) + + ws.close(1000) + }) + }) + + t.test('Close with no code or reason', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('close', (code, reason) => { + t.equal(code, 1005) + t.same(reason, Buffer.alloc(0)) + }) + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + ws.addEventListener('open', () => ws.close()) + }) + + t.test('Close with a 3000 status code', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('close', (code, reason) => { + t.equal(code, 3000) + t.same(reason, Buffer.alloc(0)) + }) + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + ws.addEventListener('open', () => ws.close(3000)) + }) +}) diff --git a/test/websocket/constructor.js b/test/websocket/constructor.js new file mode 100644 index 0000000..dd87dea --- /dev/null +++ b/test/websocket/constructor.js @@ -0,0 +1,48 @@ 
+'use strict' + +const { test } = require('tap') +const { WebSocket } = require('../..') + +test('Constructor', (t) => { + t.throws( + () => new WebSocket('abc'), + { + name: 'SyntaxError', + constructor: DOMException + } + ) + + t.throws( + () => new WebSocket('wss://echo.websocket.events/#a'), + { + name: 'SyntaxError', + constructor: DOMException + } + ) + + t.throws( + () => new WebSocket('wss://echo.websocket.events', ''), + { + name: 'SyntaxError', + constructor: DOMException + } + ) + + t.throws( + () => new WebSocket('wss://echo.websocket.events', ['chat', 'chat']), + { + name: 'SyntaxError', + constructor: DOMException + } + ) + + t.throws( + () => new WebSocket('wss://echo.websocket.events', ['<>@,;:\\"/[]?={}\t']), + { + name: 'SyntaxError', + constructor: DOMException + } + ) + + t.end() +}) diff --git a/test/websocket/custom-headers.js b/test/websocket/custom-headers.js new file mode 100644 index 0000000..01f1830 --- /dev/null +++ b/test/websocket/custom-headers.js @@ -0,0 +1,30 @@ +'use strict' + +const { test } = require('tap') +const assert = require('assert') +const { Agent, WebSocket } = require('../..') + +test('Setting custom headers', (t) => { + t.plan(1) + + const headers = { + 'x-khafra-hello': 'hi', + Authorization: 'Bearer base64orsomethingitreallydoesntmatter' + } + + class TestAgent extends Agent { + dispatch (options) { + t.match(options.headers, headers) + + return false + } + } + + const ws = new WebSocket('wss://echo.websocket.events', { + headers, + dispatcher: new TestAgent() + }) + + // We don't want to make a request, just ensure the headers are set. + ws.onclose = ws.onerror = ws.onmessage = assert.fail +}) diff --git a/test/websocket/diagnostics-channel.js b/test/websocket/diagnostics-channel.js new file mode 100644 index 0000000..c3bf05a --- /dev/null +++ b/test/websocket/diagnostics-channel.js @@ -0,0 +1,71 @@ +'use strict' + +const t = require('tap') +const dc = require('diagnostics_channel') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') + +t.test('diagnostics channel', { jobs: 1 }, (t) => { + t.plan(2) + + t.test('undici:websocket:open', (t) => { + t.plan(3) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.close(1000, 'goodbye') + }) + + const listener = ({ extensions, protocol }) => { + t.equal(extensions, null) + t.equal(protocol, 'chat') + } + + t.teardown(() => { + dc.channel('undici:websocket:open').unsubscribe(listener) + return server.close() + }) + + const { port } = server.address() + + dc.channel('undici:websocket:open').subscribe(listener) + + const ws = new WebSocket(`ws://localhost:${port}`, 'chat') + + ws.addEventListener('open', () => { + t.pass('Emitted open') + }) + }) + + t.test('undici:websocket:close', (t) => { + t.plan(4) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.close(1000, 'goodbye') + }) + + const listener = ({ websocket, code, reason }) => { + t.type(websocket, WebSocket) + t.equal(code, 1000) + t.equal(reason, 'goodbye') + } + + t.teardown(() => { + dc.channel('undici:websocket:close').unsubscribe(listener) + return server.close() + }) + + const { port } = server.address() + + dc.channel('undici:websocket:close').subscribe(listener) + + const ws = new WebSocket(`ws://localhost:${port}`, 'chat') + + ws.addEventListener('close', () => { + t.pass('Emitted open') + }) + }) +}) diff --git a/test/websocket/events.js b/test/websocket/events.js new file mode 100644 index 0000000..e5b565c --- 
/dev/null +++ b/test/websocket/events.js @@ -0,0 +1,204 @@ +'use strict' + +const { test } = require('tap') +const { WebSocketServer } = require('ws') +const { MessageEvent, CloseEvent, ErrorEvent } = require('../../lib/websocket/events') +const { WebSocket } = require('../..') + +test('MessageEvent', (t) => { + t.throws(() => new MessageEvent(), TypeError, 'no arguments') + t.throws(() => new MessageEvent('').initMessageEvent(), TypeError) + + const noInitEvent = new MessageEvent('message') + + t.equal(noInitEvent.origin, '') + t.equal(noInitEvent.data, null) + t.equal(noInitEvent.lastEventId, '') + t.equal(noInitEvent.source, null) + t.ok(Array.isArray(noInitEvent.ports)) + t.ok(Object.isFrozen(noInitEvent.ports)) + t.type(new MessageEvent('').initMessageEvent('message'), MessageEvent) + + t.end() +}) + +test('CloseEvent', (t) => { + t.throws(() => new CloseEvent(), TypeError) + + const noInitEvent = new CloseEvent('close') + + t.equal(noInitEvent.wasClean, false) + t.equal(noInitEvent.code, 0) + t.equal(noInitEvent.reason, '') + + t.end() +}) + +test('ErrorEvent', (t) => { + t.throws(() => new ErrorEvent(), TypeError) + + const noInitEvent = new ErrorEvent('error') + + t.equal(noInitEvent.message, '') + t.equal(noInitEvent.filename, '') + t.equal(noInitEvent.lineno, 0) + t.equal(noInitEvent.colno, 0) + t.equal(noInitEvent.error, undefined) + + t.end() +}) + +test('Event handlers', (t) => { + t.plan(4) + + const server = new WebSocketServer({ port: 0 }) + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + function listen () {} + + t.teardown(server.close.bind(server)) + t.teardown(() => ws.close()) + + t.test('onopen', (t) => { + t.plan(3) + + t.equal(ws.onopen, null) + ws.onopen = 3 + t.equal(ws.onopen, null) + ws.onopen = listen + t.equal(ws.onopen, listen) + }) + + t.test('onerror', (t) => { + t.plan(3) + + t.equal(ws.onerror, null) + ws.onerror = 3 + t.equal(ws.onerror, null) + ws.onerror = listen + t.equal(ws.onerror, listen) + }) + + t.test('onclose', (t) => { + t.plan(3) + + t.equal(ws.onclose, null) + ws.onclose = 3 + t.equal(ws.onclose, null) + ws.onclose = listen + t.equal(ws.onclose, listen) + }) + + t.test('onmessage', (t) => { + t.plan(3) + + t.equal(ws.onmessage, null) + ws.onmessage = 3 + t.equal(ws.onmessage, null) + ws.onmessage = listen + t.equal(ws.onmessage, listen) + }) +}) + +test('CloseEvent WPTs ported', (t) => { + t.test('initCloseEvent', (t) => { + // Taken from websockets/interfaces/CloseEvent/historical.html + t.notOk('initCloseEvent' in CloseEvent.prototype) + t.notOk('initCloseEvent' in new CloseEvent('close')) + + t.end() + }) + + t.test('CloseEvent constructor', (t) => { + // Taken from websockets/interfaces/CloseEvent/constructor.html + + { + const event = new CloseEvent('foo') + + t.ok(event instanceof CloseEvent, 'should be a CloseEvent') + t.equal(event.type, 'foo') + t.notOk(event.bubbles, 'bubbles') + t.notOk(event.cancelable, 'cancelable') + t.notOk(event.wasClean, 'wasClean') + t.equal(event.code, 0) + t.equal(event.reason, '') + } + + { + const event = new CloseEvent('foo', { + bubbles: true, + cancelable: true, + wasClean: true, + code: 7, + reason: 'x' + }) + t.ok(event instanceof CloseEvent, 'should be a CloseEvent') + t.equal(event.type, 'foo') + t.ok(event.bubbles, 'bubbles') + t.ok(event.cancelable, 'cancelable') + t.ok(event.wasClean, 'wasClean') + t.equal(event.code, 7) + t.equal(event.reason, 'x') + } + + t.end() + }) + + t.end() +}) + +test('ErrorEvent WPTs ported', (t) => { + t.test('Synthetic ErrorEvent', (t) => { 
+ // Taken from html/webappapis/scripting/events/event-handler-processing-algorithm-error/document-synthetic-errorevent.html + + { + const e = new ErrorEvent('error') + t.equal(e.message, '') + t.equal(e.filename, '') + t.equal(e.lineno, 0) + t.equal(e.colno, 0) + t.equal(e.error, undefined) + } + + { + const e = new ErrorEvent('error', { error: null }) + t.equal(e.error, null) + } + + { + const e = new ErrorEvent('error', { error: undefined }) + t.equal(e.error, undefined) + } + + { + const e = new ErrorEvent('error', { error: 'foo' }) + t.equal(e.error, 'foo') + } + + t.end() + }) + + t.test('webidl', (t) => { + // Taken from webidl/ecmascript-binding/no-regexp-special-casing.any.js + + const regExp = new RegExp() + regExp.message = 'some message' + + const errorEvent = new ErrorEvent('type', regExp) + + t.equal(errorEvent.message, 'some message') + + t.end() + }) + + t.test('initErrorEvent', (t) => { + // Taken from workers/Worker_dispatchEvent_ErrorEvent.htm + + const e = new ErrorEvent('error') + t.notOk('initErrorEvent' in e, 'should not be supported') + + t.end() + }) + + t.end() +}) diff --git a/test/websocket/fragments.js b/test/websocket/fragments.js new file mode 100644 index 0000000..d51db4b --- /dev/null +++ b/test/websocket/fragments.js @@ -0,0 +1,40 @@ +'use strict' + +const { test } = require('tap') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') +const diagnosticsChannel = require('diagnostics_channel') + +test('Fragmented frame with a ping frame in the middle of it', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + const socket = ws._socket + + socket.write(Buffer.from([0x01, 0x03, 0x48, 0x65, 0x6c])) // Text frame "Hel" + socket.write(Buffer.from([0x89, 0x05, 0x48, 0x65, 0x6c, 0x6c, 0x6f])) // ping "Hello" + socket.write(Buffer.from([0x80, 0x02, 0x6c, 0x6f])) // Text frame "lo" + }) + + t.teardown(() => { + for (const client of server.clients) { + client.close() + } + + server.close() + }) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('message', ({ data }) => { + t.same(data, 'Hello') + + ws.close() + }) + + diagnosticsChannel.channel('undici:websocket:ping').subscribe( + ({ payload }) => t.same(payload, Buffer.from('Hello')) + ) +}) diff --git a/test/websocket/frame.js b/test/websocket/frame.js new file mode 100644 index 0000000..b4b73b7 --- /dev/null +++ b/test/websocket/frame.js @@ -0,0 +1,24 @@ +'use strict' + +const { test } = require('tap') +const { WebsocketFrameSend } = require('../../lib/websocket/frame') +const { opcodes } = require('../../lib/websocket/constants') + +test('Writing 16-bit frame length value at correct offset when buffer has a non-zero byteOffset', (t) => { + /* + When writing 16-bit frame lengths, a `DataView` was being used without setting a `byteOffset` into the buffer: + i.e. `new DataView(buffer.buffer)` instead of `new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength)`. + Small `Buffers` returned by `allocUnsafe` are usually returned from the buffer pool, and thus have a non-zero `byteOffset`. + Invalid frames were therefore being returned in that case. 
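+
+    For illustration only (not part of the upstream comment), with a pooled Buffer:
+
+      const buf = Buffer.allocUnsafe(4)   // typically a small view into the shared pool
+      new DataView(buf.buffer)            // views the whole pool from byte 0, wrong bytes
+      new DataView(buf.buffer, buf.byteOffset, buf.byteLength)   // views exactly buf's bytes
+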
+ */ + t.plan(3) + + const payloadLength = 126 // 126 bytes is the smallest payload to trigger a 16-bit length field + const smallBuffer = Buffer.allocUnsafe(1) // make it very likely that the next buffer returned by allocUnsafe DOESN'T have a zero byteOffset + const payload = Buffer.allocUnsafe(payloadLength).fill(0) + const frame = new WebsocketFrameSend(payload).createFrame(opcodes.BINARY) + + t.equal(frame[2], payloadLength >>> 8) + t.equal(frame[3], payloadLength & 0xff) + t.equal(smallBuffer.length, 1) // ensure smallBuffer can't be garbage-collected too soon +}) diff --git a/test/websocket/opening-handshake.js b/test/websocket/opening-handshake.js new file mode 100644 index 0000000..b9a7989 --- /dev/null +++ b/test/websocket/opening-handshake.js @@ -0,0 +1,215 @@ +'use strict' + +const { test } = require('tap') +const { createServer } = require('http') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') + +test('WebSocket connecting to server that isn\'t a Websocket server', (t) => { + t.plan(5) + + const server = createServer((req, res) => { + t.equal(req.headers.connection, 'upgrade') + t.equal(req.headers.upgrade, 'websocket') + t.ok(req.headers['sec-websocket-key']) + t.equal(req.headers['sec-websocket-version'], '13') + + res.end() + server.unref() + }).listen(0, () => { + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + // Server isn't a websocket server + ws.onmessage = ws.onopen = t.fail + + ws.addEventListener('error', t.pass) + }) + + t.teardown(server.close.bind(server)) +}) + +test('Open event is emitted', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.close(1000) + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.onmessage = ws.onerror = t.fail + ws.addEventListener('open', t.pass) +}) + +test('Multiple protocols are joined by a comma', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws, req) => { + t.equal(req.headers['sec-websocket-protocol'], 'chat, echo') + + ws.close(1000) + server.close() + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`, ['chat', 'echo']) + + ws.addEventListener('open', () => ws.close()) +}) + +test('Server doesn\'t send Sec-WebSocket-Protocol header when protocols are used', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.statusCode = 101 + + req.socket.destroy() + }).listen(0, () => { + const ws = new WebSocket(`ws://localhost:${server.address().port}`, 'chat') + + ws.onopen = t.fail + + ws.addEventListener('error', ({ error }) => { + t.ok(error) + }) + }) + + t.teardown(server.close.bind(server)) +}) + +test('Server sends invalid Upgrade header', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('Upgrade', 'NotWebSocket') + res.statusCode = 101 + + req.socket.destroy() + }).listen(0, () => { + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.onopen = t.fail + + ws.addEventListener('error', ({ error }) => { + t.ok(error) + }) + }) + + t.teardown(server.close.bind(server)) +}) + +test('Server sends invalid Connection header', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('Upgrade', 'websocket') + res.setHeader('Connection', 'downgrade') + res.statusCode = 101 + + req.socket.destroy() + }).listen(0, () => { 
+ const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.onopen = t.fail + + ws.addEventListener('error', ({ error }) => { + t.ok(error) + }) + }) + + t.teardown(server.close.bind(server)) +}) + +test('Server sends invalid Sec-WebSocket-Accept header', (t) => { + t.plan(1) + + const server = createServer((req, res) => { + res.setHeader('Upgrade', 'websocket') + res.setHeader('Connection', 'upgrade') + res.setHeader('Sec-WebSocket-Accept', 'abc') + res.statusCode = 101 + + req.socket.destroy() + }).listen(0, () => { + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.onopen = t.fail + + ws.addEventListener('error', ({ error }) => { + t.ok(error) + }) + }) + + t.teardown(server.close.bind(server)) +}) + +test('Server sends invalid Sec-WebSocket-Extensions header', (t) => { + const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + const { createHash } = require('crypto') + + t.plan(2) + + const server = createServer((req, res) => { + const key = req.headers['sec-websocket-key'] + t.ok(key) + + const accept = createHash('sha1').update(key + uid).digest('base64') + + res.setHeader('Upgrade', 'websocket') + res.setHeader('Connection', 'upgrade') + res.setHeader('Sec-WebSocket-Accept', accept) + res.setHeader('Sec-WebSocket-Extensions', 'InvalidExtension') + res.statusCode = 101 + + res.end() + }).listen(0, () => { + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.onopen = t.fail + + ws.addEventListener('error', ({ error }) => { + t.ok(error) + }) + }) + + t.teardown(server.close.bind(server)) +}) + +test('Server sends invalid Sec-WebSocket-Extensions header', (t) => { + const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + const { createHash } = require('crypto') + + t.plan(2) + + const server = createServer((req, res) => { + const key = req.headers['sec-websocket-key'] + t.ok(key) + + const accept = createHash('sha1').update(key + uid).digest('base64') + + res.setHeader('Upgrade', 'websocket') + res.setHeader('Connection', 'upgrade') + res.setHeader('Sec-WebSocket-Accept', accept) + res.setHeader('Sec-WebSocket-Protocol', 'echo') // <-- + res.statusCode = 101 + + res.end() + }).listen(0, () => { + const ws = new WebSocket(`ws://localhost:${server.address().port}`, 'chat') + + ws.onopen = t.fail + + ws.addEventListener('error', ({ error }) => { + t.ok(error) + }) + }) + + t.teardown(server.close.bind(server)) +}) diff --git a/test/websocket/ping-pong.js b/test/websocket/ping-pong.js new file mode 100644 index 0000000..b7c4694 --- /dev/null +++ b/test/websocket/ping-pong.js @@ -0,0 +1,46 @@ +'use strict' + +const { test } = require('tap') +const { WebSocketServer } = require('ws') +const diagnosticsChannel = require('diagnostics_channel') +const { WebSocket } = require('../..') + +test('Receives ping and parses body', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.ping('Hello, world') + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + ws.onerror = ws.onmessage = t.fail + + diagnosticsChannel.channel('undici:websocket:ping').subscribe(({ payload }) => { + t.same(payload, Buffer.from('Hello, world')) + ws.close() + }) +}) + +test('Receives pong and parses body', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.pong('Pong') + }) + + t.teardown(server.close.bind(server)) + + const ws = new 
WebSocket(`ws://localhost:${server.address().port}`) + ws.onerror = ws.onmessage = t.fail + + diagnosticsChannel.channel('undici:websocket:pong').subscribe(({ payload }) => { + t.same(payload, Buffer.from('Pong')) + ws.close() + }) +}) diff --git a/test/websocket/receive.js b/test/websocket/receive.js new file mode 100644 index 0000000..a669022 --- /dev/null +++ b/test/websocket/receive.js @@ -0,0 +1,60 @@ +'use strict' + +const { test } = require('tap') +const { WebSocketServer } = require('ws') +const { WebSocket } = require('../..') + +test('Receiving a frame with a payload length > 2^31-1 bytes', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + const socket = ws._socket + + socket.write(Buffer.from([0x81, 0x7F, 0xCA, 0xE5, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00])) + }) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + t.teardown(() => { + ws.close() + server.close() + }) + + ws.onmessage = t.fail + + ws.addEventListener('error', (event) => { + t.type(event.error, Error) // error event is emitted + }) +}) + +test('Receiving an ArrayBuffer', (t) => { + t.plan(3) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('message', (data, isBinary) => { + ws.send(data, { binary: true }) + + ws.close(1000) + }) + }) + + t.teardown(server.close.bind(server)) + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.binaryType = 'what' + t.equal(ws.binaryType, 'blob') + + ws.binaryType = 'arraybuffer' // <-- + ws.send('Hello') + }) + + ws.addEventListener('message', ({ data }) => { + t.type(data, ArrayBuffer) + t.same(Buffer.from(data), Buffer.from('Hello')) + }) +}) diff --git a/test/websocket/send.js b/test/websocket/send.js new file mode 100644 index 0000000..ac295fd --- /dev/null +++ b/test/websocket/send.js @@ -0,0 +1,216 @@ +'use strict' + +const { test } = require('tap') +const { WebSocketServer } = require('ws') +const { Blob } = require('buffer') +const { WebSocket } = require('../..') + +// the following three tests exercise different code paths because of the three +// different ways a payload length may be specified in a WebSocket frame +// (https://datatracker.ietf.org/doc/html/rfc6455#section-5.2) + +test('Sending >= 2^16 bytes', (t) => { + t.plan(3) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('message', (m, isBinary) => { + ws.send(m, { binary: isBinary }) + }) + }) + + const payload = Buffer.allocUnsafe(2 ** 16).fill('Hello') + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.send(payload) + }) + + ws.addEventListener('message', async ({ data }) => { + t.type(data, Blob) + t.equal(data.size, payload.length) + t.same(Buffer.from(await data.arrayBuffer()), payload) + + ws.close() + server.close() + }) +}) + +test('Sending >= 126, < 2^16 bytes', (t) => { + t.plan(3) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('message', (m, isBinary) => { + ws.send(m, { binary: isBinary }) + }) + }) + + const payload = Buffer.allocUnsafe(126).fill('Hello') + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.send(payload) + }) + + ws.addEventListener('message', async ({ data }) => { + t.type(data, Blob) + t.equal(data.size, payload.length) + t.same(Buffer.from(await 
data.arrayBuffer()), payload) + + ws.close() + server.close() + }) +}) + +test('Sending < 126 bytes', (t) => { + t.plan(3) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('message', (m, isBinary) => { + ws.send(m, { binary: isBinary }) + }) + }) + + const payload = Buffer.allocUnsafe(125).fill('Hello') + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.send(payload) + }) + + ws.addEventListener('message', async ({ data }) => { + t.type(data, Blob) + t.equal(data.size, payload.length) + t.same(Buffer.from(await data.arrayBuffer()), payload) + + ws.close() + server.close() + }) +}) + +test('Sending data after close', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + t.pass() + + ws.on('message', t.fail) + }) + + t.teardown(server.close.bind(server)) + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.close() + ws.send('Some message') + + t.pass() + }) + + ws.addEventListener('error', t.fail) +}) + +test('Sending data before connected', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + t.teardown(server.close.bind(server)) + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + t.throws( + () => ws.send('Not sent'), + { + name: 'InvalidStateError', + constructor: DOMException + } + ) + + t.equal(ws.readyState, WebSocket.CONNECTING) +}) + +test('Sending data to a server', (t) => { + t.plan(3) + + t.test('Send with string', (t) => { + t.plan(2) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('message', (data, isBinary) => { + t.notOk(isBinary, 'Received text frame') + t.same(data, Buffer.from('message')) + + ws.close(1000) + }) + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.send('message') + }) + }) + + t.test('Send with ArrayBuffer', (t) => { + t.plan(2) + + const message = new TextEncoder().encode('message') + const ab = new ArrayBuffer(7) + new Uint8Array(ab).set(message) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('message', (data, isBinary) => { + t.ok(isBinary) + t.same(new Uint8Array(data), message) + + ws.close(1000) + }) + }) + + t.teardown(server.close.bind(server)) + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.send(ab) + }) + }) + + t.test('Send with Blob', (t) => { + t.plan(2) + + const blob = new Blob(['hello']) + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.on('message', (data, isBinary) => { + t.ok(isBinary) + t.same(data, Buffer.from('hello')) + + ws.close(1000) + }) + }) + + t.teardown(server.close.bind(server)) + const ws = new WebSocket(`ws://localhost:${server.address().port}`) + + ws.addEventListener('open', () => { + ws.send(blob) + }) + }) +}) diff --git a/test/websocket/websocketinit.js b/test/websocket/websocketinit.js new file mode 100644 index 0000000..4dda3b4 --- /dev/null +++ b/test/websocket/websocketinit.js @@ -0,0 +1,45 @@ +'use strict' + +const { test } = require('tap') +const { WebSocketServer } = require('ws') +const { WebSocket, Dispatcher, Agent } = require('../..') + +test('WebSocketInit', (t) => { + t.plan(2) + + class WsDispatcher extends 
Dispatcher { + constructor () { + super() + this.agent = new Agent() + } + + dispatch () { + t.pass() + return this.agent.dispatch(...arguments) + } + } + + t.test('WebSocketInit as 2nd param', (t) => { + t.plan(1) + + const server = new WebSocketServer({ port: 0 }) + + server.on('connection', (ws) => { + ws.send(Buffer.from('hello, world')) + }) + + t.teardown(server.close.bind(server)) + + const ws = new WebSocket(`ws://localhost:${server.address().port}`, { + dispatcher: new WsDispatcher() + }) + + ws.onerror = t.fail + + ws.addEventListener('message', async (event) => { + t.equal(await event.data.text(), 'hello, world') + server.close() + ws.close() + }) + }) +}) diff --git a/test/wpt/runner/runner.mjs b/test/wpt/runner/runner.mjs new file mode 100644 index 0000000..5bec326 --- /dev/null +++ b/test/wpt/runner/runner.mjs @@ -0,0 +1,356 @@ +import { EventEmitter, once } from 'node:events' +import { isAbsolute, join, resolve } from 'node:path' +import { existsSync, readdirSync, readFileSync, statSync, writeFileSync } from 'node:fs' +import { fileURLToPath } from 'node:url' +import { Worker } from 'node:worker_threads' +import { colors, handlePipes, normalizeName, parseMeta, resolveStatusPath } from './util.mjs' + +const basePath = fileURLToPath(join(import.meta.url, '../..')) +const testPath = join(basePath, 'tests') +const statusPath = join(basePath, 'status') + +// https://github.com/web-platform-tests/wpt/blob/b24eedd/resources/testharness.js#L3705 +function sanitizeUnpairedSurrogates (str) { + return str.replace( + /([\ud800-\udbff]+)(?![\udc00-\udfff])|(^|[^\ud800-\udbff])([\udc00-\udfff]+)/g, + function (_, low, prefix, high) { + let output = prefix || '' // Prefix may be undefined + const string = low || high // Only one of these alternates can match + for (let i = 0; i < string.length; i++) { + output += codeUnitStr(string[i]) + } + return output + }) +} + +function codeUnitStr (char) { + return 'U+' + char.charCodeAt(0).toString(16) +} + +export class WPTRunner extends EventEmitter { + /** @type {string} */ + #folderName + + /** @type {string} */ + #folderPath + + /** @type {string[]} */ + #files = [] + + /** @type {string[]} */ + #initScripts = [] + + /** @type {string} */ + #url + + /** @type {import('../../status/fetch.status.json')} */ + #status + + /** Tests that have expectedly failed mapped by file name */ + #statusOutput = {} + + #uncaughtExceptions = [] + + /** @type {boolean} */ + #appendReport + + /** @type {string} */ + #reportPath + + #stats = { + completed: 0, + failed: 0, + success: 0, + expectedFailures: 0, + skipped: 0 + } + + constructor (folder, url, { appendReport = false, reportPath } = {}) { + super() + + this.#folderName = folder + this.#folderPath = join(testPath, folder) + this.#files.push( + ...WPTRunner.walk( + this.#folderPath, + (file) => file.endsWith('.any.js') + ) + ) + + if (appendReport) { + if (!reportPath) { + throw new TypeError('reportPath must be provided when appendReport is true') + } + if (!existsSync(reportPath)) { + throw new TypeError('reportPath is invalid') + } + } + + this.#appendReport = appendReport + this.#reportPath = reportPath + + this.#status = JSON.parse(readFileSync(join(statusPath, `${folder}.status.json`))) + this.#url = url + + if (this.#files.length === 0) { + queueMicrotask(() => { + this.emit('completion') + }) + } + + this.once('completion', () => { + for (const { error, test } of this.#uncaughtExceptions) { + console.log(colors(`Uncaught exception in "${test}":`, 'red')) + console.log(colors(`${error.stack}`, 
'red')) + console.log('='.repeat(96)) + } + }) + } + + static walk (dir, fn) { + const ini = new Set(readdirSync(dir)) + const files = new Set() + + while (ini.size !== 0) { + for (const d of ini) { + const path = resolve(dir, d) + ini.delete(d) // remove from set + const stats = statSync(path) + + if (stats.isDirectory()) { + for (const f of readdirSync(path)) { + ini.add(resolve(path, f)) + } + } else if (stats.isFile() && fn(d)) { + files.add(path) + } + } + } + + return [...files].sort() + } + + async run () { + const workerPath = fileURLToPath(join(import.meta.url, '../worker.mjs')) + /** @type {Set} */ + const activeWorkers = new Set() + let finishedFiles = 1 + let total = this.#files.length + + const files = this.#files.map((test) => { + const code = test.includes('.sub.') + ? handlePipes(readFileSync(test, 'utf-8'), this.#url) + : readFileSync(test, 'utf-8') + const meta = this.resolveMeta(code, test) + + if (meta.variant.length) { + total += meta.variant.length - 1 + } + + return [test, code, meta] + }) + + console.log('='.repeat(96)) + + for (const [test, code, meta] of files) { + console.log(`Started ${test}`) + + const status = resolveStatusPath(test, this.#status) + + if (status.file.skip || status.topLevel.skip) { + this.#stats.skipped += 1 + + console.log(colors(`[${finishedFiles}/${total}] SKIPPED - ${test}`, 'yellow')) + console.log('='.repeat(96)) + + finishedFiles++ + continue + } + + const start = performance.now() + + for (const variant of meta.variant.length ? meta.variant : ['']) { + const url = new URL(this.#url) + if (variant) { + url.search = variant + } + const worker = new Worker(workerPath, { + workerData: { + // Code to load before the test harness and tests. + initScripts: this.#initScripts, + // The test file. + test: code, + // Parsed META tag information + meta, + url: url.href, + path: test + } + }) + + let result, report + if (this.#appendReport) { + report = JSON.parse(readFileSync(this.#reportPath)) + + const fileUrl = new URL(`/${this.#folderName}${test.slice(this.#folderPath.length)}`, 'http://wpt') + fileUrl.pathname = fileUrl.pathname.replace(/\.js$/, '.html') + fileUrl.search = variant + + result = { + test: fileUrl.href.slice(fileUrl.origin.length), + subtests: [], + status: 'OK' + } + report.results.push(result) + } + + activeWorkers.add(worker) + // These values come directly from the web-platform-tests + const timeout = meta.timeout === 'long' ? 60_000 : 10_000 + + worker.on('message', (message) => { + if (message.type === 'result') { + this.handleIndividualTestCompletion(message, status, test, meta, result) + } else if (message.type === 'completion') { + this.handleTestCompletion(worker) + } else if (message.type === 'error') { + this.#uncaughtExceptions.push({ error: message.error, test }) + this.#stats.failed += 1 + this.#stats.success -= 1 + } + }) + + try { + await once(worker, 'exit', { + signal: AbortSignal.timeout(timeout) + }) + + console.log(colors(`[${finishedFiles}/${total}] PASSED - ${test}`, 'green')) + if (variant) console.log('Variant:', variant) + console.log(`Test took ${(performance.now() - start).toFixed(2)}ms`) + console.log('='.repeat(96)) + } catch (e) { + console.log(`${test} timed out after ${timeout}ms`) + } finally { + if (result?.subtests.length > 0) { + writeFileSync(this.#reportPath, JSON.stringify(report)) + } + + finishedFiles++ + activeWorkers.delete(worker) + } + } + } + + this.handleRunnerCompletion() + } + + /** + * Called after a test has succeeded or failed. 
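+ * Updates the pass/fail counters and, when WPT reporting is enabled, records the subtest result; unexpected failures set a non-zero exit code.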
+ */ + handleIndividualTestCompletion (message, status, path, meta, wptResult) { + const { file, topLevel } = status + + if (message.type === 'result') { + this.#stats.completed += 1 + + if (message.result.status === 1) { + this.#stats.failed += 1 + + wptResult?.subtests.push({ + status: 'FAIL', + name: sanitizeUnpairedSurrogates(message.result.name), + message: sanitizeUnpairedSurrogates(message.result.message) + }) + + const name = normalizeName(message.result.name) + + if (file.flaky?.includes(name)) { + this.#stats.expectedFailures += 1 + } else if (file.allowUnexpectedFailures || topLevel.allowUnexpectedFailures || file.fail?.includes(name)) { + if (!file.allowUnexpectedFailures && !topLevel.allowUnexpectedFailures) { + if (Array.isArray(file.fail)) { + this.#statusOutput[path] ??= [] + this.#statusOutput[path].push(name) + } + } + + this.#stats.expectedFailures += 1 + } else { + process.exitCode = 1 + console.error(message.result) + } + } else { + wptResult?.subtests.push({ + status: 'PASS', + name: sanitizeUnpairedSurrogates(message.result.name) + }) + this.#stats.success += 1 + } + } + } + + /** + * Called after all the tests in a worker are completed. + * @param {Worker} worker + */ + handleTestCompletion (worker) { + worker.terminate() + } + + /** + * Called after every test has completed. + */ + handleRunnerCompletion () { + console.log(this.#statusOutput) // tests that failed + + this.emit('completion') + const { completed, failed, success, expectedFailures, skipped } = this.#stats + console.log( + `[${this.#folderName}]: ` + + `Completed: ${completed}, failed: ${failed}, success: ${success}, ` + + `expected failures: ${expectedFailures}, ` + + `unexpected failures: ${failed - expectedFailures}, ` + + `skipped: ${skipped}` + ) + + process.exit(0) + } + + addInitScript (code) { + this.#initScripts.push(code) + } + + /** + * Parses META tags and resolves any script file paths. + * @param {string} code + * @param {string} path The absolute path of the test + */ + resolveMeta (code, path) { + const meta = parseMeta(code) + const scripts = meta.scripts.map((filePath) => { + let content = '' + + if (filePath === '/resources/WebIDLParser.js') { + // See https://github.com/web-platform-tests/wpt/pull/731 + return readFileSync(join(testPath, '/resources/webidl2/lib/webidl2.js'), 'utf-8') + } else if (isAbsolute(filePath)) { + content = readFileSync(join(testPath, filePath), 'utf-8') + } else { + content = readFileSync(resolve(path, '..', filePath), 'utf-8') + } + + // If the file has any built-in pipes. + if (filePath.includes('.sub.')) { + content = handlePipes(content, this.#url) + } + + return content + }) + + return { + ...meta, + resourcePaths: meta.scripts, + scripts + } + } +} diff --git a/test/wpt/runner/util.mjs b/test/wpt/runner/util.mjs new file mode 100644 index 0000000..ec284df --- /dev/null +++ b/test/wpt/runner/util.mjs @@ -0,0 +1,172 @@ +import assert from 'node:assert' +import { exit } from 'node:process' +import { inspect } from 'node:util' +import tty from 'node:tty' +import { sep } from 'node:path' + +/** + * Parse the `Meta:` tags sometimes included in tests. + * These can include resources to inject, how long it should + * take to timeout, and which globals to expose. 
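+ * Parsing stops at the first line that is not a `// META:` comment.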
+ * @example + * // META: timeout=long + * // META: global=window,worker + * // META: script=/common/utils.js + * // META: script=/common/get-host-info.sub.js + * // META: script=../request/request-error.js + * @see https://nodejs.org/api/readline.html#readline_example_read_file_stream_line_by_line + * @param {string} fileContents + */ +export function parseMeta (fileContents) { + const lines = fileContents.split(/\r?\n/g) + + const meta = { + /** @type {string|null} */ + timeout: null, + /** @type {string[]} */ + global: [], + /** @type {string[]} */ + scripts: [], + /** @type {string[]} */ + variant: [] + } + + for (const line of lines) { + if (!line.startsWith('// META: ')) { + break + } + + const groups = /^\/\/ META: (?<type>.*?)=(?<match>.*)$/.exec(line)?.groups + + if (!groups) { + console.log(`Failed to parse META tag: ${line}`) + exit(1) + } + + switch (groups.type) { + case 'variant': + meta[groups.type].push(groups.match) + break + case 'title': + case 'timeout': { + meta[groups.type] = groups.match + break + } + case 'global': { + // window,worker -> ['window', 'worker'] + meta.global.push(...groups.match.split(',')) + break + } + case 'script': { + // A relative or absolute file path to the resources + // needed for the current test. + meta.scripts.push(groups.match) + break + } + default: { + console.log(`Unknown META tag: ${groups.type}`) + exit(1) + } + } + } + + return meta +} + +/** + * @param {string} sub + */ +function parseSubBlock (sub) { + const subName = sub.includes('[') ? sub.slice(0, sub.indexOf('[')) : sub + const options = sub.matchAll(/\[(.*?)\]/gm) + + return { + sub: subName, + options: [...options].map(match => match[1]) + } +} + +/** + * @see https://web-platform-tests.org/writing-tests/server-pipes.html?highlight=sub#built-in-pipes + * @param {string} code + * @param {string} url + */ +export function handlePipes (code, url) { + const server = new URL(url) + + // "Substitutions are marked in a file using a block delimited by + // {{ and }}. Inside the block the following variables are available:" + return code.replace(/{{(.*?)}}/gm, (_, match) => { + const { sub } = parseSubBlock(match) + + switch (sub) { + // "The host name of the server excluding any subdomain part." + // eslint-disable-next-line no-fallthrough + case 'host': + // "The domain name of a particular subdomain e.g. + // {{domains[www]}} for the www subdomain." + // eslint-disable-next-line no-fallthrough + case 'domains': + // "The domain name of a particular subdomain for a particular host. + // The first key may be empty (designating the “default” host) or + // the value alt; i.e., {{hosts[alt][]}} (designating the alternate + // host)." + // eslint-disable-next-line no-fallthrough + case 'hosts': { + return 'localhost' + } + // "The port number of servers, by protocol e.g. {{ports[http][0]}} + // for the first (and, depending on setup, possibly only) http server" + case 'ports': { + return server.port + } + default: { + throw new TypeError(`Unknown substitute "${sub}".`) + } + } + }) +} + +/** + * Some test names may contain characters that JSON cannot handle.
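+ * The vertical tab character is escaped to the literal string '\x0B' so the name can be written to the report.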
+ * @param {string} name + */ +export function normalizeName (name) { + return name.replace(/(\v)/g, (_, match) => { + switch (inspect(match)) { + case '\'\\x0B\'': return '\\x0B' + default: return match + } + }) +} + +export function colors (str, color) { + assert(Object.hasOwn(inspect.colors, color), `Missing color ${color}`) + + if (!tty.WriteStream.prototype.hasColors()) { + return str + } + + const [start, end] = inspect.colors[color] + + return `\u001b[${start}m${str}\u001b[${end}m` +} + +/** @param {string} path */ +export function resolveStatusPath (path, status) { + const paths = path + .slice(process.cwd().length + sep.length) + .split(sep) + .slice(3) // [test, wpt, tests, fetch, b, c.js] -> [fetch, b, c.js] + + // skip the first folder name + for (let i = 1; i < paths.length - 1; i++) { + status = status[paths[i]] + + if (!status) { + break + } + } + + return { topLevel: status ?? {}, file: status?.[paths.at(-1)] ?? {} } +} diff --git a/test/wpt/runner/worker.mjs b/test/wpt/runner/worker.mjs new file mode 100644 index 0000000..90bfcf6 --- /dev/null +++ b/test/wpt/runner/worker.mjs @@ -0,0 +1,164 @@ +import buffer from 'node:buffer' +import { readFileSync } from 'node:fs' +import { join } from 'node:path' +import { setFlagsFromString } from 'node:v8' +import { runInNewContext, runInThisContext } from 'node:vm' +import { parentPort, workerData } from 'node:worker_threads' +import { + fetch, File, FileReader, FormData, Headers, Request, Response, setGlobalOrigin +} from '../../../index.js' +import { CloseEvent } from '../../../lib/websocket/events.js' +import { WebSocket } from '../../../lib/websocket/websocket.js' +import { Cache } from '../../../lib/cache/cache.js' +import { CacheStorage } from '../../../lib/cache/cachestorage.js' +import { kConstruct } from '../../../lib/cache/symbols.js' + +const { initScripts, meta, test, url, path } = workerData + +process.on('uncaughtException', (err) => { + parentPort.postMessage({ + type: 'error', + error: { + message: err.message, + name: err.name, + stack: err.stack + } + }) +}) + +const basePath = join(process.cwd(), 'test/wpt/tests') +const urlPath = path.slice(basePath.length) + +const globalPropertyDescriptors = { + writable: true, + enumerable: false, + configurable: true +} + +Object.defineProperties(globalThis, { + fetch: { + ...globalPropertyDescriptors, + enumerable: true, + value: fetch + }, + File: { + ...globalPropertyDescriptors, + value: buffer.File ?? 
File + }, + FormData: { + ...globalPropertyDescriptors, + value: FormData + }, + Headers: { + ...globalPropertyDescriptors, + value: Headers + }, + Request: { + ...globalPropertyDescriptors, + value: Request + }, + Response: { + ...globalPropertyDescriptors, + value: Response + }, + FileReader: { + ...globalPropertyDescriptors, + value: FileReader + }, + WebSocket: { + ...globalPropertyDescriptors, + value: WebSocket + }, + CloseEvent: { + ...globalPropertyDescriptors, + value: CloseEvent + }, + Blob: { + ...globalPropertyDescriptors, + // See https://github.com/nodejs/node/pull/45659 + value: buffer.Blob + }, + caches: { + ...globalPropertyDescriptors, + value: new CacheStorage(kConstruct) + }, + Cache: { + ...globalPropertyDescriptors, + value: Cache + }, + CacheStorage: { + ...globalPropertyDescriptors, + value: CacheStorage + } +}) + +// self is required by testharness +// GLOBAL is required by self +runInThisContext(` + globalThis.self = globalThis + globalThis.GLOBAL = { + isWorker () { + return false + }, + isShadowRealm () { + return false + }, + isWindow () { + return false + } + } + globalThis.window = globalThis + globalThis.location = new URL('${urlPath.replace(/\\/g, '/')}', '${url}') + globalThis.Window = Object.getPrototypeOf(globalThis).constructor +`) + +if (meta.title) { + runInThisContext(`globalThis.META_TITLE = "${meta.title}"`) +} + +const harness = readFileSync(join(basePath, '/resources/testharness.js'), 'utf-8') +runInThisContext(harness) + +// add_*_callback comes from testharness +// stolen from node's wpt test runner +// eslint-disable-next-line no-undef +add_result_callback((result) => { + parentPort.postMessage({ + type: 'result', + result: { + status: result.status, + name: result.name, + message: result.message, + stack: result.stack + } + }) +}) + +// eslint-disable-next-line no-undef +add_completion_callback((_, status) => { + parentPort.postMessage({ + type: 'completion', + status + }) +}) + +setGlobalOrigin(globalThis.location) + +// Inject any script the user provided before +// running the tests. +for (const initScript of initScripts) { + runInThisContext(initScript) +} + +// Inject any files from the META tags +for (const script of meta.scripts) { + runInThisContext(script) +} + +// A few tests require gc, which can't be passed to a Worker. +// see https://github.com/nodejs/node/issues/16595#issuecomment-340288680 +setFlagsFromString('--expose-gc') +globalThis.gc = runInNewContext('gc') + +// Finally, run the test. 
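+// The test source provided via workerData executes in this context, picking up the globals and testharness callbacks registered above.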
+runInThisContext(test) diff --git a/test/wpt/server/routes/network-partition-key.mjs b/test/wpt/server/routes/network-partition-key.mjs new file mode 100644 index 0000000..f1203f7 --- /dev/null +++ b/test/wpt/server/routes/network-partition-key.mjs @@ -0,0 +1,111 @@ +const stash = new Map() + +/** + * @see https://github.com/web-platform-tests/wpt/blob/master/fetch/connection-pool/resources/network-partition-key.py + * @param {Parameters[0]} req + * @param {Parameters[1]} res + * @param {URL} url + */ +export function route (req, res, { searchParams, port }) { + res.setHeader('Cache-Control', 'no-store') + + const dispatch = searchParams.get('dispatch') + const uuid = searchParams.get('uuid') + const partitionId = searchParams.get('partition_id') + + if (!uuid || !dispatch || !partitionId) { + res.statusCode = 404 + res.end('Invalid query parameters') + return + } + + let testFailed = false + let requestCount = 0 + let connectionCount = 0 + + if (searchParams.get('nocheck_partition') !== 'True') { + const addressKey = `${req.socket.localAddress}|${port}` + const serverState = stash.get(uuid) ?? { + testFailed: false, + requestCount: 0, + connectionCount: 0 + } + + stash.delete(uuid) + requestCount = serverState.requestCount + 1 + serverState.requestCount = requestCount + + if (Object.hasOwn(serverState, addressKey)) { + if (serverState[addressKey] !== partitionId) { + serverState.testFailed = true + } + } else { + connectionCount = serverState.connectionCount + 1 + serverState.connectionCount = connectionCount + } + + serverState[addressKey] = partitionId + testFailed = serverState.testFailed + stash.set(uuid, serverState) + } + + const origin = req.headers.origin + if (origin) { + res.setHeader('Access-Control-Allow-Origin', origin) + res.setHeader('Access-Control-Allow-Credentials', 'true') + } + + if (req.method === 'OPTIONS') { + return handlePreflight(req, res) + } + + if (dispatch === 'fetch_file') { + res.end() + return + } + + if (dispatch === 'check_partition') { + const status = searchParams.get('status') ?? 200 + + if (testFailed) { + res.statusCode = status + res.end('Multiple partition IDs used on a socket') + return + } + + let body = 'ok' + if (searchParams.get('addcounter')) { + body += `. Request was sent ${requestCount} times. 
${connectionCount} connections were created.` + res.statusCode = status + res.end(body) + return + } + } + + if (dispatch === 'clean_up') { + stash.delete(uuid) + res.statusCode = 200 + if (testFailed) { + res.end('Test failed, but cleanup completed.') + } else { + res.end('cleanup complete') + } + + return + } + + res.statusCode = 404 + res.end('Unrecognized dispatch parameter: ' + dispatch) +} + +/** + * @param {Parameters[0]} req + * @param {Parameters[1]} res + */ +function handlePreflight (req, res) { + res.statusCode = 200 + res.setHeader('Access-Control-Allow-Methods', 'GET') + res.setHeader('Access-Control-Allow-Headers', 'header-to-force-cors') + res.setHeader('Access-Control-Max-Age', '86400') + res.end('Preflight request') +} diff --git a/test/wpt/server/routes/redirect.mjs b/test/wpt/server/routes/redirect.mjs new file mode 100644 index 0000000..46770cf --- /dev/null +++ b/test/wpt/server/routes/redirect.mjs @@ -0,0 +1,104 @@ +import { setTimeout } from 'timers/promises' + +const stash = new Map() + +/** + * @see https://github.com/web-platform-tests/wpt/blob/master/fetch/connection-pool/resources/network-partition-key.py + * @param {Parameters[0]} req + * @param {Parameters[1]} res + * @param {URL} fullUrl + */ +export async function route (req, res, fullUrl) { + const { searchParams } = fullUrl + + let stashedData = { count: 0, preflight: 0 } + let status = 302 + res.setHeader('Content-Type', 'text/plain') + res.setHeader('Cache-Control', 'no-cache') + res.setHeader('Pragma', 'no-cache') + + if (Object.hasOwn(req.headers, 'origin')) { + res.setHeader('Access-Control-Allow-Origin', req.headers.origin ?? '') + res.setHeader('Access-Control-Allow-Credentials', 'true') + } else { + res.setHeader('Access-Control-Allow-Origin', '*') + } + + let token = null + if (searchParams.has('token')) { + token = searchParams.get('token') + const data = stash.get(token) + stash.delete(token) + if (data) { + stashedData = data + } + } + + if (req.method === 'OPTIONS') { + if (searchParams.has('allow_headers')) { + res.setHeader('Access-Control-Allow-Headers', searchParams.get('allow_headers')) + } + + stashedData.preflight = '1' + + if (!searchParams.has('redirect_preflight')) { + if (token) { + stash.set(searchParams.get('token'), stashedData) + } + + res.statusCode = 200 + res.end('') + return + } + } + + if (searchParams.has('redirect_status')) { + status = parseInt(searchParams.get('redirect_status')) + } + + stashedData.count += 1 + + if (searchParams.has('location')) { + let url = decodeURIComponent(searchParams.get('location')) + + if (!searchParams.has('simple')) { + const scheme = new URL(url, fullUrl).protocol + + if (scheme === 'http:' || scheme === 'https:') { + url += url.includes('?') ? '&' : '?' + + for (const [key, value] of searchParams) { + url += '&' + encodeURIComponent(key) + '=' + encodeURIComponent(value) + } + + url += '&count=' + stashedData.count + } + } + + res.setHeader('location', url) + } + + if (searchParams.has('redirect_referrerpolicy')) { + res.setHeader('Referrer-Policy', searchParams.get('redirect_referrerpolicy')) + } + + if (searchParams.has('delay')) { + await setTimeout(parseFloat(searchParams.get('delay') ?? 
0)) + } + + if (token) { + stash.set(searchParams.get('token'), stashedData) + + if (searchParams.has('max_count')) { + const maxCount = parseInt(searchParams.get('max_count')) + + if (stashedData.count > maxCount) { + res.end((stashedData.count - 1).toString()) + return + } + } + } + + res.statusCode = status + res.end('') +} diff --git a/test/wpt/server/server.mjs b/test/wpt/server/server.mjs new file mode 100644 index 0000000..82b9080 --- /dev/null +++ b/test/wpt/server/server.mjs @@ -0,0 +1,397 @@ +import { once } from 'node:events' +import { createServer } from 'node:http' +import { join } from 'node:path' +import process from 'node:process' +import { fileURLToPath } from 'node:url' +import { createReadStream, readFileSync, existsSync } from 'node:fs' +import { setTimeout as sleep } from 'node:timers/promises' +import { route as networkPartitionRoute } from './routes/network-partition-key.mjs' +import { route as redirectRoute } from './routes/redirect.mjs' + +const tests = fileURLToPath(join(import.meta.url, '../../tests')) + +// https://web-platform-tests.org/tools/wptserve/docs/stash.html +class Stash extends Map { + take (key) { + if (this.has(key)) { + const value = this.get(key) + + this.delete(key) + return value.value + } + } + + put (key, value, path) { + this.set(key, { value, path }) + } +} + +const stash = new Stash() + +const server = createServer(async (req, res) => { + const fullUrl = new URL(req.url, `http://localhost:${server.address().port}`) + + switch (fullUrl.pathname) { + case '/service-workers/cache-storage/resources/blank.html': { + res.setHeader('content-type', 'text/html') + // fall through + } + case '/service-workers/cache-storage/resources/simple.txt': + case '/fetch/content-encoding/resources/foo.octetstream.gz': + case '/fetch/content-encoding/resources/foo.text.gz': + case '/fetch/api/resources/cors-top.txt': + case '/fetch/api/resources/top.txt': + case '/mimesniff/mime-types/resources/generated-mime-types.json': + case '/mimesniff/mime-types/resources/mime-types.json': + case '/interfaces/dom.idl': + case '/interfaces/url.idl': + case '/interfaces/html.idl': + case '/interfaces/fetch.idl': + case '/interfaces/FileAPI.idl': + case '/interfaces/websockets.idl': + case '/interfaces/referrer-policy.idl': + case '/xhr/resources/utf16-bom.json': + case '/fetch/data-urls/resources/base64.json': + case '/fetch/data-urls/resources/data-urls.json': + case '/fetch/api/resources/empty.txt': + case '/fetch/api/resources/data.json': { + // If this specific resources requires custom headers + const customHeadersPath = join(tests, fullUrl.pathname + '.headers') + if (existsSync(customHeadersPath)) { + const headers = readFileSync(customHeadersPath, 'utf-8') + .trim() + .split(/\r?\n/g) + .map((h) => h.split(': ')) + + for (const [key, value] of headers) { + if (!key || !value) { + console.warn(`Skipping ${key}:${value} header pair`) + continue + } + res.setHeader(key, value) + } + } + + // https://github.com/web-platform-tests/wpt/blob/6ae3f702a332e8399fab778c831db6b7dca3f1c6/fetch/api/resources/data.json + return createReadStream(join(tests, fullUrl.pathname)) + .on('end', () => res.end()) + .pipe(res) + } + case '/fetch/api/resources/trickle.py': { + // Note: python's time.sleep(...) takes seconds, while setTimeout + // takes ms. + const delay = parseFloat(fullUrl.searchParams.get('ms') ?? 500) + const count = parseInt(fullUrl.searchParams.get('count') ?? 
50) + + // eslint-disable-next-line no-unused-vars + for await (const chunk of req); // read request body + + await sleep(delay) + + if (!fullUrl.searchParams.has('notype')) { + res.setHeader('Content-type', 'text/plain') + } + + res.statusCode = 200 + await sleep(delay) + + for (let i = 0; i < count; i++) { + res.write('TEST_TRICKLE\n') + await sleep(delay) + } + + res.end() + break + } + case '/fetch/api/resources/infinite-slow-response.py': { + // https://github.com/web-platform-tests/wpt/blob/master/fetch/api/resources/infinite-slow-response.py + const stateKey = fullUrl.searchParams.get('stateKey') ?? '' + const abortKey = fullUrl.searchParams.get('abortKey') ?? '' + + if (stateKey) { + stash.put(stateKey, 'open', fullUrl.pathname) + } + + res.setHeader('Content-Type', 'text/plain') + res.statusCode = 200 + + res.write('.'.repeat(2048)) + + while (true) { + if (!res.write('.')) { + break + } else if (abortKey && stash.take(abortKey)) { + break + } + + await sleep(100) + } + + if (stateKey) { + stash.put(stateKey, 'closed', fullUrl.pathname) + } + + res.end() + return + } + case '/fetch/api/resources/stash-take.py': { + // https://github.com/web-platform-tests/wpt/blob/6ae3f702a332e8399fab778c831db6b7dca3f1c6/fetch/api/resources/stash-take.py + + const key = fullUrl.searchParams.get('key') + res.setHeader('Access-Control-Allow-Origin', '*') + + const took = stash.take(key, fullUrl.pathname) ?? null + + res.write(JSON.stringify(took)) + return res.end() + } + case '/fetch/api/resources/echo-content.py': { + res.setHeader('X-Request-Method', req.method) + res.setHeader('X-Request-Content-Length', req.headers['content-length'] ?? 'NO') + res.setHeader('X-Request-Content-Type', req.headers['content-type'] ?? 'NO') + res.setHeader('Content-Type', 'text/plain') + + for await (const chunk of req) { + res.write(chunk) + } + + res.end() + break + } + case '/fetch/api/resources/status.py': { + const code = parseInt(fullUrl.searchParams.get('code') ?? 200) + const text = fullUrl.searchParams.get('text') ?? 'OMG' + const content = fullUrl.searchParams.get('content') ?? '' + const type = fullUrl.searchParams.get('type') ?? '' + res.statusCode = code + res.statusMessage = text + res.setHeader('Content-Type', type) + res.setHeader('X-Request-Method', req.method) + res.end(content) + break + } + case '/fetch/api/resources/inspect-headers.py': { + const query = fullUrl.searchParams + const checkedHeaders = query.get('headers') + ?.split('|') + .map(h => h.toLowerCase()) ?? [] + + if (query.has('headers')) { + for (const header of checkedHeaders) { + if (Object.hasOwn(req.headers, header)) { + res.setHeader(`x-request-${header}`, req.headers[header] ?? '') + } + } + } + + if (query.has('cors')) { + if (Object.hasOwn(req.headers, 'origin')) { + res.setHeader('Access-Control-Allow-Origin', req.headers.origin ?? 
'') + } else { + res.setHeader('Access-Control-Allow-Origin', '*') + } + + res.setHeader('Access-Control-Allow-Credentials', 'true') + res.setHeader('Access-Control-Allow-Methods', 'GET, POST, HEAD') + const exposedHeaders = checkedHeaders.map(h => `x-request-${h}`).join(', ') + res.setHeader('Access-Control-Expose-Headers', exposedHeaders) + if (query.has('allow_headers')) { + res.setHeader('Access-Control-Allow-Headers', query.get('allowed_headers')) + } else { + res.setHeader('Access-Control-Allow-Headers', Object.keys(req.headers).join(', ')) + } + } + + res.setHeader('content-type', 'text/plain') + res.end('') + break + } + case '/xhr/resources/parse-headers.py': { + if (fullUrl.searchParams.has('my-custom-header')) { + const val = fullUrl.searchParams.get('my-custom-header').toLowerCase() + // res.setHeader does validation which may prevent some tests from running. + res.socket.write( + `HTTP/1.1 200 OK\r\nmy-custom-header: ${val}\r\n\r\n` + ) + } + res.end('') + break + } + case '/fetch/api/resources/bad-chunk-encoding.py': { + const query = fullUrl.searchParams + + const delay = parseFloat(query.get('ms') ?? 1000) + const count = parseInt(query.get('count') ?? 50) + await sleep(delay) + res.socket.write( + 'HTTP/1.1 200 OK\r\ntransfer-encoding: chunked\r\n\r\n' + ) + await sleep(delay) + + for (let i = 0; i < count; i++) { + res.socket.write('a\r\nTEST_CHUNK\r\n') + await sleep(delay) + } + + res.end('garbage') + break + } + case '/xhr/resources/headers-www-authenticate.asis': + case '/xhr/resources/headers-some-are-empty.asis': + case '/xhr/resources/headers-basic': + case '/xhr/resources/headers-double-empty.asis': + case '/xhr/resources/header-content-length-twice.asis': + case '/xhr/resources/header-content-length.asis': { + let asis = readFileSync(join(tests, fullUrl.pathname), 'utf-8') + asis = asis.replace(/\n/g, '\r\n') + asis = `${asis}\r\n` + + res.socket.write(asis) + res.end() + break + } + case '/fetch/connection-pool/resources/network-partition-key.py': { + return networkPartitionRoute(req, res, fullUrl) + } + case '/resources/top.txt': { + return createReadStream(join(tests, 'fetch/api/', fullUrl.pathname)) + .on('end', () => res.end()) + .pipe(res) + } + case '/fetch/api/resources/redirect.py': { + return redirectRoute(req, res, fullUrl) + } + case '/fetch/api/resources/method.py': { + if (fullUrl.searchParams.has('cors')) { + res.setHeader('Access-Control-Allow-Origin', '*') + res.setHeader('Access-Control-Allow-Credentials', 'true') + res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, FOO') + res.setHeader('Access-Control-Allow-Headers', 'x-test, x-foo') + res.setHeader('Access-Control-Expose-Headers', 'x-request-method') + } + + res.setHeader('x-request-method', req.method) + res.setHeader('x-request-content-type', req.headers['content-type'] ?? 'NO') + res.setHeader('x-request-content-length', req.headers['content-length'] ?? 'NO') + res.setHeader('x-request-content-encoding', req.headers['content-encoding'] ?? 'NO') + res.setHeader('x-request-content-language', req.headers['content-language'] ?? 'NO') + res.setHeader('x-request-content-location', req.headers['content-location'] ?? 
'NO') + + for await (const chunk of req) { + res.write(chunk) + } + + res.end() + return + } + case '/fetch/api/resources/clean-stash.py': { + const token = fullUrl.searchParams.get('token') + const took = stash.take(token) + + if (took) { + res.end('1') + } else { + res.end('0') + } + + break + } + case '/fetch/content-encoding/resources/bad-gzip-body.py': { + res.setHeader('Content-Encoding', 'gzip') + res.end('not actually gzip') + break + } + case '/fetch/api/resources/dump-authorization-header.py': { + res.setHeader('Content-Type', 'text/html') + res.setHeader('Cache-Control', 'no-cache') + + if (req.headers.origin) { + res.setHeader('Access-Control-Allow-Origin', req.headers.origin) + res.setHeader('Access-Control-Allow-Credentials', 'true') + } else { + res.setHeader('Access-Control-Allow-Origin', '*') + } + + res.setHeader('Access-Control-Allow-Headers', 'Authorization') + res.statusCode = 200 + + if (req.headers.authorization) { + res.end(req.headers.authorization) + return + } + + res.end('none') + break + } + case '/xhr/resources/echo-headers.py': { + res.statusCode = 200 + res.setHeader('Content-Type', 'text/plain') + + // wpt runner sends this as 1 chunk + let body = '' + + for (let i = 0; i < req.rawHeaders.length; i += 2) { + const key = req.rawHeaders[i] + const value = req.rawHeaders[i + 1] + + body += `${key}: ${value}` + } + + res.end(body) + break + } + case '/fetch/api/resources/authentication.py': { + const auth = Buffer.from(req.headers.authorization.slice('Basic '.length), 'base64') + const [user, password] = auth.toString().split(':') + + if (user === 'user' && password === 'password') { + res.end('Authentication done') + return + } + + const realm = fullUrl.searchParams.get('realm') ?? 'test' + + res.statusCode = 401 + res.setHeader('WWW-Authenticate', `Basic realm="${realm}"`) + res.end('Please login with credentials \'user\' and \'password\'') + return + } + case '/fetch/api/resources/redirect-empty-location.py': { + res.setHeader('location', '') + res.statusCode = 302 + res.end('') + return + } + case '/service-workers/cache-storage/resources/fetch-status.py': { + const status = Number(fullUrl.searchParams.get('status')) + + res.statusCode = status + res.end() + return + } + default: { + res.statusCode = 200 + res.end(fullUrl.toString()) + } + } +}).listen(0) + +await once(server, 'listening') + +const send = (message) => { + if (typeof process.send === 'function') { + process.send(message) + } +} + +const url = `http://localhost:${server.address().port}` +console.log('server opened ' + url) +send({ server: url }) + +process.on('message', (message) => { + if (message === 'shutdown') { + server.close((err) => process.exit(err ? 1 : 0)) + } +}) + +export { server } diff --git a/test/wpt/server/websocket.mjs b/test/wpt/server/websocket.mjs new file mode 100644 index 0000000..cc8ce78 --- /dev/null +++ b/test/wpt/server/websocket.mjs @@ -0,0 +1,46 @@ +import { WebSocketServer } from 'ws' +import { server } from './server.mjs' + +// The file router server handles sending the url, closing, +// and sending messages back to the main process for us. +// The types for WebSocketServer don't include a `request` +// event, so I'm unsure if we can stop relying on server. 
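+// handleProtocols joins every subprotocol offered by the client into the accepted-protocol response.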
+ +const wss = new WebSocketServer({ + server, + handleProtocols: (protocols) => [...protocols].join(', ') +}) + +wss.on('connection', (ws, request) => { + ws.on('message', (data, isBinary) => { + const str = data.toString('utf-8') + + if (request.url === '/receive-many-with-backpressure') { + setTimeout(() => { + ws.send(str.length.toString(), { binary: false }) + }, 100) + return + } + + if (str === 'Goodbye') { + // Close-server-initiated-close.any.js sends a "Goodbye" message + // when it wants the server to close the connection. + ws.close(1000) + return + } + + ws.send(data, { binary: isBinary }) + }) + + // Some tests, such as `Create-blocked-port.any.js` do NOT + // close the connection automatically. + const timeout = setTimeout(() => { + if (ws.readyState !== ws.CLOSED && ws.readyState !== ws.CLOSING) { + ws.close() + } + }, 2500) + + ws.on('close', () => { + clearTimeout(timeout) + }) +}) diff --git a/test/wpt/start-FileAPI.mjs b/test/wpt/start-FileAPI.mjs new file mode 100644 index 0000000..5a92ab8 --- /dev/null +++ b/test/wpt/start-FileAPI.mjs @@ -0,0 +1,26 @@ +import { WPTRunner } from './runner/runner.mjs' +import { join } from 'path' +import { fileURLToPath } from 'url' +import { fork } from 'child_process' +import { on } from 'events' + +const serverPath = fileURLToPath(join(import.meta.url, '../server/server.mjs')) + +const child = fork(serverPath, [], { + stdio: ['pipe', 'pipe', 'pipe', 'ipc'] +}) + +child.on('exit', (code) => process.exit(code)) + +for await (const [message] of on(child, 'message')) { + if (message.server) { + const runner = new WPTRunner('FileAPI', message.server) + runner.run() + + runner.once('completion', () => { + if (child.connected) { + child.send('shutdown') + } + }) + } +} diff --git a/test/wpt/start-cacheStorage.mjs b/test/wpt/start-cacheStorage.mjs new file mode 100644 index 0000000..a630e05 --- /dev/null +++ b/test/wpt/start-cacheStorage.mjs @@ -0,0 +1,26 @@ +import { WPTRunner } from './runner/runner.mjs' +import { join } from 'path' +import { fileURLToPath } from 'url' +import { fork } from 'child_process' +import { on } from 'events' + +const serverPath = fileURLToPath(join(import.meta.url, '../server/server.mjs')) + +const child = fork(serverPath, [], { + stdio: ['pipe', 'pipe', 'pipe', 'ipc'] +}) + +child.on('exit', (code) => process.exit(code)) + +for await (const [message] of on(child, 'message')) { + if (message.server) { + const runner = new WPTRunner('service-workers/cache-storage', message.server) + runner.run() + + runner.once('completion', () => { + if (child.connected) { + child.send('shutdown') + } + }) + } +} diff --git a/test/wpt/start-fetch.mjs b/test/wpt/start-fetch.mjs new file mode 100644 index 0000000..59c9f83 --- /dev/null +++ b/test/wpt/start-fetch.mjs @@ -0,0 +1,31 @@ +import { WPTRunner } from './runner/runner.mjs' +import { join } from 'path' +import { fileURLToPath } from 'url' +import { fork } from 'child_process' +import { on } from 'events' + +const { WPT_REPORT } = process.env + +const serverPath = fileURLToPath(join(import.meta.url, '../server/server.mjs')) + +const child = fork(serverPath, [], { + stdio: ['pipe', 'pipe', 'pipe', 'ipc'] +}) + +child.on('exit', (code) => process.exit(code)) + +for await (const [message] of on(child, 'message')) { + if (message.server) { + const runner = new WPTRunner('fetch', message.server, { + appendReport: !!WPT_REPORT, + reportPath: WPT_REPORT + }) + runner.run() + + runner.once('completion', () => { + if (child.connected) { + child.send('shutdown') + } + }) + } +} diff 
--git a/test/wpt/start-mimesniff.mjs b/test/wpt/start-mimesniff.mjs new file mode 100644 index 0000000..fbdb9bf --- /dev/null +++ b/test/wpt/start-mimesniff.mjs @@ -0,0 +1,31 @@ +import { WPTRunner } from './runner/runner.mjs' +import { join } from 'path' +import { fileURLToPath } from 'url' +import { fork } from 'child_process' +import { on } from 'events' + +const { WPT_REPORT } = process.env + +const serverPath = fileURLToPath(join(import.meta.url, '../server/server.mjs')) + +const child = fork(serverPath, [], { + stdio: ['pipe', 'pipe', 'pipe', 'ipc'] +}) + +child.on('exit', (code) => process.exit(code)) + +for await (const [message] of on(child, 'message')) { + if (message.server) { + const runner = new WPTRunner('mimesniff', message.server, { + appendReport: !!WPT_REPORT, + reportPath: WPT_REPORT + }) + runner.run() + + runner.once('completion', () => { + if (child.connected) { + child.send('shutdown') + } + }) + } +} diff --git a/test/wpt/start-websockets.mjs b/test/wpt/start-websockets.mjs new file mode 100644 index 0000000..79aa297 --- /dev/null +++ b/test/wpt/start-websockets.mjs @@ -0,0 +1,47 @@ +import { WPTRunner } from './runner/runner.mjs' +import { join } from 'path' +import { fileURLToPath } from 'url' +import { fork } from 'child_process' +import { on } from 'events' + +const { WPT_REPORT } = process.env + +function isGlobalAvailable () { + if (typeof WebSocket !== 'undefined') { + return true + } + + const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) + + // TODO: keep this up to date when backports to earlier majors happen + return nodeMajor >= 21 || (nodeMajor === 20 && nodeMinor >= 10) +} + +if (process.env.CI) { + // TODO(@KhafraDev): figure out *why* these tests are flaky in the CI. + // process.exit(0) +} + +const serverPath = fileURLToPath(join(import.meta.url, '../server/websocket.mjs')) + +const child = fork(serverPath, [], { + stdio: ['pipe', 'pipe', 'pipe', 'ipc'] +}) + +child.on('exit', (code) => process.exit(code)) + +for await (const [message] of on(child, 'message')) { + if (message.server) { + const runner = new WPTRunner('websockets', message.server, { + appendReport: !!WPT_REPORT && isGlobalAvailable(), + reportPath: WPT_REPORT + }) + runner.run() + + runner.once('completion', () => { + if (child.connected) { + child.send('shutdown') + } + }) + } +} diff --git a/test/wpt/start-xhr.mjs b/test/wpt/start-xhr.mjs new file mode 100644 index 0000000..08f82eb --- /dev/null +++ b/test/wpt/start-xhr.mjs @@ -0,0 +1,12 @@ +import { WPTRunner } from './runner/runner.mjs' +import { once } from 'events' + +const { WPT_REPORT } = process.env + +const runner = new WPTRunner('xhr/formdata', 'http://localhost:3333', { + appendReport: !!WPT_REPORT, + reportPath: WPT_REPORT +}) +runner.run() + +await once(runner, 'completion') diff --git a/test/wpt/status/FileAPI.status.json b/test/wpt/status/FileAPI.status.json new file mode 100644 index 0000000..c64d255 --- /dev/null +++ b/test/wpt/status/FileAPI.status.json @@ -0,0 +1,75 @@ +{ + "file": { + "File-constructor.any.js": { + "flaky": [ + "Using type in File constructor: nonparsable" + ] + } + }, + "blob": { + "Blob-constructor.any.js": { + "skip": true + }, + "Blob-stream.any.js": { + "fail": [ + "Reading Blob.stream() with BYOB reader" + ] + } + }, + "url": { + "url-with-xhr.any.js": { + "skip": true + }, + "url-with-fetch.any.js": { + "note": "needs investigation", + "fail": [ + "Only exact matches should revoke URLs, using fetch", + "Revoke blob URL after creating Request, will fetch", + 
"Revoke blob URL after creating Request, then clone Request, will fetch" + ] + }, + "url-format.any.js": { + "fail": [ + "Origin of Blob URL matches our origin", + "Blob URL parses correctly", + "Origin of Blob URL matches our origin for Files" + ] + } + }, + "reading-data-section": { + "filereader_result.any.js": { + "note": "has to do with html microtask queue being different than queueMicrotask", + "skip": true + }, + "filereader_events.any.js": { + "note": "has to do with html microtask queue being different than queueMicrotask", + "fail": [ + "events are dispatched in the correct order for an empty blob", + "events are dispatched in the correct order for a non-empty blob" + ] + } + }, + "idlharness.any.js": { + "note": "These flaky tests only fail in < node v19; add in a way to mark them as such eventually", + "flaky": [ + "Blob interface: attribute size", + "Blob interface: attribute type", + "Blob interface: operation slice(optional long long, optional long long, optional DOMString)", + "Blob interface: operation stream()", + "Blob interface: operation text()", + "Blob interface: operation arrayBuffer()", + "URL interface: operation createObjectURL((Blob or MediaSource))", + "URL interface: operation revokeObjectURL(DOMString)" + ], + "fail": [ + "FileList interface: existence and properties of interface object", + "FileList interface object length", + "FileList interface object name", + "FileList interface: existence and properties of interface prototype object", + "FileList interface: existence and properties of interface prototype object's \"constructor\" property", + "FileList interface: existence and properties of interface prototype object's @@unscopables property", + "FileList interface: operation item(unsigned long)", + "FileList interface: attribute length" + ] + } +} diff --git a/test/wpt/status/fetch.status.json b/test/wpt/status/fetch.status.json new file mode 100644 index 0000000..5910bf3 --- /dev/null +++ b/test/wpt/status/fetch.status.json @@ -0,0 +1,457 @@ +{ + "api": { + "abort": { + "general.any.js": { + "note": "TODO(@KhafraDev): Clone aborts with original controller can probably be fixed", + "fail": [ + "Already aborted signal rejects immediately", + "Underlying connection is closed when aborting after receiving response - no-cors", + "Stream errors once aborted. 
Underlying connection closed.", + "Readable stream synchronously cancels with AbortError if aborted before reading", + "Clone aborts with original controller" + ] + }, + "cache.https.any.js": { + "note": "undici doesn't implement http caching", + "skip": true + } + }, + "basic": { + "conditional-get.any.js": { + "fail": [ + "Testing conditional GET with ETags" + ] + }, + "header-value-combining.any.js": { + "fail": [ + "response.headers.get('content-length') expects 0, 0", + "response.headers.get('foo-test') expects 1, 2, 3", + "response.headers.get('heya') expects , \\x0B\f, 1, , , 2" + ], + "flaky": [ + "response.headers.get('content-length') expects 0", + "response.headers.get('double-trouble') expects , ", + "response.headers.get('www-authenticate') expects 1, 2, 3, 4" + ] + }, + "integrity.sub.any.js": { + "fail": [ + "Empty string integrity for opaque response" + ] + }, + "keepalive.any.js": { + "note": "document is not defined", + "skip": true + }, + "mode-no-cors.sub.any.js": { + "note": "undici doesn't implement CORs", + "skip": true + }, + "mode-same-origin.any.js": { + "note": "undici doesn't respect RequestInit.mode", + "skip": true + }, + "referrer.any.js": { + "fail": [ + "origin-when-cross-origin policy on a cross-origin URL", + "origin-when-cross-origin policy on a cross-origin URL after same-origin redirection", + "origin-when-cross-origin policy on a same-origin URL after cross-origin redirection", + "origin-when-cross-origin policy on a same-origin URL" + ] + }, + "request-forbidden-headers.any.js": { + "note": "undici doesn't filter headers", + "skip": true + }, + "request-headers.any.js": { + "fail": [ + "Fetch with Chicken", + "Fetch with Chicken with body", + "Fetch with TacO and mode \"same-origin\" needs an Origin header", + "Fetch with TacO and mode \"cors\" needs an Origin header" + ] + }, + "request-referrer.any.js": { + "note": "TODO(@KhafraDev): url referrer test could probably be fixed", + "fail": [ + "about:client referrer", + "url referrer" + ] + }, + "request-upload.any.js": { + "fail": [ + "Fetch with POST with text body on 421 response should be retried once on new connection." 
+ ] + }, + "request-upload.h2.any.js": { + "note": "undici doesn't support http/2", + "skip": true + }, + "status.h2.any.js": { + "note": "undici doesn't support http/2", + "skip": true + }, + "stream-safe-creation.any.js": { + "note": "tests are very finnicky", + "fail": [ + "throwing Object.prototype.type accessor should not affect stream creation by 'fetch'", + "Object.prototype.type accessor returning invalid value should not affect stream creation by 'fetch'", + "throwing Object.prototype.highWaterMark accessor should not affect stream creation by 'fetch'", + "Object.prototype.highWaterMark accessor returning invalid value should not affect stream creation by 'fetch'" + ] + } + }, + "body": { + "mime-type.any.js": { + "note": "fails on all platforms, https://wpt.fyi/results/fetch/api/body/mime-type.any.html?label=master&label=experimental&product=chrome&product=firefox&product=safari&product=node.js&product=deno&aligned", + "fail": [ + "Response: Extract a MIME type with clone" + ] + } + }, + "cors": { + "note": "undici doesn't implement CORs", + "skip": true + }, + "credentials": { + "authentication-redirection.any.js": { + "note": "connects to https server", + "fail": [ + "getAuthorizationHeaderValue - cross origin redirection", + "getAuthorizationHeaderValue - same origin redirection" + ] + }, + "cookies.any.js": { + "fail": [ + "Include mode: 1 cookie", + "Include mode: 2 cookies", + "Same-origin mode: 1 cookie", + "Same-origin mode: 2 cookies" + ] + } + }, + "fetch-later": { + "note": "this is not part of the spec, only a proposal", + "skip": true + }, + "headers": { + "header-setcookie.any.js": { + "note": "undici doesn't filter headers", + "fail": [ + "Set-Cookie is a forbidden response header" + ] + }, + "header-values-normalize.any.js": { + "note": "TODO(@KhafraDev): https://github.com/nodejs/undici/issues/1680", + "fail": [ + "XMLHttpRequest with value %00", + "XMLHttpRequest with value %01", + "XMLHttpRequest with value %02", + "XMLHttpRequest with value %03", + "XMLHttpRequest with value %04", + "XMLHttpRequest with value %05", + "XMLHttpRequest with value %06", + "XMLHttpRequest with value %07", + "XMLHttpRequest with value %08", + "XMLHttpRequest with value %09", + "XMLHttpRequest with value %0A", + "XMLHttpRequest with value %0D", + "XMLHttpRequest with value %0E", + "XMLHttpRequest with value %0F", + "XMLHttpRequest with value %10", + "XMLHttpRequest with value %11", + "XMLHttpRequest with value %12", + "XMLHttpRequest with value %13", + "XMLHttpRequest with value %14", + "XMLHttpRequest with value %15", + "XMLHttpRequest with value %16", + "XMLHttpRequest with value %17", + "XMLHttpRequest with value %18", + "XMLHttpRequest with value %19", + "XMLHttpRequest with value %1A", + "XMLHttpRequest with value %1B", + "XMLHttpRequest with value %1C", + "XMLHttpRequest with value %1D", + "XMLHttpRequest with value %1E", + "XMLHttpRequest with value %1F", + "XMLHttpRequest with value %20", + "fetch() with value %01", + "fetch() with value %02", + "fetch() with value %03", + "fetch() with value %04", + "fetch() with value %05", + "fetch() with value %06", + "fetch() with value %07", + "fetch() with value %08", + "fetch() with value %0E", + "fetch() with value %0F", + "fetch() with value %10", + "fetch() with value %11", + "fetch() with value %12", + "fetch() with value %13", + "fetch() with value %14", + "fetch() with value %15", + "fetch() with value %16", + "fetch() with value %17", + "fetch() with value %18", + "fetch() with value %19", + "fetch() with value %1A", + "fetch() 
with value %1B", + "fetch() with value %1C", + "fetch() with value %1D", + "fetch() with value %1E", + "fetch() with value %1F" + ] + }, + "header-values.any.js": { + "fail": [ + "XMLHttpRequest with value x%00x needs to throw", + "XMLHttpRequest with value x%0Ax needs to throw", + "XMLHttpRequest with value x%0Dx needs to throw", + "XMLHttpRequest with all valid values", + "fetch() with all valid values" + ] + }, + "headers-no-cors.any.js": { + "note": "undici doesn't implement CORs", + "skip": true + } + }, + "redirect": { + "redirect-empty-location.any.js": { + "note": "undici handles redirect: manual differently than browsers", + "fail": [ + "redirect response with empty Location, manual mode" + ] + }, + "redirect-keepalive.any.js": { + "note": "document is not defined", + "skip": true + }, + "redirect-location-escape.tentative.any.js": { + "note": "TODO(@KhafraDev): crashes runner", + "skip": true + }, + "redirect-location.any.js": { + "note": "undici handles redirect: manual differently than browsers", + "fail": [ + "Redirect 301 in \"manual\" mode without location", + "Redirect 301 in \"manual\" mode with invalid location", + "Redirect 301 in \"manual\" mode with data location", + "Redirect 302 in \"manual\" mode without location", + "Redirect 302 in \"manual\" mode with invalid location", + "Redirect 302 in \"manual\" mode with data location", + "Redirect 303 in \"manual\" mode without location", + "Redirect 303 in \"manual\" mode with invalid location", + "Redirect 303 in \"manual\" mode with data location", + "Redirect 307 in \"manual\" mode without location", + "Redirect 307 in \"manual\" mode with invalid location", + "Redirect 307 in \"manual\" mode with data location", + "Redirect 308 in \"manual\" mode without location", + "Redirect 308 in \"manual\" mode with invalid location", + "Redirect 308 in \"manual\" mode with data location", + "Redirect 301 in \"manual\" mode with valid location", + "Redirect 302 in \"manual\" mode with valid location", + "Redirect 303 in \"manual\" mode with valid location", + "Redirect 307 in \"manual\" mode with valid location", + "Redirect 308 in \"manual\" mode with valid location" + ] + }, + "redirect-method.any.js": { + "fail": [ + "Redirect 303 with TESTING" + ] + }, + "redirect-mode.any.js": { + "note": "mode isn't respected", + "skip": true + }, + "redirect-origin.any.js": { + "note": "TODO(@KhafraDev): investigate", + "skip": true + }, + "redirect-referrer-override.any.js": { + "note": "TODO(@KhafraDev): investigate", + "skip": true + }, + "redirect-referrer.any.js": { + "note": "TODO(@KhafraDev): investigate", + "skip": true + }, + "redirect-upload.h2.any.js": { + "note": "undici doesn't support http/2", + "skip": true + } + }, + "request": { + "request-cache-default-conditional.any.js": { + "note": "undici doesn't implement an http cache", + "skip": true + }, + "request-cache-default.any.js": { + "note": "undici doesn't implement an http cache", + "skip": true + }, + "request-cache-force-cache.any.js": { + "note": "undici doesn't implement an http cache", + "skip": true + }, + "request-cache-no-cache.any.js": { + "note": "undici doesn't implement an http cache", + "skip": true + }, + "request-cache-no-store.any.js": { + "note": "undici doesn't implement an http cache", + "skip": true + }, + "request-cache-only-if-cached.any.js": { + "note": "undici doesn't implement an http cache", + "skip": true + }, + "request-cache-reload.any.js": { + "note": "undici doesn't implement an http cache", + "skip": true + }, + 
"request-consume-empty.any.js": { + "note": "the semantics about this test are being discussed - https://github.com/web-platform-tests/wpt/pull/3950", + "fail": [ + "Consume empty FormData request body as text" + ] + }, + "request-disturbed.any.js": { + "note": "this test fails in all other platforms - https://wpt.fyi/results/fetch/api/request/request-disturbed.any.html?label=master&label=experimental&product=chrome&product=firefox&product=safari&product=deno&aligned&view=subtest", + "fail": [ + "Input request used for creating new request became disturbed even if body is not used" + ] + }, + "request-headers.any.js": { + "note": "undici doesn't filter headers", + "fail": [ + "Adding invalid request header \"Accept-Charset: KO\"", + "Adding invalid request header \"accept-charset: KO\"", + "Adding invalid request header \"ACCEPT-ENCODING: KO\"", + "Adding invalid request header \"Accept-Encoding: KO\"", + "Adding invalid request header \"Access-Control-Request-Headers: KO\"", + "Adding invalid request header \"Access-Control-Request-Method: KO\"", + "Adding invalid request header \"Access-Control-Request-Private-Network: KO\"", + "Adding invalid request header \"Connection: KO\"", + "Adding invalid request header \"Content-Length: KO\"", + "Adding invalid request header \"Cookie: KO\"", + "Adding invalid request header \"Cookie2: KO\"", + "Adding invalid request header \"Date: KO\"", + "Adding invalid request header \"DNT: KO\"", + "Adding invalid request header \"Expect: KO\"", + "Adding invalid request header \"Host: KO\"", + "Adding invalid request header \"Keep-Alive: KO\"", + "Adding invalid request header \"Origin: KO\"", + "Adding invalid request header \"Referer: KO\"", + "Adding invalid request header \"Set-Cookie: KO\"", + "Adding invalid request header \"TE: KO\"", + "Adding invalid request header \"Trailer: KO\"", + "Adding invalid request header \"Transfer-Encoding: KO\"", + "Adding invalid request header \"Upgrade: KO\"", + "Adding invalid request header \"Via: KO\"", + "Adding invalid request header \"Proxy-: KO\"", + "Adding invalid request header \"proxy-a: KO\"", + "Adding invalid request header \"Sec-: KO\"", + "Adding invalid request header \"sec-b: KO\"", + "Adding invalid no-cors request header \"Content-Type: KO\"", + "Adding invalid no-cors request header \"Potato: KO\"", + "Adding invalid no-cors request header \"proxy: KO\"", + "Adding invalid no-cors request header \"proxya: KO\"", + "Adding invalid no-cors request header \"sec: KO\"", + "Adding invalid no-cors request header \"secb: KO\"", + "Adding invalid no-cors request header \"Empty-Value: \"", + "Check that request constructor is filtering headers provided as init parameter", + "Check that no-cors request constructor is filtering headers provided as init parameter", + "Check that no-cors request constructor is filtering headers provided as part of request parameter" + ] + }, + "request-init-priority.any.js": { + "note": "undici doesn't implement priority hints, yet(?)", + "skip": true + } + }, + "response": { + "response-clone.any.js": { + "fail": [ + "Check response clone use structureClone for teed ReadableStreams (ArrayBufferchunk)", + "Check response clone use structureClone for teed ReadableStreams (DataViewchunk)" + ] + }, + "response-consume-empty.any.js": { + "fail": [ + "Consume empty FormData response body as text" + ] + }, + "response-consume-stream.any.js": { + "fail": [ + "Read blob response's body as readableStream with mode=byob", + "Read text response's body as readableStream with 
mode=byob", + "Read URLSearchParams response's body as readableStream with mode=byob", + "Read array buffer response's body as readableStream with mode=byob", + "Read form data response's body as readableStream with mode=byob" + ] + }, + "response-error-from-stream.any.js": { + "fail": [ + "ReadableStream start() Error propagates to Response.formData() Promise", + "ReadableStream pull() Error propagates to Response.formData() Promise" + ] + }, + "response-stream-with-broken-then.any.js": { + "note": "this is a bug in webstreams, see https://github.com/nodejs/node/issues/46786", + "skip": true + } + } + }, + "content-length": { + "api-and-duplicate-headers.any.js": { + "fail": [ + "XMLHttpRequest and duplicate Content-Length/Content-Type headers", + "fetch() and duplicate Content-Length/Content-Type headers" + ] + } + }, + "cross-origin-resource-policy": { + "note": "undici doesn't implement CORs", + "skip": true + }, + "http-cache": { + "note": "undici doesn't implement http caching", + "skip": true + }, + "metadata": { + "note": "undici doesn't respect RequestInit.mode", + "skip": true + }, + "orb": { + "tentative": { + "note": "undici doesn't implement orb", + "skip": true + } + }, + "range": { + "note": "undici doesn't respect range header", + "skip": true + }, + "security": { + "1xx-response.any.js": { + "fail": [ + "Status(100) should be ignored.", + "Status(101) should be accepted, with removing body.", + "Status(103) should be ignored.", + "Status(199) should be ignored." + ] + } + }, + "stale-while-revalidate": { + "note": "undici doesn't implement http caching", + "skip": true + }, + "idlharness.any.js": { + "flaky": [ + "Window interface: operation fetch(RequestInfo, optional RequestInit)" + ] + } +} diff --git a/test/wpt/status/mimesniff.status.json b/test/wpt/status/mimesniff.status.json new file mode 100644 index 0000000..ab9a3d3 --- /dev/null +++ b/test/wpt/status/mimesniff.status.json @@ -0,0 +1,7 @@ +{ + "mime-types": { + "parsing.any.js": { + "allowUnexpectedFailures": true + } + } +} diff --git a/test/wpt/status/service-workers/cache-storage.status.json b/test/wpt/status/service-workers/cache-storage.status.json new file mode 100644 index 0000000..09a291e --- /dev/null +++ b/test/wpt/status/service-workers/cache-storage.status.json @@ -0,0 +1,24 @@ +{ + "cache-storage": { + "cache-abort.https.any.js": { + "skip": true + }, + "cache-storage-buckets.https.any.js": { + "skip": true, + "note": "navigator is not defined" + }, + "cache-put.https.any.js": { + "note": "probably can be fixed", + "fail": [ + "Cache.put with a VARY:* opaque response should not reject", + "Cache.put with opaque-filtered HTTP 206 response" + ] + }, + "cache-match.https.any.js": { + "note": "requires https server", + "fail": [ + "cors-exposed header should be stored correctly." 
+ ] + } + } +} diff --git a/test/wpt/status/websockets.status.json b/test/wpt/status/websockets.status.json new file mode 100644 index 0000000..68bc6e2 --- /dev/null +++ b/test/wpt/status/websockets.status.json @@ -0,0 +1,115 @@ +{ + "stream": { + "tentative": { + "skip": true + } + }, + "Create-blocked-port.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "Basic check" + ] + }, + "Send-binary-arraybufferview-float32.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "Send binary data on a WebSocket - ArrayBufferView - Float32Array - Connection should be closed" + ] + }, + "Send-binary-arraybufferview-float64.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "Send binary data on a WebSocket - ArrayBufferView - Float64Array - Connection should be closed" + ] + }, + "Send-binary-arraybufferview-int16-offset.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "Send binary data on a WebSocket - ArrayBufferView - Int16Array with offset - Connection should be closed" + ] + }, + "Send-binary-arraybufferview-int32.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "Send binary data on a WebSocket - ArrayBufferView - Int32Array - Connection should be closed" + ] + }, + "Send-binary-arraybufferview-uint16-offset-length.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "Send binary data on a WebSocket - ArrayBufferView - Uint16Array with offset and length - Connection should be closed" + ] + }, + "Send-binary-arraybufferview-uint32-offset.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "Send binary data on a WebSocket - ArrayBufferView - Uint32Array with offset - Connection should be closed" + ] + }, + "basic-auth.any.js": { + "note": "TODO(@KhafraDev): investigate failure", + "fail": [ + "HTTP basic authentication should work with WebSockets" + ] + }, + "Create-on-worker-shutdown.any.js": { + "skip": true, + "//": "Node.js workers are different from web workers & don't work with blob: urls" + }, + "Close-delayed.any.js": { + "skip": true + }, + "bufferedAmount-unchanged-by-sync-xhr.any.js": { + "skip": true, + "//": "Node.js doesn't have XMLHttpRequest nor does this test make sense regardless" + }, + "referrer.any.js": { + "skip": true + }, + "Send-binary-blob.any.js": { + "flaky": [ + "Send binary data on a WebSocket - Blob - Connection should be closed" + ] + }, + "Send-65K-data.any.js": { + "flaky": [ + "Send 65K data on a WebSocket - Connection should be closed" + ] + }, + "Send-binary-65K-arraybuffer.any.js": { + "flaky": [ + "Send 65K binary data on a WebSocket - ArrayBuffer - Connection should be closed" + ] + }, + "Send-0byte-data.any.js": { + "flaky": [ + "Send 0 byte data on a WebSocket - Connection should be closed" + ] + }, + "send-many-64K-messages-with-backpressure.any.js": { + "note": "probably flaky based on other flaky tests.", + "flaky": [ + "sending 50 messages of size 65536 with backpressure applied should not hang" + ] + }, + "back-forward-cache-with-closed-websocket-connection-ccns.tentative.window.js": { + "skip": true, + "note": "browser-only test" + }, + "back-forward-cache-with-closed-websocket-connection.window.js": { + "skip": true, + "note": "browser-only test" + }, + "back-forward-cache-with-open-websocket-connection-ccns.tentative.window.js": { + "skip": true, + "note": "browser-only test" + }, + "back-forward-cache-with-open-websocket-connection.window.js": { + "skip": true, + "note": 
"browser-only test" + }, + "mixed-content.https.any.js": { + "note": "node has no concept of origin, thus there is no 'secure' or 'insecure' contexts", + "skip": true + } +} diff --git a/test/wpt/status/xhr/formdata.status.json b/test/wpt/status/xhr/formdata.status.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/test/wpt/status/xhr/formdata.status.json @@ -0,0 +1 @@ +{} diff --git a/test/wpt/tests/.azure-pipelines.yml b/test/wpt/tests/.azure-pipelines.yml new file mode 100644 index 0000000..75a87df --- /dev/null +++ b/test/wpt/tests/.azure-pipelines.yml @@ -0,0 +1,595 @@ +# This is the configuration file for Azure Pipelines, used to run tests on +# macOS and Windows. Documentation to help understand this setup: +# https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema +# https://docs.microsoft.com/en-us/azure/devops/pipelines/build/triggers +# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/multiple-phases +# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/templates +# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/variables +# https://docs.microsoft.com/en-us/azure/devops/pipelines/tasks/index +# +# In addition to this configuration file, some setup in the Azure DevOps +# project is required: +# - The "Build pull requests from forks of this repository" setting must be +# enabled: https://docs.microsoft.com/en-us/azure/devops/pipelines/repos/github#validate-contributions-from-forks + +trigger: +# These are all the branches referenced in the jobs that follow. +- epochs/daily +- epochs/three_hourly +- triggers/edge_stable +- triggers/edge_dev +- triggers/edge_canary +- triggers/safari_stable +- triggers/safari_preview +- triggers/wktr_preview + +# Set safaridriver_diagnose to true to enable safaridriver diagnostics. The +# logs won't appear in `./wpt run` output but will be uploaded as an artifact. +variables: + safaridriver_diagnose: false + +jobs: +# The affected tests jobs are unconditional for speed, as most PRs have one or +# more affected tests: https://github.com/web-platform-tests/wpt/issues/13936. +- job: affected_safari_preview + displayName: 'affected tests: Safari Technology Preview' + condition: eq(variables['Build.Reason'], 'PullRequest') + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/affected_tests.yml + parameters: + artifactName: 'safari-preview-affected-tests' +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: affected_safari_preview + artifactName: safari-preview-affected-tests + +- job: affected_without_changes_safari_preview + displayName: 'affected tests without changes: Safari Technology Preview' + condition: eq(variables['Build.Reason'], 'PullRequest') + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/affected_tests.yml + parameters: + checkoutCommit: 'HEAD^1' + affectedRange: 'HEAD@{1}' + artifactName: 'safari-preview-affected-tests-without-changes' +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: affected_without_changes_safari_preview + artifactName: safari-preview-affected-tests-without-changes + +# The decision jobs runs `./wpt test-jobs` to determine which jobs to run, +# and all following jobs wait for it to finish and depend on its output. 
+- job: decision + displayName: './wpt test-jobs' + condition: eq(variables['Build.Reason'], 'PullRequest') + pool: + vmImage: 'ubuntu-20.04' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - script: | + set -eux -o pipefail + git fetch --depth 50 --quiet origin master + ./wpt test-jobs | while read job; do + echo "$job" + echo "##vso[task.setvariable variable=$job;isOutput=true]true"; + done + name: test_jobs + displayName: 'Run ./wpt test-jobs' + +- job: infrastructure_mac + displayName: 'infrastructure/ tests: macOS' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wptrunner_infrastructure'] + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/pip_install.yml + parameters: + packages: virtualenv + - template: tools/ci/azure/install_fonts.yml + - template: tools/ci/azure/install_certs.yml + - template: tools/ci/azure/color_profile.yml + - template: tools/ci/azure/install_chrome.yml + - template: tools/ci/azure/install_firefox.yml + - template: tools/ci/azure/install_safari.yml + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - script: | + set -eux -o pipefail + ./wpt run --yes --no-manifest-update --manifest MANIFEST.json --metadata infrastructure/metadata/ --log-mach - --log-mach-level info --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report_macos_chrome.json --channel dev chrome infrastructure/ + condition: succeededOrFailed() + displayName: 'Run tests (Chrome Dev)' + - script: | + set -eux -o pipefail + ./wpt run --yes --no-manifest-update --manifest MANIFEST.json --metadata infrastructure/metadata/ --log-mach - --log-mach-level info --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report_macos_firefox.json --channel nightly firefox infrastructure/ + condition: succeededOrFailed() + displayName: 'Run tests (Firefox Nightly)' + - script: | + set -eux -o pipefail + export SYSTEM_VERSION_COMPAT=0 + ./wpt run --yes --no-manifest-update --manifest MANIFEST.json --metadata infrastructure/metadata/ --log-mach - --log-mach-level info --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report_macos_safari.json --channel preview safari infrastructure/ + condition: succeededOrFailed() + displayName: 'Run tests (Safari Technology Preview)' + - task: PublishBuildArtifacts@1 + condition: succeededOrFailed() + displayName: 'Publish results' + inputs: + artifactName: 'infrastructure-results' + - template: tools/ci/azure/publish_logs.yml + - template: tools/ci/azure/sysdiagnose.yml + +- job: tools_unittest_mac_py37 + displayName: 'tools/ unittests: macOS + Python 3.7' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.tools_unittest'] + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + # TODO(#40525): Revert back to 3.7 once the Mac agent's Python v3.7 contains bz2 again. 
+ versionSpec: '3.7.16' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/ + toxenv: py37 + +- job: tools_unittest_mac_py311 + displayName: 'tools/ unittests: macOS + Python 3.11' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.tools_unittest'] + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/ + toxenv: py311 + +- job: wptrunner_unittest_mac_py37 + displayName: 'tools/wptrunner/ unittests: macOS + Python 3.7' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wptrunner_unittest'] + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + # TODO(#40525): Revert back to 3.7 once the Mac agent's Python v3.7 contains bz2 again. + versionSpec: '3.7.16' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wptrunner/ + toxenv: py37 + +- job: wptrunner_unittest_mac_py311 + displayName: 'tools/wptrunner/ unittests: macOS + Python 3.11' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wptrunner_unittest'] + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wptrunner/ + toxenv: py311 + +- job: wpt_integration_mac_py37 + displayName: 'tools/wpt/ tests: macOS + Python 3.7' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wpt_integration'] + pool: + vmImage: 'macOS-13' + steps: + # full checkout required + - task: UsePythonVersion@0 + inputs: + # TODO(#40525): Revert back to 3.7 once the Mac agent's Python v3.7 contains bz2 again. 
+ versionSpec: '3.7.16' + - template: tools/ci/azure/install_chrome.yml + - template: tools/ci/azure/install_firefox.yml + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wpt/ + toxenv: py37 + +- job: wpt_integration_mac_py311 + displayName: 'tools/wpt/ tests: macOS + Python 3.11' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wpt_integration'] + pool: + vmImage: 'macOS-13' + steps: + # full checkout required + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/install_chrome.yml + - template: tools/ci/azure/install_firefox.yml + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wpt/ + toxenv: py311 + +- job: tools_unittest_win_py37 + displayName: 'tools/ unittests: Windows + Python 3.7' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.tools_unittest'] + pool: + vmImage: 'windows-2019' + variables: + HYPOTHESIS_PROFILE: ci + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + addToPath: false + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/ + toxenv: py37 + +- job: tools_unittest_win_py311 + displayName: 'tools/ unittests: Windows + Python 3.11' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.tools_unittest'] + pool: + vmImage: 'windows-2019' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + addToPath: false + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/ + toxenv: py311 + +- job: wptrunner_unittest_win_py37 + displayName: 'tools/wptrunner/ unittests: Windows + Python 3.7' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wptrunner_unittest'] + pool: + vmImage: 'windows-2019' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + addToPath: false + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wptrunner/ + toxenv: py37 + +- job: wptrunner_unittest_win_py311 + displayName: 'tools/wptrunner/ unittests: Windows + Python 3.11' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wptrunner_unittest'] + pool: + vmImage: 'windows-2019' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + addToPath: false + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wptrunner/ + toxenv: py311 + +- job: wpt_integration_win_py37 + displayName: 'tools/wpt/ tests: Windows + Python 3.7' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wpt_integration'] + pool: + vmImage: 'windows-2019' + steps: + # full checkout required + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + # currently just using the outdated Chrome/Firefox on the VM rather than + # figuring out how to install Chrome Dev channel on Windows + # - template: tools/ci/azure/install_chrome.yml + # - template: tools/ci/azure/install_firefox.yml + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wpt/ + toxenv: py37 + +- job: 
wpt_integration_win_py311 + displayName: 'tools/wpt/ tests: Windows + Python 3.11' + dependsOn: decision + condition: dependencies.decision.outputs['test_jobs.wpt_integration'] + pool: + vmImage: 'windows-2019' + steps: + # full checkout required + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + # currently just using the outdated Chrome/Firefox on the VM rather than + # figuring out how to install Chrome Dev channel on Windows + # - template: tools/ci/azure/install_chrome.yml + # - template: tools/ci/azure/install_firefox.yml + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - template: tools/ci/azure/tox_pytest.yml + parameters: + directory: tools/wpt/ + toxenv: py311 + +- job: results_edge_stable + displayName: 'all tests: Edge Stable' + condition: | + or(eq(variables['Build.SourceBranch'], 'refs/heads/epochs/daily'), + eq(variables['Build.SourceBranch'], 'refs/heads/triggers/edge_stable'), + and(eq(variables['Build.Reason'], 'Manual'), variables['run_all_edge_stable'])) + strategy: + parallel: 8 # chosen to make runtime ~2h + timeoutInMinutes: 180 + pool: + vmImage: 'windows-2019' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/system_info.yml + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/pip_install.yml + parameters: + packages: virtualenv + - template: tools/ci/azure/install_certs.yml + - template: tools/ci/azure/install_edge.yml + parameters: + channel: stable + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - script: python ./wpt run --yes --no-manifest-update --no-restart-on-unexpected --no-fail-on-unexpected --install-fonts --this-chunk $(System.JobPositionInPhase) --total-chunks $(System.TotalJobsInPhase) --chunk-type hash --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report_$(System.JobPositionInPhase).json --log-wptscreenshot $(Build.ArtifactStagingDirectory)/wpt_screenshot_$(System.JobPositionInPhase).txt --log-mach - --log-mach-level info --channel stable edgechromium + displayName: 'Run tests (Edge Stable)' + - task: PublishBuildArtifacts@1 + displayName: 'Publish results' + inputs: + artifactName: 'edge-stable-results' + - template: tools/ci/azure/publish_logs.yml + - template: tools/ci/azure/sysdiagnose.yml +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: results_edge_stable + artifactName: edge-stable-results + +- job: results_edge_dev + displayName: 'all tests: Edge Dev' + condition: | + or(eq(variables['Build.SourceBranch'], 'refs/heads/epochs/three_hourly'), + eq(variables['Build.SourceBranch'], 'refs/heads/triggers/edge_dev'), + and(eq(variables['Build.Reason'], 'Manual'), variables['run_all_edge_dev'])) + strategy: + parallel: 8 # chosen to make runtime ~2h + timeoutInMinutes: 180 + pool: + vmImage: 'windows-2019' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/system_info.yml + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/pip_install.yml + parameters: + packages: virtualenv + - template: tools/ci/azure/install_certs.yml + - template: tools/ci/azure/install_edge.yml + parameters: + channel: dev + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - script: python ./wpt run --yes --no-manifest-update --no-restart-on-unexpected --no-fail-on-unexpected --install-fonts --this-chunk $(System.JobPositionInPhase) --total-chunks 
$(System.TotalJobsInPhase) --chunk-type hash --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report_$(System.JobPositionInPhase).json --log-wptscreenshot $(Build.ArtifactStagingDirectory)/wpt_screenshot_$(System.JobPositionInPhase).txt --log-mach - --log-mach-level info --channel dev edgechromium + displayName: 'Run tests (Edge Dev)' + - task: PublishBuildArtifacts@1 + displayName: 'Publish results' + inputs: + artifactName: 'edge-dev-results' + - template: tools/ci/azure/publish_logs.yml + - template: tools/ci/azure/sysdiagnose.yml +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: results_edge_dev + artifactName: edge-dev-results + +- job: results_edge_canary + displayName: 'all tests: Edge Canary' + condition: | + or(eq(variables['Build.SourceBranch'], 'refs/heads/epochs/weekly'), + eq(variables['Build.SourceBranch'], 'refs/heads/triggers/edge_canary'), + and(eq(variables['Build.Reason'], 'Manual'), variables['run_all_edge_canary'])) + strategy: + parallel: 8 # chosen to make runtime ~2h + timeoutInMinutes: 180 + pool: + vmImage: 'windows-2019' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/pip_install.yml + parameters: + packages: virtualenv + - template: tools/ci/azure/install_certs.yml + - template: tools/ci/azure/install_edge.yml + parameters: + channel: canary + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - script: python ./wpt run --yes --no-manifest-update --no-restart-on-unexpected --no-fail-on-unexpected --install-fonts --this-chunk $(System.JobPositionInPhase) --total-chunks $(System.TotalJobsInPhase) --chunk-type hash --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report_$(System.JobPositionInPhase).json --log-wptscreenshot $(Build.ArtifactStagingDirectory)/wpt_screenshot_$(System.JobPositionInPhase).txt --log-mach - --log-mach-level info --channel canary edgechromium + displayName: 'Run tests (Edge Canary)' + - task: PublishBuildArtifacts@1 + displayName: 'Publish results' + inputs: + artifactName: 'edge-canary-results' + - template: tools/ci/azure/publish_logs.yml + - template: tools/ci/azure/sysdiagnose.yml +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: results_edge_canary + artifactName: edge-canary-results + +- job: results_safari + displayName: 'all tests: Safari' + condition: | + or(eq(variables['Build.SourceBranch'], 'refs/heads/epochs/daily'), + eq(variables['Build.SourceBranch'], 'refs/heads/triggers/safari_stable'), + and(eq(variables['Build.Reason'], 'Manual'), variables['run_all_safari'])) + strategy: + parallel: 8 # chosen to make runtime ~2h + timeoutInMinutes: 180 + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/pip_install.yml + parameters: + packages: virtualenv + - template: tools/ci/azure/install_certs.yml + - template: tools/ci/azure/color_profile.yml + - template: tools/ci/azure/install_safari.yml + parameters: + channel: stable + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - script: | + set -eux -o pipefail + export SYSTEM_VERSION_COMPAT=0 + ./wpt run --no-manifest-update --no-restart-on-unexpected --no-fail-on-unexpected --this-chunk=$(System.JobPositionInPhase) --total-chunks=$(System.TotalJobsInPhase) --chunk-type hash --log-wptreport 
$(Build.ArtifactStagingDirectory)/wpt_report_$(System.JobPositionInPhase).json --log-wptscreenshot $(Build.ArtifactStagingDirectory)/wpt_screenshot_$(System.JobPositionInPhase).txt --log-mach - --log-mach-level info --channel stable --kill-safari --max-restarts 100 safari + displayName: 'Run tests' + retryCountOnTaskFailure: 2 + - task: PublishBuildArtifacts@1 + displayName: 'Publish results' + inputs: + artifactName: 'safari-results' + - template: tools/ci/azure/publish_logs.yml + - template: tools/ci/azure/sysdiagnose.yml +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: results_safari + artifactName: safari-results + +- job: results_safari_preview + displayName: 'all tests: Safari Technology Preview' + condition: | + or(eq(variables['Build.SourceBranch'], 'refs/heads/epochs/three_hourly'), + eq(variables['Build.SourceBranch'], 'refs/heads/triggers/safari_preview'), + and(eq(variables['Build.Reason'], 'Manual'), variables['run_all_safari_preview'])) + strategy: + parallel: 8 # chosen to make runtime ~2h + timeoutInMinutes: 180 + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/pip_install.yml + parameters: + packages: virtualenv + - template: tools/ci/azure/install_certs.yml + - template: tools/ci/azure/color_profile.yml + - template: tools/ci/azure/install_safari.yml + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - script: | + set -eux -o pipefail + export SYSTEM_VERSION_COMPAT=0 + ./wpt run --no-manifest-update --no-restart-on-unexpected --no-fail-on-unexpected --this-chunk=$(System.JobPositionInPhase) --total-chunks=$(System.TotalJobsInPhase) --chunk-type hash --log-wptreport $(Build.ArtifactStagingDirectory)/wpt_report_$(System.JobPositionInPhase).json --log-wptscreenshot $(Build.ArtifactStagingDirectory)/wpt_screenshot_$(System.JobPositionInPhase).txt --log-mach - --log-mach-level info --channel preview --kill-safari --max-restarts 100 safari + displayName: 'Run tests' + retryCountOnTaskFailure: 2 + - task: PublishBuildArtifacts@1 + displayName: 'Publish results' + inputs: + artifactName: 'safari-preview-results' + - template: tools/ci/azure/publish_logs.yml + - template: tools/ci/azure/sysdiagnose.yml +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: results_safari_preview + artifactName: safari-preview-results + +- job: results_wktr_preview + displayName: 'all tests: WebKitTestRunner' + condition: | + or(eq(variables['Build.SourceBranch'], 'refs/heads/triggers/wktr_preview'), + and(eq(variables['Build.Reason'], 'Manual'), variables['run_all_wktr_preview'])) + strategy: + parallel: 8 # chosen to make runtime ~2h + timeoutInMinutes: 180 + pool: + vmImage: 'macOS-13' + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.11' + - template: tools/ci/azure/checkout.yml + - template: tools/ci/azure/pip_install.yml + parameters: + packages: virtualenv + - template: tools/ci/azure/install_certs.yml + - template: tools/ci/azure/color_profile.yml + - template: tools/ci/azure/update_hosts.yml + - template: tools/ci/azure/update_manifest.yml + - script: | + set -eux -o pipefail + export SYSTEM_VERSION_COMPAT=0 + ./wpt run --no-manifest-update --no-restart-on-unexpected --no-fail-on-unexpected --this-chunk=$(System.JobPositionInPhase) --total-chunks=$(System.TotalJobsInPhase) --chunk-type hash --log-wptreport 
$(Build.ArtifactStagingDirectory)/wpt_report_$(System.JobPositionInPhase).json --log-wptscreenshot $(Build.ArtifactStagingDirectory)/wpt_screenshot_$(System.JobPositionInPhase).txt --log-mach - --log-mach-level info --channel experimental --install-browser --yes wktr + displayName: 'Run tests' + - task: PublishBuildArtifacts@1 + displayName: 'Publish results' + inputs: + artifactName: 'wktr-preview-results' + - template: tools/ci/azure/publish_logs.yml + - template: tools/ci/azure/sysdiagnose.yml +- template: tools/ci/azure/fyi_hook.yml + parameters: + dependsOn: results_wktr_preview + artifactName: wktr-preview-results diff --git a/test/wpt/tests/.gitattributes b/test/wpt/tests/.gitattributes new file mode 100644 index 0000000..5c11e4e --- /dev/null +++ b/test/wpt/tests/.gitattributes @@ -0,0 +1 @@ +* -text diff --git a/test/wpt/tests/.gitignore b/test/wpt/tests/.gitignore new file mode 100644 index 0000000..061700a --- /dev/null +++ b/test/wpt/tests/.gitignore @@ -0,0 +1,52 @@ +# Python +*.py[co] +.cache/ +.coverage* +.mypy_cache/ +.pytest_cache/ +.tox/ +.virtualenv/ +_venv*/ +_virtualenv/ + +# Node +node_modules/ + +# WPT repo stuff +.wptcache/ +/MANIFEST.json +/_certs +/config.json + +# Files generated when regenerating pre-generated certs +/tools/certs/0*.pem +/tools/certs/index.txt* +/tools/certs/serial* + +# Various OS/editor specific files +*# +*.orig +*.rej +*.svn +*.sw[po] +*.xcodeproj +*Thumbs.db +*~ +.DS_Store +.directory* +.idea/ +.vscode/ +\#* +scratch + +# Testsuite-specific rules +/conformance-checkers/vnu.jar +/cors/resources/log.txt +/css/build-temp +/css/dist +/css/dist_last +/url/tools/IdnaTestV2.txt +/webaudio/idl/* + +# w3c-test.org PR-branch mirroring +/submissions/ diff --git a/test/wpt/tests/.mailmap b/test/wpt/tests/.mailmap new file mode 100644 index 0000000..5293948 --- /dev/null +++ b/test/wpt/tests/.mailmap @@ -0,0 +1,9 @@ +# People who've changed name: + +# Sam Sneddon: +Sam Sneddon +Sam Sneddon + +# Theresa O'Connor: +Theresa O'Connor +Theresa O'Connor diff --git a/test/wpt/tests/.taskcluster.yml b/test/wpt/tests/.taskcluster.yml new file mode 100644 index 0000000..c817999 --- /dev/null +++ b/test/wpt/tests/.taskcluster.yml @@ -0,0 +1,82 @@ +version: 1 +reporting: checks-v1 +policy: + pullRequests: public +tasks: + $let: + run_task: + $if: 'tasks_for == "github-push"' + then: + $if: 'event.ref in ["refs/heads/master", "refs/heads/epochs/daily", "refs/heads/epochs/weekly", "refs/heads/triggers/chrome_stable", "refs/heads/triggers/chrome_beta", "refs/heads/triggers/chrome_dev", "refs/heads/triggers/chrome_nightly", "refs/heads/triggers/firefox_stable", "refs/heads/triggers/firefox_beta", "refs/heads/triggers/firefox_nightly", "refs/heads/triggers/webkitgtk_minibrowser_stable", "refs/heads/triggers/webkitgtk_minibrowser_beta", "refs/heads/triggers/webkitgtk_minibrowser_nightly", "refs/heads/triggers/servo_nightly"]' + then: true + else: false + else: + $if: 'tasks_for == "github-pull-request"' + then: + $if: 'event.action in ["opened", "reopened", "synchronize"]' + then: true + else: false + else: false + in: + - $if: run_task + then: + $let: + event_str: {$json: {$eval: event}} + scopes: + $if: 'tasks_for == "github-push"' + then: + $let: + branch: + $if: "event.ref[:11] == 'refs/heads/'" + then: "${event.ref[11:]}" + else: "${event.ref}" + in: "assume:repo:github.com/${event.repository.full_name}:branch:${branch}" + else: "assume:repo:github.com/${event.repository.full_name}:pull-request" + rev: + $if: 'tasks_for == "github-pull-request"' + then: 
"refs/pull/${event.number}/merge" + else: "${event.after}" + owner: + $if: 'tasks_for == "github-push"' + then: + $if: 'event.pusher.email' + then: + $if: '"@" in event.pusher.email' + then: ${event.pusher.email} + else: web-platform-tests@users.noreply.github.com + else: web-platform-tests@users.noreply.github.com + else: web-platform-tests@users.noreply.github.com + in: + created: {$fromNow: ''} + deadline: {$fromNow: '24 hours'} + provisionerId: proj-wpt + workerType: ci + metadata: + name: "wpt-decision-task" + description: "The task that creates all of the other tasks in the task graph" + owner: ${owner} + source: ${event.repository.clone_url} + payload: + image: webplatformtests/wpt:0.54 + maxRunTime: 7200 + artifacts: + public/results: + path: /home/test/artifacts + type: directory + command: + - /bin/bash + - --login + - -c + - set -ex; + ~/start.sh + ${event.repository.clone_url} + ${rev}; + cd ~/web-platform-tests; + ./wpt tc-decision --tasks-path=/home/test/artifacts/tasks.json + features : + taskclusterProxy: true + scopes: + - ${scopes} + extra: + github_event: "${event_str}" + diff --git a/test/wpt/tests/CODEOWNERS b/test/wpt/tests/CODEOWNERS new file mode 100644 index 0000000..140e0c6 --- /dev/null +++ b/test/wpt/tests/CODEOWNERS @@ -0,0 +1,6 @@ +# Require review for changes that often need an RFC +/resources/testdriver* @web-platform-tests/wpt-core-team +/resources/testharness* @web-platform-tests/wpt-core-team + +# Prevent accidentally touching tools/third_party +/tools/third_party/ @web-platform-tests/wpt-core-team diff --git a/test/wpt/tests/CODE_OF_CONDUCT.md b/test/wpt/tests/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..dae98ee --- /dev/null +++ b/test/wpt/tests/CODE_OF_CONDUCT.md @@ -0,0 +1,138 @@ +# Code of Conduct + +Contact: a moderator ([see below](#moderators)), or a member of the WPT +community that you feel you can trust. + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, sexual identity and +orientation, or any other dimension of diversity. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Moderators are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Moderators have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +Moderators are held to a higher standard than other community members. If a +moderator creates an inappropriate situation, they should expect less leeway +than others. + +## Scope + +This Code of Conduct applies within all community spaces, and also +applies when an individual is officially representing the community in +public spaces. Examples of representing our community include +the official Matrix channel (wpt:matrix.org); GitHub repositories under +the web-platform-tests organization; and the public-test-infra@w3.org +mailing list. + +There may arise situations where both the WPT code of conduct and that of +another organization (such as the WHATWG or W3C) may apply. +For example, a WPT-focused meeting at +[TPAC](https://www.w3.org/2002/09/TPOverview.html) would involve both the WPT +code of conduct and the [W3C code of ethics and professional +conduct](https://www.w3.org/Consortium/cepc/). +In such situations we operate under all code of conducts involved. +If you are placed in a situation where you feel this is inappropriate (e.g. if +you believe the code of conducts involved contradict one another), please +contact a [moderator](#moderators). + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the [moderators](#moderators). +All complaints will be reviewed and investigated promptly and fairly. + +All moderators are obligated to respect the privacy and security of the +reporter of any incident. + +Moderators will recuse themselves if they are directly involved in a report of +a code of conduct violation. + +## Enforcement Guidelines + +Moderators will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct. +These are not a consecutive set of steps; a single incident may be sufficient +enough to proceed straight to a temporary ban, for example. + +### 1. Warning + +**Community Impact**: A violation through a single incident or series +of actions. 
+ +**Consequence**: A warning with consequences for continued behavior, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 3. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Moderators + +This section lists the current moderators and how to reach them. + +* Nina Satragno - [nso@google.com](mailto:nso@google.com). Languages: English, Spanish. +* Boaz Sender - [boaz@bocoup.com](mailto:boaz@bocoup.com). Languages: English, Hebrew. +* Jory Burson - [jory@bocoup.education](mailto:jory@bocoup.education). Languages: English (fluent), Spanish (conversational). + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. diff --git a/test/wpt/tests/CONTRIBUTING.md b/test/wpt/tests/CONTRIBUTING.md new file mode 100644 index 0000000..cb80d8d --- /dev/null +++ b/test/wpt/tests/CONTRIBUTING.md @@ -0,0 +1,11 @@ +All contributions are licensed under the terms of the [3-Clause BSD License](LICENSE.md). + +Documentation +------------- + +See [web-platform-tests.org](https://web-platform-tests.org/). + +Code of Conduct +--------------- + +See [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md). diff --git a/test/wpt/tests/FileAPI/Blob-methods-from-detached-frame.html b/test/wpt/tests/FileAPI/Blob-methods-from-detached-frame.html new file mode 100644 index 0000000..37efd5e --- /dev/null +++ b/test/wpt/tests/FileAPI/Blob-methods-from-detached-frame.html @@ -0,0 +1,59 @@ + + +Blob methods from detached frame work as expected + + + + + + diff --git a/test/wpt/tests/FileAPI/BlobURL/cross-partition.tentative.https.html b/test/wpt/tests/FileAPI/BlobURL/cross-partition.tentative.https.html new file mode 100644 index 0000000..c75ce07 --- /dev/null +++ b/test/wpt/tests/FileAPI/BlobURL/cross-partition.tentative.https.html @@ -0,0 +1,276 @@ + + + + + + + + + + + + + + + diff --git a/test/wpt/tests/FileAPI/BlobURL/support/file_test2.txt b/test/wpt/tests/FileAPI/BlobURL/support/file_test2.txt new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/FileAPI/BlobURL/test2-manual.html b/test/wpt/tests/FileAPI/BlobURL/test2-manual.html new file mode 100644 index 0000000..07fb27e --- /dev/null +++ b/test/wpt/tests/FileAPI/BlobURL/test2-manual.html @@ -0,0 +1,62 @@ + + + + + Blob and File reference URL Test(2) + + + + + + +
+ Test steps:
+ 1. Download the file.
+ 2. Select the file in the file inputbox.
+ 3. Delete the file.
+ 4. Click the 'start' button.
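The page's script is not preserved in this copy of the patch; the following is only a minimal sketch of the kind of check such a manual Blob URL test performs, assuming a file input with the hypothetical id "fileChooser" and that reading a blob: URL fails once its backing file has been deleted.

// Hypothetical sketch, not the original test script.
promise_test(async () => {
  // Assumption: the file chosen in step 2 is exposed via an <input type=file id=fileChooser>.
  const file = document.querySelector('#fileChooser').files[0];
  const url = URL.createObjectURL(file);
  let readFailed = false;
  try {
    await fetch(url);        // the backing file was deleted in an earlier step
  } catch (e) {
    readFailed = true;       // expected: reading the blob: URL fails
  } finally {
    URL.revokeObjectURL(url);
  }
  assert_true(readFailed, 'blob: URL for a deleted file should not be readable');
}, 'Blob and File reference URL test (sketch)');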
+ + + + diff --git a/test/wpt/tests/FileAPI/FileReader/progress_event_bubbles_cancelable.html b/test/wpt/tests/FileAPI/FileReader/progress_event_bubbles_cancelable.html new file mode 100644 index 0000000..6a03243 --- /dev/null +++ b/test/wpt/tests/FileAPI/FileReader/progress_event_bubbles_cancelable.html @@ -0,0 +1,33 @@ + + +File API Test: Progress Event - bubbles, cancelable + + + + +
+ + diff --git a/test/wpt/tests/FileAPI/FileReader/support/file_test1.txt b/test/wpt/tests/FileAPI/FileReader/support/file_test1.txt new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/FileAPI/FileReader/test_errors-manual.html b/test/wpt/tests/FileAPI/FileReader/test_errors-manual.html new file mode 100644 index 0000000..b8c3f84 --- /dev/null +++ b/test/wpt/tests/FileAPI/FileReader/test_errors-manual.html @@ -0,0 +1,72 @@ + + + + + FileReader Errors Test + + + + + + +
+ Test steps:
+ 1. Download the file.
+ 2. Select the file in the file inputbox.
+ 3. Delete the file.
+ 4. Click the 'start' button.
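As with the previous page, the script body is not preserved here; a minimal sketch of the kind of FileReader error check this manual test performs, again assuming a hypothetical "fileChooser" input and that reading a deleted file fires the reader's error event, might look like:

// Hypothetical sketch, not the original test script.
promise_test(async () => {
  const file = document.querySelector('#fileChooser').files[0];
  const reader = new FileReader();
  const outcome = await new Promise((resolve) => {
    reader.onload = () => resolve('load');
    reader.onerror = () => resolve('error');
    reader.readAsText(file); // the backing file was deleted in an earlier step
  });
  assert_equals(outcome, 'error', 'reading a deleted file should fire the error event');
  assert_true(reader.error instanceof DOMException, 'reader.error should be a DOMException');
}, 'FileReader errors test (sketch)');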
+ + + + diff --git a/test/wpt/tests/FileAPI/FileReader/test_notreadableerrors-manual.html b/test/wpt/tests/FileAPI/FileReader/test_notreadableerrors-manual.html new file mode 100644 index 0000000..46d7359 --- /dev/null +++ b/test/wpt/tests/FileAPI/FileReader/test_notreadableerrors-manual.html @@ -0,0 +1,42 @@ + + +FileReader NotReadableError Test + + + + +
+ Test steps:
+ 1. Download the file.
+ 2. Select the file in the file inputbox.
+ 3. Delete the file's readable permission.
+ 4. Click the 'start' button.
+ + + diff --git a/test/wpt/tests/FileAPI/FileReader/test_securityerrors-manual.html b/test/wpt/tests/FileAPI/FileReader/test_securityerrors-manual.html new file mode 100644 index 0000000..add93ed --- /dev/null +++ b/test/wpt/tests/FileAPI/FileReader/test_securityerrors-manual.html @@ -0,0 +1,40 @@ + + +FileReader SecurityError Test + + + + +
+ Test steps:
+ 1. Select a system sensitive file (e.g. files in /usr/bin, password files, and other native operating system executables) in the file inputbox.
+ 2. Click the 'start' button.
+ + diff --git a/test/wpt/tests/FileAPI/FileReader/workers.html b/test/wpt/tests/FileAPI/FileReader/workers.html new file mode 100644 index 0000000..8e114ee --- /dev/null +++ b/test/wpt/tests/FileAPI/FileReader/workers.html @@ -0,0 +1,27 @@ + + + + + diff --git a/test/wpt/tests/FileAPI/FileReaderSync.worker.js b/test/wpt/tests/FileAPI/FileReaderSync.worker.js new file mode 100644 index 0000000..3d7a022 --- /dev/null +++ b/test/wpt/tests/FileAPI/FileReaderSync.worker.js @@ -0,0 +1,56 @@ +importScripts("/resources/testharness.js"); + +var blob, empty_blob, readerSync; +setup(() => { + readerSync = new FileReaderSync(); + blob = new Blob(["test"]); + empty_blob = new Blob(); +}); + +test(() => { + assert_true(readerSync instanceof FileReaderSync); +}, "Interface"); + +test(() => { + var text = readerSync.readAsText(blob); + assert_equals(text, "test"); +}, "readAsText"); + +test(() => { + var text = readerSync.readAsText(empty_blob); + assert_equals(text, ""); +}, "readAsText with empty blob"); + +test(() => { + var data = readerSync.readAsDataURL(blob); + assert_equals(data.indexOf("data:"), 0); +}, "readAsDataURL"); + +test(() => { + var data = readerSync.readAsDataURL(empty_blob); + assert_equals(data.indexOf("data:"), 0); +}, "readAsDataURL with empty blob"); + +test(() => { + var data = readerSync.readAsBinaryString(blob); + assert_equals(data, "test"); +}, "readAsBinaryString"); + +test(() => { + var data = readerSync.readAsBinaryString(empty_blob); + assert_equals(data, ""); +}, "readAsBinaryString with empty blob"); + +test(() => { + var data = readerSync.readAsArrayBuffer(blob); + assert_true(data instanceof ArrayBuffer); + assert_equals(data.byteLength, "test".length); +}, "readAsArrayBuffer"); + +test(() => { + var data = readerSync.readAsArrayBuffer(empty_blob); + assert_true(data instanceof ArrayBuffer); + assert_equals(data.byteLength, 0); +}, "readAsArrayBuffer with empty blob"); + +done(); diff --git a/test/wpt/tests/FileAPI/META.yml b/test/wpt/tests/FileAPI/META.yml new file mode 100644 index 0000000..506a59f --- /dev/null +++ b/test/wpt/tests/FileAPI/META.yml @@ -0,0 +1,6 @@ +spec: https://w3c.github.io/FileAPI/ +suggested_reviewers: + - inexorabletash + - zqzhang + - jdm + - mkruisselbrink diff --git a/test/wpt/tests/FileAPI/blob/Blob-array-buffer.any.js b/test/wpt/tests/FileAPI/blob/Blob-array-buffer.any.js new file mode 100644 index 0000000..2310646 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-array-buffer.any.js @@ -0,0 +1,45 @@ +// META: title=Blob Array Buffer +// META: script=../support/Blob.js +'use strict'; + +promise_test(async () => { + const input_arr = new TextEncoder().encode("PASS"); + const blob = new Blob([input_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_true(array_buffer instanceof ArrayBuffer); + assert_equals_typed_array(new Uint8Array(array_buffer), input_arr); +}, "Blob.arrayBuffer()") + +promise_test(async () => { + const input_arr = new TextEncoder().encode(""); + const blob = new Blob([input_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_true(array_buffer instanceof ArrayBuffer); + assert_equals_typed_array(new Uint8Array(array_buffer), input_arr); +}, "Blob.arrayBuffer() empty Blob data") + +promise_test(async () => { + const input_arr = new TextEncoder().encode("\u08B8\u000a"); + const blob = new Blob([input_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_equals_typed_array(new Uint8Array(array_buffer), input_arr); +}, "Blob.arrayBuffer() non-ascii input") + +promise_test(async () => 
{ + const input_arr = [8, 241, 48, 123, 151]; + const typed_arr = new Uint8Array(input_arr); + const blob = new Blob([typed_arr]); + const array_buffer = await blob.arrayBuffer(); + assert_equals_typed_array(new Uint8Array(array_buffer), typed_arr); +}, "Blob.arrayBuffer() non-unicode input") + +promise_test(async () => { + const input_arr = new TextEncoder().encode("PASS"); + const blob = new Blob([input_arr]); + const array_buffer_results = await Promise.all([blob.arrayBuffer(), + blob.arrayBuffer(), blob.arrayBuffer()]); + for (let array_buffer of array_buffer_results) { + assert_true(array_buffer instanceof ArrayBuffer); + assert_equals_typed_array(new Uint8Array(array_buffer), input_arr); + } +}, "Blob.arrayBuffer() concurrent reads") diff --git a/test/wpt/tests/FileAPI/blob/Blob-constructor-dom.window.js b/test/wpt/tests/FileAPI/blob/Blob-constructor-dom.window.js new file mode 100644 index 0000000..4fd4a43 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-constructor-dom.window.js @@ -0,0 +1,53 @@ +// META: title=Blob constructor +// META: script=../support/Blob.js +'use strict'; + +var test_error = { + name: "test", + message: "test error", +}; + +test(function() { + var args = [ + document.createElement("div"), + window, + ]; + args.forEach(function(arg) { + assert_throws_js(TypeError, function() { + new Blob(arg); + }, "Should throw for argument " + format_value(arg) + "."); + }); +}, "Passing platform objects for blobParts should throw a TypeError."); + +test(function() { + var element = document.createElement("div"); + element.appendChild(document.createElement("div")); + element.appendChild(document.createElement("p")); + var list = element.children; + Object.defineProperty(list, "length", { + get: function() { throw test_error; } + }); + assert_throws_exactly(test_error, function() { + new Blob(list); + }); +}, "A platform object that supports indexed properties should be treated as a sequence for the blobParts argument (overwritten 'length'.)"); + +test_blob(function() { + var select = document.createElement("select"); + select.appendChild(document.createElement("option")); + return new Blob(select); +}, { + expected: "[object HTMLOptionElement]", + type: "", + desc: "Passing an platform object that supports indexed properties as the blobParts array should work (select)." +}); + +test_blob(function() { + var elm = document.createElement("div"); + elm.setAttribute("foo", "bar"); + return new Blob(elm.attributes); +}, { + expected: "[object Attr]", + type: "", + desc: "Passing an platform object that supports indexed properties as the blobParts array should work (attributes)." 
+}); \ No newline at end of file diff --git a/test/wpt/tests/FileAPI/blob/Blob-constructor-endings.html b/test/wpt/tests/FileAPI/blob/Blob-constructor-endings.html new file mode 100644 index 0000000..04edd2a --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-constructor-endings.html @@ -0,0 +1,104 @@ + + +Blob constructor: endings option + + + + diff --git a/test/wpt/tests/FileAPI/blob/Blob-constructor.any.js b/test/wpt/tests/FileAPI/blob/Blob-constructor.any.js new file mode 100644 index 0000000..d16f760 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-constructor.any.js @@ -0,0 +1,468 @@ +// META: title=Blob constructor +// META: script=../support/Blob.js +'use strict'; + +test(function() { + assert_true("Blob" in globalThis, "globalThis should have a Blob property."); + assert_equals(Blob.length, 0, "Blob.length should be 0."); + assert_true(Blob instanceof Function, "Blob should be a function."); +}, "Blob interface object"); + +// Step 1. +test(function() { + var blob = new Blob(); + assert_true(blob instanceof Blob); + assert_equals(String(blob), '[object Blob]'); + assert_equals(blob.size, 0); + assert_equals(blob.type, ""); +}, "Blob constructor with no arguments"); +test(function() { + assert_throws_js(TypeError, function() { var blob = Blob(); }); +}, "Blob constructor with no arguments, without 'new'"); +test(function() { + var blob = new Blob; + assert_true(blob instanceof Blob); + assert_equals(blob.size, 0); + assert_equals(blob.type, ""); +}, "Blob constructor without brackets"); +test(function() { + var blob = new Blob(undefined); + assert_true(blob instanceof Blob); + assert_equals(String(blob), '[object Blob]'); + assert_equals(blob.size, 0); + assert_equals(blob.type, ""); +}, "Blob constructor with undefined as first argument"); + +// blobParts argument (WebIDL). +test(function() { + var args = [ + null, + true, + false, + 0, + 1, + 1.5, + "FAIL", + new Date(), + new RegExp(), + {}, + { 0: "FAIL", length: 1 }, + ]; + args.forEach(function(arg) { + assert_throws_js(TypeError, function() { + new Blob(arg); + }, "Should throw for argument " + format_value(arg) + "."); + }); +}, "Passing non-objects, Dates and RegExps for blobParts should throw a TypeError."); + +test_blob(function() { + return new Blob({ + [Symbol.iterator]: Array.prototype[Symbol.iterator], + }); +}, { + expected: "", + type: "", + desc: "A plain object with @@iterator should be treated as a sequence for the blobParts argument." +}); +test(t => { + const blob = new Blob({ + [Symbol.iterator]() { + var i = 0; + return {next: () => [ + {done:false, value:'ab'}, + {done:false, value:'cde'}, + {done:true} + ][i++] + }; + } + }); + assert_equals(blob.size, 5, 'Custom @@iterator should be treated as a sequence'); +}, "A plain object with custom @@iterator should be treated as a sequence for the blobParts argument."); +test_blob(function() { + return new Blob({ + [Symbol.iterator]: Array.prototype[Symbol.iterator], + 0: "PASS", + length: 1 + }); +}, { + expected: "PASS", + type: "", + desc: "A plain object with @@iterator and a length property should be treated as a sequence for the blobParts argument." +}); +test_blob(function() { + return new Blob(new String("xyz")); +}, { + expected: "xyz", + type: "", + desc: "A String object should be treated as a sequence for the blobParts argument." +}); +test_blob(function() { + return new Blob(new Uint8Array([1, 2, 3])); +}, { + expected: "123", + type: "", + desc: "A Uint8Array object should be treated as a sequence for the blobParts argument." 
+}); + +var test_error = { + name: "test", + message: "test error", +}; + +test(function() { + var obj = { + [Symbol.iterator]: Array.prototype[Symbol.iterator], + get length() { throw test_error; } + }; + assert_throws_exactly(test_error, function() { + new Blob(obj); + }); +}, "The length getter should be invoked and any exceptions should be propagated."); + +test(function() { + assert_throws_exactly(test_error, function() { + var obj = { + [Symbol.iterator]: Array.prototype[Symbol.iterator], + length: { + valueOf: null, + toString: function() { throw test_error; } + } + }; + new Blob(obj); + }); + assert_throws_exactly(test_error, function() { + var obj = { + [Symbol.iterator]: Array.prototype[Symbol.iterator], + length: { valueOf: function() { throw test_error; } } + }; + new Blob(obj); + }); +}, "ToUint32 should be applied to the length and any exceptions should be propagated."); + +test(function() { + var received = []; + var obj = { + get [Symbol.iterator]() { + received.push("Symbol.iterator"); + return Array.prototype[Symbol.iterator]; + }, + get length() { + received.push("length getter"); + return { + valueOf: function() { + received.push("length valueOf"); + return 3; + } + }; + }, + get 0() { + received.push("0 getter"); + return { + toString: function() { + received.push("0 toString"); + return "a"; + } + }; + }, + get 1() { + received.push("1 getter"); + throw test_error; + }, + get 2() { + received.push("2 getter"); + assert_unreached("Should not call the getter for 2 if the getter for 1 threw."); + } + }; + assert_throws_exactly(test_error, function() { + new Blob(obj); + }); + assert_array_equals(received, [ + "Symbol.iterator", + "length getter", + "length valueOf", + "0 getter", + "0 toString", + "length getter", + "length valueOf", + "1 getter", + ]); +}, "Getters and value conversions should happen in order until an exception is thrown."); + +// XXX should add tests edge cases of ToLength(length) + +test(function() { + assert_throws_exactly(test_error, function() { + new Blob([{ toString: function() { throw test_error; } }]); + }, "Throwing toString"); + assert_throws_exactly(test_error, function() { + new Blob([{ toString: undefined, valueOf: function() { throw test_error; } }]); + }, "Throwing valueOf"); + assert_throws_exactly(test_error, function() { + new Blob([{ + toString: function() { throw test_error; }, + valueOf: function() { assert_unreached("Should not call valueOf if toString is present."); } + }]); + }, "Throwing toString and valueOf"); + assert_throws_js(TypeError, function() { + new Blob([{toString: null, valueOf: null}]); + }, "Null toString and valueOf"); +}, "ToString should be called on elements of the blobParts array and any exceptions should be propagated."); + +test_blob(function() { + var arr = [ + { toString: function() { arr.pop(); return "PASS"; } }, + { toString: function() { assert_unreached("Should have removed the second element of the array rather than called toString() on it."); } } + ]; + return new Blob(arr); +}, { + expected: "PASS", + type: "", + desc: "Changes to the blobParts array should be reflected in the returned Blob (pop)." 
+}); + +test_blob(function() { + var arr = [ + { + toString: function() { + if (arr.length === 3) { + return "A"; + } + arr.unshift({ + toString: function() { + assert_unreached("Should only access index 0 once."); + } + }); + return "P"; + } + }, + { + toString: function() { + return "SS"; + } + } + ]; + return new Blob(arr); +}, { + expected: "PASS", + type: "", + desc: "Changes to the blobParts array should be reflected in the returned Blob (unshift)." +}); + +test_blob(function() { + // https://www.w3.org/Bugs/Public/show_bug.cgi?id=17652 + return new Blob([ + null, + undefined, + true, + false, + 0, + 1, + new String("stringobject"), + [], + ['x', 'y'], + {}, + { 0: "FAIL", length: 1 }, + { toString: function() { return "stringA"; } }, + { toString: undefined, valueOf: function() { return "stringB"; } }, + { valueOf: function() { assert_unreached("Should not call valueOf if toString is present on the prototype."); } } + ]); +}, { + expected: "nullundefinedtruefalse01stringobjectx,y[object Object][object Object]stringAstringB[object Object]", + type: "", + desc: "ToString should be called on elements of the blobParts array." +}); + +test_blob(function() { + return new Blob([ + new ArrayBuffer(8) + ]); +}, { + expected: "\0\0\0\0\0\0\0\0", + type: "", + desc: "ArrayBuffer elements of the blobParts array should be supported." +}); + +test_blob(function() { + return new Blob([ + new Uint8Array([0x50, 0x41, 0x53, 0x53]), + new Int8Array([0x50, 0x41, 0x53, 0x53]), + new Uint16Array([0x4150, 0x5353]), + new Int16Array([0x4150, 0x5353]), + new Uint32Array([0x53534150]), + new Int32Array([0x53534150]), + new Float32Array([0xD341500000]) + ]); +}, { + expected: "PASSPASSPASSPASSPASSPASSPASS", + type: "", + desc: "Passing typed arrays as elements of the blobParts array should work." +}); +test_blob(function() { + return new Blob([ + // 0x535 3415053534150 + // 0x535 = 0b010100110101 -> Sign = +, Exponent = 1333 - 1023 = 310 + // 0x13415053534150 * 2**(-52) + // ==> 0x13415053534150 * 2**258 = 2510297372767036725005267563121821874921913208671273727396467555337665343087229079989707079680 + new Float64Array([2510297372767036725005267563121821874921913208671273727396467555337665343087229079989707079680]) + ]); +}, { + expected: "PASSPASS", + type: "", + desc: "Passing a Float64Array as element of the blobParts array should work." +}); + +test_blob(function() { + return new Blob([ + new BigInt64Array([BigInt("0x5353415053534150")]), + new BigUint64Array([BigInt("0x5353415053534150")]) + ]); +}, { + expected: "PASSPASSPASSPASS", + type: "", + desc: "Passing BigInt typed arrays as elements of the blobParts array should work." 
+}); + +var t_ports = async_test("Passing a FrozenArray as the blobParts array should work (FrozenArray)."); +t_ports.step(function() { + var channel = new MessageChannel(); + channel.port2.onmessage = this.step_func(function(e) { + var b_ports = new Blob(e.ports); + assert_equals(b_ports.size, "[object MessagePort]".length); + this.done(); + }); + var channel2 = new MessageChannel(); + channel.port1.postMessage('', [channel2.port1]); +}); + +test_blob(function() { + var blob = new Blob(['foo']); + return new Blob([blob, blob]); +}, { + expected: "foofoo", + type: "", + desc: "Array with two blobs" +}); + +test_blob_binary(function() { + var view = new Uint8Array([0, 255, 0]); + return new Blob([view.buffer, view.buffer]); +}, { + expected: [0, 255, 0, 0, 255, 0], + type: "", + desc: "Array with two buffers" +}); + +test_blob_binary(function() { + var view = new Uint8Array([0, 255, 0, 4]); + var blob = new Blob([view, view]); + assert_equals(blob.size, 8); + var view1 = new Uint16Array(view.buffer, 2); + return new Blob([view1, view.buffer, view1]); +}, { + expected: [0, 4, 0, 255, 0, 4, 0, 4], + type: "", + desc: "Array with two bufferviews" +}); + +test_blob(function() { + var view = new Uint8Array([0]); + var blob = new Blob(["fo"]); + return new Blob([view.buffer, blob, "foo"]); +}, { + expected: "\0fofoo", + type: "", + desc: "Array with mixed types" +}); + +test(function() { + const accessed = []; + const stringified = []; + + new Blob([], { + get type() { accessed.push('type'); }, + get endings() { accessed.push('endings'); } + }); + new Blob([], { + type: { toString: () => { stringified.push('type'); return ''; } }, + endings: { toString: () => { stringified.push('endings'); return 'transparent'; } } + }); + assert_array_equals(accessed, ['endings', 'type']); + assert_array_equals(stringified, ['endings', 'type']); +}, "options properties should be accessed in lexicographic order."); + +test(function() { + assert_throws_exactly(test_error, function() { + new Blob( + [{ toString: function() { throw test_error } }], + { + get type() { assert_unreached("type getter should not be called."); } + } + ); + }); +}, "Arguments should be evaluated from left to right."); + +[ + null, + undefined, + {}, + { unrecognized: true }, + /regex/, + function() {} +].forEach(function(arg, idx) { + test_blob(function() { + return new Blob([], arg); + }, { + expected: "", + type: "", + desc: "Passing " + format_value(arg) + " (index " + idx + ") for options should use the defaults." + }); + test_blob(function() { + return new Blob(["\na\r\nb\n\rc\r"], arg); + }, { + expected: "\na\r\nb\n\rc\r", + type: "", + desc: "Passing " + format_value(arg) + " (index " + idx + ") for options should use the defaults (with newlines)." 
+ }); +}); + +[ + 123, + 123.4, + true, + 'abc' +].forEach(arg => { + test(t => { + assert_throws_js(TypeError, () => new Blob([], arg), + 'Blob constructor should throw with invalid property bag'); + }, `Passing ${JSON.stringify(arg)} for options should throw`); +}); + +var type_tests = [ + // blobParts, type, expected type + [[], '', ''], + [[], 'a', 'a'], + [[], 'A', 'a'], + [[], 'text/html', 'text/html'], + [[], 'TEXT/HTML', 'text/html'], + [[], 'text/plain;charset=utf-8', 'text/plain;charset=utf-8'], + [[], '\u00E5', ''], + [[], '\uD801\uDC7E', ''], // U+1047E + [[], ' image/gif ', ' image/gif '], + [[], '\timage/gif\t', ''], + [[], 'image/gif;\u007f', ''], + [[], '\u0130mage/gif', ''], // uppercase i with dot + [[], '\u0131mage/gif', ''], // lowercase dotless i + [[], 'image/gif\u0000', ''], + // check that type isn't changed based on sniffing + [[0x3C, 0x48, 0x54, 0x4D, 0x4C, 0x3E], 'unknown/unknown', 'unknown/unknown'], // "" + [[0x00, 0xFF], 'text/plain', 'text/plain'], + [[0x47, 0x49, 0x46, 0x38, 0x39, 0x61], 'image/png', 'image/png'], // "GIF89a" +]; + +type_tests.forEach(function(t) { + test(function() { + var arr = new Uint8Array([t[0]]).buffer; + var b = new Blob([arr], {type:t[1]}); + assert_equals(b.type, t[2]); + }, "Blob with type " + format_value(t[1])); +}); diff --git a/test/wpt/tests/FileAPI/blob/Blob-in-worker.worker.js b/test/wpt/tests/FileAPI/blob/Blob-in-worker.worker.js new file mode 100644 index 0000000..a0ca845 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-in-worker.worker.js @@ -0,0 +1,9 @@ +importScripts("/resources/testharness.js"); + +promise_test(async () => { + const data = "TEST"; + const blob = new Blob([data], {type: "text/plain"}); + assert_equals(await blob.text(), data); +}, 'Create Blob in Worker'); + +done(); diff --git a/test/wpt/tests/FileAPI/blob/Blob-slice-overflow.any.js b/test/wpt/tests/FileAPI/blob/Blob-slice-overflow.any.js new file mode 100644 index 0000000..388fd92 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-slice-overflow.any.js @@ -0,0 +1,32 @@ +// META: title=Blob slice overflow +'use strict'; + +var text = ''; + +for (var i = 0; i < 2000; ++i) { + text += 'A'; +} + +test(function() { + var blob = new Blob([text]); + var sliceBlob = blob.slice(-1, blob.size); + assert_equals(sliceBlob.size, 1, "Blob slice size"); +}, "slice start is negative, relativeStart will be max((size + start), 0)"); + +test(function() { + var blob = new Blob([text]); + var sliceBlob = blob.slice(blob.size + 1, blob.size); + assert_equals(sliceBlob.size, 0, "Blob slice size"); +}, "slice start is greater than blob size, relativeStart will be min(start, size)"); + +test(function() { + var blob = new Blob([text]); + var sliceBlob = blob.slice(blob.size - 2, -1); + assert_equals(sliceBlob.size, 1, "Blob slice size"); +}, "slice end is negative, relativeEnd will be max((size + end), 0)"); + +test(function() { + var blob = new Blob([text]); + var sliceBlob = blob.slice(blob.size - 2, blob.size + 999); + assert_equals(sliceBlob.size, 2, "Blob slice size"); +}, "slice end is greater than blob size, relativeEnd will be min(end, size)"); diff --git a/test/wpt/tests/FileAPI/blob/Blob-slice.any.js b/test/wpt/tests/FileAPI/blob/Blob-slice.any.js new file mode 100644 index 0000000..1f85d44 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-slice.any.js @@ -0,0 +1,231 @@ +// META: title=Blob slice +// META: script=../support/Blob.js +'use strict'; + +test_blob(function() { + var blobTemp = new Blob(["PASS"]); + return blobTemp.slice(); +}, { + expected: 
"PASS", + type: "", + desc: "no-argument Blob slice" +}); + +test(function() { + var blob1, blob2; + + test_blob(function() { + return blob1 = new Blob(["squiggle"]); + }, { + expected: "squiggle", + type: "", + desc: "blob1." + }); + + test_blob(function() { + return blob2 = new Blob(["steak"], {type: "content/type"}); + }, { + expected: "steak", + type: "content/type", + desc: "blob2." + }); + + test_blob(function() { + return new Blob().slice(0,0,null); + }, { + expected: "", + type: "null", + desc: "null type Blob slice" + }); + + test_blob(function() { + return new Blob().slice(0,0,undefined); + }, { + expected: "", + type: "", + desc: "undefined type Blob slice" + }); + + test_blob(function() { + return new Blob().slice(0,0); + }, { + expected: "", + type: "", + desc: "no type Blob slice" + }); + + var arrayBuffer = new ArrayBuffer(16); + var int8View = new Int8Array(arrayBuffer); + for (var i = 0; i < 16; i++) { + int8View[i] = i + 65; + } + + var testData = [ + [ + ["PASSSTRING"], + [{start: -6, contents: "STRING"}, + {start: -12, contents: "PASSSTRING"}, + {start: 4, contents: "STRING"}, + {start: 12, contents: ""}, + {start: 0, end: -6, contents: "PASS"}, + {start: 0, end: -12, contents: ""}, + {start: 0, end: 4, contents: "PASS"}, + {start: 0, end: 12, contents: "PASSSTRING"}, + {start: 7, end: 4, contents: ""}] + ], + + // Test 3 strings + [ + ["foo", "bar", "baz"], + [{start: 0, end: 9, contents: "foobarbaz"}, + {start: 0, end: 3, contents: "foo"}, + {start: 3, end: 9, contents: "barbaz"}, + {start: 6, end: 9, contents: "baz"}, + {start: 6, end: 12, contents: "baz"}, + {start: 0, end: 9, contents: "foobarbaz"}, + {start: 0, end: 11, contents: "foobarbaz"}, + {start: 10, end: 15, contents: ""}] + ], + + // Test string, Blob, string + [ + ["foo", blob1, "baz"], + [{start: 0, end: 3, contents: "foo"}, + {start: 3, end: 11, contents: "squiggle"}, + {start: 2, end: 4, contents: "os"}, + {start: 10, end: 12, contents: "eb"}] + ], + + // Test blob, string, blob + [ + [blob1, "foo", blob1], + [{start: 0, end: 8, contents: "squiggle"}, + {start: 7, end: 9, contents: "ef"}, + {start: 10, end: 12, contents: "os"}, + {start: 1, end: 4, contents: "qui"}, + {start: 12, end: 15, contents: "qui"}, + {start: 40, end: 60, contents: ""}] + ], + + // Test blobs all the way down + [ + [blob2, blob1, blob2], + [{start: 0, end: 5, contents: "steak"}, + {start: 5, end: 13, contents: "squiggle"}, + {start: 13, end: 18, contents: "steak"}, + {start: 1, end: 3, contents: "te"}, + {start: 6, end: 10, contents: "quig"}] + ], + + // Test an ArrayBufferView + [ + [int8View, blob1, "foo"], + [{start: 0, end: 8, contents: "ABCDEFGH"}, + {start: 8, end: 18, contents: "IJKLMNOPsq"}, + {start: 17, end: 20, contents: "qui"}, + {start: 4, end: 12, contents: "EFGHIJKL"}] + ], + + // Test a partial ArrayBufferView + [ + [new Uint8Array(arrayBuffer, 3, 5), blob1, "foo"], + [{start: 0, end: 8, contents: "DEFGHsqu"}, + {start: 8, end: 18, contents: "igglefoo"}, + {start: 4, end: 12, contents: "Hsquiggl"}] + ], + + // Test type coercion of a number + [ + [3, int8View, "foo"], + [{start: 0, end: 8, contents: "3ABCDEFG"}, + {start: 8, end: 18, contents: "HIJKLMNOPf"}, + {start: 17, end: 21, contents: "foo"}, + {start: 4, end: 12, contents: "DEFGHIJK"}] + ], + + [ + [(new Uint8Array([0, 255, 0])).buffer, + new Blob(['abcd']), + 'efgh', + 'ijklmnopqrstuvwxyz'], + [{start: 1, end: 4, contents: "\uFFFD\u0000a"}, + {start: 4, end: 8, contents: "bcde"}, + {start: 8, end: 12, contents: "fghi"}, + {start: 1, end: 12, contents: 
"\uFFFD\u0000abcdefghi"}] + ] + ]; + + testData.forEach(function(data, i) { + var blobs = data[0]; + var tests = data[1]; + tests.forEach(function(expectations, j) { + test(function() { + var blob = new Blob(blobs); + assert_true(blob instanceof Blob); + assert_false(blob instanceof File); + + test_blob(function() { + return expectations.end === undefined + ? blob.slice(expectations.start) + : blob.slice(expectations.start, expectations.end); + }, { + expected: expectations.contents, + type: "", + desc: "Slicing test: slice (" + i + "," + j + ")." + }); + }, "Slicing test (" + i + "," + j + ")."); + }); + }); +}, "Slices"); + +var invalidTypes = [ + "\xFF", + "te\x09xt/plain", + "te\x00xt/plain", + "te\x1Fxt/plain", + "te\x7Fxt/plain" +]; +invalidTypes.forEach(function(type) { + test_blob(function() { + var blob = new Blob(["PASS"]); + return blob.slice(0, 4, type); + }, { + expected: "PASS", + type: "", + desc: "Invalid contentType (" + format_value(type) + ")" + }); +}); + +var validTypes = [ + "te(xt/plain", + "te)xt/plain", + "text/plain", + "te@xt/plain", + "te,xt/plain", + "te;xt/plain", + "te:xt/plain", + "te\\xt/plain", + "te\"xt/plain", + "te/xt/plain", + "te[xt/plain", + "te]xt/plain", + "te?xt/plain", + "te=xt/plain", + "te{xt/plain", + "te}xt/plain", + "te\x20xt/plain", + "TEXT/PLAIN", + "text/plain;charset = UTF-8", + "text/plain;charset=UTF-8" +]; +validTypes.forEach(function(type) { + test_blob(function() { + var blob = new Blob(["PASS"]); + return blob.slice(0, 4, type); + }, { + expected: "PASS", + type: type.toLowerCase(), + desc: "Valid contentType (" + format_value(type) + ")" + }); +}); diff --git a/test/wpt/tests/FileAPI/blob/Blob-stream-byob-crash.html b/test/wpt/tests/FileAPI/blob/Blob-stream-byob-crash.html new file mode 100644 index 0000000..5992ed1 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-stream-byob-crash.html @@ -0,0 +1,11 @@ + + diff --git a/test/wpt/tests/FileAPI/blob/Blob-stream-sync-xhr-crash.html b/test/wpt/tests/FileAPI/blob/Blob-stream-sync-xhr-crash.html new file mode 100644 index 0000000..fe54fb6 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-stream-sync-xhr-crash.html @@ -0,0 +1,13 @@ + + diff --git a/test/wpt/tests/FileAPI/blob/Blob-stream.any.js b/test/wpt/tests/FileAPI/blob/Blob-stream.any.js new file mode 100644 index 0000000..87710a1 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-stream.any.js @@ -0,0 +1,83 @@ +// META: title=Blob Stream +// META: script=../support/Blob.js +// META: script=/common/gc.js +'use strict'; + +// Helper function that triggers garbage collection while reading a chunk +// if perform_gc is true. +async function read_and_gc(reader, perform_gc) { + // Passing Uint8Array for byte streams; non-byte streams will simply ignore it + const read_promise = reader.read(new Uint8Array(64)); + if (perform_gc) { + await garbageCollect(); + } + return read_promise; +} + +// Takes in a ReadableStream and reads from it until it is done, returning +// an array that contains the results of each read operation. If perform_gc +// is true, garbage collection is triggered while reading every chunk. 
+async function read_all_chunks(stream, { perform_gc = false, mode } = {}) { + assert_true(stream instanceof ReadableStream); + assert_true('getReader' in stream); + const reader = stream.getReader({ mode }); + + assert_true('read' in reader); + let read_value = await read_and_gc(reader, perform_gc); + + let out = []; + let i = 0; + while (!read_value.done) { + for (let val of read_value.value) { + out[i++] = val; + } + read_value = await read_and_gc(reader, perform_gc); + } + return out; +} + +promise_test(async () => { + const blob = new Blob(["PASS"]); + const stream = blob.stream(); + const chunks = await read_all_chunks(stream); + for (let [index, value] of chunks.entries()) { + assert_equals(value, "PASS".charCodeAt(index)); + } +}, "Blob.stream()") + +promise_test(async () => { + const blob = new Blob(); + const stream = blob.stream(); + const chunks = await read_all_chunks(stream); + assert_array_equals(chunks, []); +}, "Blob.stream() empty Blob") + +promise_test(async () => { + const input_arr = [8, 241, 48, 123, 151]; + const typed_arr = new Uint8Array(input_arr); + const blob = new Blob([typed_arr]); + const stream = blob.stream(); + const chunks = await read_all_chunks(stream); + assert_array_equals(chunks, input_arr); +}, "Blob.stream() non-unicode input") + +promise_test(async() => { + const input_arr = [8, 241, 48, 123, 151]; + const typed_arr = new Uint8Array(input_arr); + let blob = new Blob([typed_arr]); + const stream = blob.stream(); + blob = null; + await garbageCollect(); + const chunks = await read_all_chunks(stream, { perform_gc: true }); + assert_array_equals(chunks, input_arr); +}, "Blob.stream() garbage collection of blob shouldn't break stream" + + "consumption") + +promise_test(async () => { + const input_arr = [8, 241, 48, 123, 151]; + const typed_arr = new Uint8Array(input_arr); + let blob = new Blob([typed_arr]); + const stream = blob.stream(); + const chunks = await read_all_chunks(stream, { mode: "byob" }); + assert_array_equals(chunks, input_arr); +}, "Reading Blob.stream() with BYOB reader") diff --git a/test/wpt/tests/FileAPI/blob/Blob-text.any.js b/test/wpt/tests/FileAPI/blob/Blob-text.any.js new file mode 100644 index 0000000..d04fa97 --- /dev/null +++ b/test/wpt/tests/FileAPI/blob/Blob-text.any.js @@ -0,0 +1,64 @@ +// META: title=Blob Text +// META: script=../support/Blob.js +'use strict'; + +promise_test(async () => { + const blob = new Blob(["PASS"]); + const text = await blob.text(); + assert_equals(text, "PASS"); +}, "Blob.text()") + +promise_test(async () => { + const blob = new Blob(); + const text = await blob.text(); + assert_equals(text, ""); +}, "Blob.text() empty blob data") + +promise_test(async () => { + const blob = new Blob(["P", "A", "SS"]); + const text = await blob.text(); + assert_equals(text, "PASS"); +}, "Blob.text() multi-element array in constructor") + +promise_test(async () => { + const non_unicode = "\u0061\u030A"; + const input_arr = new TextEncoder().encode(non_unicode); + const blob = new Blob([input_arr]); + const text = await blob.text(); + assert_equals(text, non_unicode); +}, "Blob.text() non-unicode") + +promise_test(async () => { + const blob = new Blob(["PASS"], { type: "text/plain;charset=utf-16le" }); + const text = await blob.text(); + assert_equals(text, "PASS"); +}, "Blob.text() different charset param in type option") + +promise_test(async () => { + const non_unicode = "\u0061\u030A"; + const input_arr = new TextEncoder().encode(non_unicode); + const blob = new Blob([input_arr], { type: 
"text/plain;charset=utf-16le" }); + const text = await blob.text(); + assert_equals(text, non_unicode); +}, "Blob.text() different charset param with non-ascii input") + +promise_test(async () => { + const input_arr = new Uint8Array([192, 193, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255]); + const blob = new Blob([input_arr]); + const text = await blob.text(); + assert_equals(text, "\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd" + + "\ufffd\ufffd\ufffd\ufffd"); +}, "Blob.text() invalid utf-8 input") + +promise_test(async () => { + const input_arr = new Uint8Array([192, 193, 245, 246, 247, 248, 249, 250, 251, + 252, 253, 254, 255]); + const blob = new Blob([input_arr]); + const text_results = await Promise.all([blob.text(), blob.text(), + blob.text()]); + for (let text of text_results) { + assert_equals(text, "\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd" + + "\ufffd\ufffd\ufffd\ufffd"); + } +}, "Blob.text() concurrent reads") diff --git a/test/wpt/tests/FileAPI/file/File-constructor-endings.html b/test/wpt/tests/FileAPI/file/File-constructor-endings.html new file mode 100644 index 0000000..1282b6c --- /dev/null +++ b/test/wpt/tests/FileAPI/file/File-constructor-endings.html @@ -0,0 +1,104 @@ + + +File constructor: endings option + + + + diff --git a/test/wpt/tests/FileAPI/file/File-constructor.any.js b/test/wpt/tests/FileAPI/file/File-constructor.any.js new file mode 100644 index 0000000..0b0185c --- /dev/null +++ b/test/wpt/tests/FileAPI/file/File-constructor.any.js @@ -0,0 +1,155 @@ +// META: title=File constructor + +const to_string_obj = { toString: () => 'a string' }; +const to_string_throws = { toString: () => { throw new Error('expected'); } }; + +test(function() { + assert_true("File" in globalThis, "globalThis should have a File property."); +}, "File interface object exists"); + +test(t => { + assert_throws_js(TypeError, () => new File(), + 'Bits argument is required'); + assert_throws_js(TypeError, () => new File([]), + 'Name argument is required'); +}, 'Required arguments'); + +function test_first_argument(arg1, expectedSize, testName) { + test(function() { + var file = new File(arg1, "dummy"); + assert_true(file instanceof File); + assert_equals(file.name, "dummy"); + assert_equals(file.size, expectedSize); + assert_equals(file.type, ""); + // assert_false(file.isClosed); XXX: File.isClosed doesn't seem to be implemented + assert_not_equals(file.lastModified, ""); + }, testName); +} + +test_first_argument([], 0, "empty fileBits"); +test_first_argument(["bits"], 4, "DOMString fileBits"); +test_first_argument(["ð“½ð“®ð”ð“½"], 16, "Unicode DOMString fileBits"); +test_first_argument([new String('string object')], 13, "String object fileBits"); +test_first_argument([new Blob()], 0, "Empty Blob fileBits"); +test_first_argument([new Blob(["bits"])], 4, "Blob fileBits"); +test_first_argument([new File([], 'world.txt')], 0, "Empty File fileBits"); +test_first_argument([new File(["bits"], 'world.txt')], 4, "File fileBits"); +test_first_argument([new ArrayBuffer(8)], 8, "ArrayBuffer fileBits"); +test_first_argument([new Uint8Array([0x50, 0x41, 0x53, 0x53])], 4, "Typed array fileBits"); +test_first_argument(["bits", new Blob(["bits"]), new Blob(), new Uint8Array([0x50, 0x41]), + new Uint16Array([0x5353]), new Uint32Array([0x53534150])], 16, "Various fileBits"); +test_first_argument([12], 2, "Number in fileBits"); +test_first_argument([[1,2,3]], 5, "Array in fileBits"); +test_first_argument([{}], 15, "Object in fileBits"); // "[object Object]" +if 
(globalThis.document !== undefined) { + test_first_argument([document.body], 24, "HTMLBodyElement in fileBits"); // "[object HTMLBodyElement]" +} +test_first_argument([to_string_obj], 8, "Object with toString in fileBits"); +test_first_argument({[Symbol.iterator]() { + let i = 0; + return {next: () => [ + {done:false, value:'ab'}, + {done:false, value:'cde'}, + {done:true} + ][i++]}; +}}, 5, 'Custom @@iterator'); + +[ + 'hello', + 0, + null +].forEach(arg => { + test(t => { + assert_throws_js(TypeError, () => new File(arg, 'world.html'), + 'Constructor should throw for invalid bits argument'); + }, `Invalid bits argument: ${JSON.stringify(arg)}`); +}); + +test(t => { + assert_throws_js(Error, () => new File([to_string_throws], 'name.txt'), + 'Constructor should propagate exceptions'); +}, 'Bits argument: object that throws'); + + +function test_second_argument(arg2, expectedFileName, testName) { + test(function() { + var file = new File(["bits"], arg2); + assert_true(file instanceof File); + assert_equals(file.name, expectedFileName); + }, testName); +} + +test_second_argument("dummy", "dummy", "Using fileName"); +test_second_argument("dummy/foo", "dummy/foo", + "No replacement when using special character in fileName"); +test_second_argument(null, "null", "Using null fileName"); +test_second_argument(1, "1", "Using number fileName"); +test_second_argument('', '', "Using empty string fileName"); +if (globalThis.document !== undefined) { + test_second_argument(document.body, '[object HTMLBodyElement]', "Using object fileName"); +} + +// testing the third argument +[ + {type: 'text/plain', expected: 'text/plain'}, + {type: 'text/plain;charset=UTF-8', expected: 'text/plain;charset=utf-8'}, + {type: 'TEXT/PLAIN', expected: 'text/plain'}, + {type: 'ð“½ð“®ð”ð“½/ð”­ð”©ð”žð”¦ð”«', expected: ''}, + {type: 'ascii/nonprintable\u001F', expected: ''}, + {type: 'ascii/nonprintable\u007F', expected: ''}, + {type: 'nonascii\u00EE', expected: ''}, + {type: 'nonascii\u1234', expected: ''}, + {type: 'nonparsable', expected: 'nonparsable'} +].forEach(testCase => { + test(t => { + var file = new File(["bits"], "dummy", { type: testCase.type}); + assert_true(file instanceof File); + assert_equals(file.type, testCase.expected); + }, `Using type in File constructor: ${testCase.type}`); +}); +test(function() { + var file = new File(["bits"], "dummy", { lastModified: 42 }); + assert_true(file instanceof File); + assert_equals(file.lastModified, 42); +}, "Using lastModified"); +test(function() { + var file = new File(["bits"], "dummy", { name: "foo" }); + assert_true(file instanceof File); + assert_equals(file.name, "dummy"); +}, "Misusing name"); +test(function() { + var file = new File(["bits"], "dummy", { unknownKey: "value" }); + assert_true(file instanceof File); + assert_equals(file.name, "dummy"); +}, "Unknown properties are ignored"); + +[ + 123, + 123.4, + true, + 'abc' +].forEach(arg => { + test(t => { + assert_throws_js(TypeError, () => new File(['bits'], 'name.txt', arg), + 'Constructor should throw for invalid property bag type'); + }, `Invalid property bag: ${JSON.stringify(arg)}`); +}); + +[ + null, + undefined, + [1,2,3], + /regex/, + function() {} +].forEach(arg => { + test(t => { + assert_equals(new File(['bits'], 'name.txt', arg).size, 4, + 'Constructor should accept object-ish property bag type'); + }, `Unusual but valid property bag: ${arg}`); +}); + +test(t => { + assert_throws_js(Error, + () => new File(['bits'], 'name.txt', {type: to_string_throws}), + 'Constructor should propagate 
exceptions'); +}, 'Property bag propagates exceptions'); diff --git a/test/wpt/tests/FileAPI/file/Worker-read-file-constructor.worker.js b/test/wpt/tests/FileAPI/file/Worker-read-file-constructor.worker.js new file mode 100644 index 0000000..4e003b3 --- /dev/null +++ b/test/wpt/tests/FileAPI/file/Worker-read-file-constructor.worker.js @@ -0,0 +1,15 @@ +importScripts("/resources/testharness.js"); + +async_test(function() { + var file = new File(["bits"], "dummy", { 'type': 'text/plain', lastModified: 42 }); + var reader = new FileReader(); + reader.onload = this.step_func_done(function() { + assert_equals(file.name, "dummy", "file name"); + assert_equals(reader.result, "bits", "file content"); + assert_equals(file.lastModified, 42, "file lastModified"); + }); + reader.onerror = this.unreached_func("Unexpected error event"); + reader.readAsText(file); +}, "FileReader in Worker"); + +done(); diff --git a/test/wpt/tests/FileAPI/file/resources/echo-content-escaped.py b/test/wpt/tests/FileAPI/file/resources/echo-content-escaped.py new file mode 100644 index 0000000..5370e1e --- /dev/null +++ b/test/wpt/tests/FileAPI/file/resources/echo-content-escaped.py @@ -0,0 +1,26 @@ +from wptserve.utils import isomorphic_encode + +# Outputs the request body, with controls and non-ASCII bytes escaped +# (b"\n" becomes b"\\x0a"), and with backslashes doubled. +# As a convenience, CRLF newlines are left as is. + +def escape_byte(byte): + # Convert int byte into a single-char binary string. + byte = bytes([byte]) + if b"\0" <= byte <= b"\x1F" or byte >= b"\x7F": + return b"\\x%02x" % ord(byte) + if byte == b"\\": + return b"\\\\" + return byte + +def main(request, response): + + headers = [(b"X-Request-Method", isomorphic_encode(request.method)), + (b"X-Request-Content-Length", request.headers.get(b"Content-Length", b"NO")), + (b"X-Request-Content-Type", request.headers.get(b"Content-Type", b"NO")), + # Avoid any kind of content sniffing on the response. 
+ (b"Content-Type", b"text/plain; charset=UTF-8")] + + content = b"".join(map(escape_byte, request.body)).replace(b"\\x0d\\x0a", b"\r\n") + + return headers, content diff --git a/test/wpt/tests/FileAPI/file/send-file-form-controls.html b/test/wpt/tests/FileAPI/file/send-file-form-controls.html new file mode 100644 index 0000000..6347065 --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-form-controls.html @@ -0,0 +1,113 @@ + + +Upload files named using controls + + + + + + + + diff --git a/test/wpt/tests/FileAPI/file/send-file-form-iso-2022-jp.html b/test/wpt/tests/FileAPI/file/send-file-form-iso-2022-jp.html new file mode 100644 index 0000000..c931c9b --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-form-iso-2022-jp.html @@ -0,0 +1,65 @@ + + + +Upload files in ISO-2022-JP form + + + + + + + + diff --git a/test/wpt/tests/FileAPI/file/send-file-form-punctuation.html b/test/wpt/tests/FileAPI/file/send-file-form-punctuation.html new file mode 100644 index 0000000..a6568e2 --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-form-punctuation.html @@ -0,0 +1,226 @@ + + +Upload files named using punctuation + + + + + + + + diff --git a/test/wpt/tests/FileAPI/file/send-file-form-utf-8.html b/test/wpt/tests/FileAPI/file/send-file-form-utf-8.html new file mode 100644 index 0000000..1be44f4 --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-form-utf-8.html @@ -0,0 +1,62 @@ + + +Upload files in UTF-8 form + + + + + + + + diff --git a/test/wpt/tests/FileAPI/file/send-file-form-windows-1252.html b/test/wpt/tests/FileAPI/file/send-file-form-windows-1252.html new file mode 100644 index 0000000..21b219f --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-form-windows-1252.html @@ -0,0 +1,62 @@ + + +Upload files in Windows-1252 form + + + + + + + + diff --git a/test/wpt/tests/FileAPI/file/send-file-form-x-user-defined.html b/test/wpt/tests/FileAPI/file/send-file-form-x-user-defined.html new file mode 100644 index 0000000..8d6605d --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-form-x-user-defined.html @@ -0,0 +1,63 @@ + + +Upload files in x-user-defined form + + + + + + + + diff --git a/test/wpt/tests/FileAPI/file/send-file-form.html b/test/wpt/tests/FileAPI/file/send-file-form.html new file mode 100644 index 0000000..baa8d42 --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-form.html @@ -0,0 +1,25 @@ + + +Upload ASCII-named file in UTF-8 form + + + + + + + + diff --git a/test/wpt/tests/FileAPI/file/send-file-formdata-controls.any.js b/test/wpt/tests/FileAPI/file/send-file-formdata-controls.any.js new file mode 100644 index 0000000..e95d3aa --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-formdata-controls.any.js @@ -0,0 +1,69 @@ +// META: title=FormData: FormData: Upload files named using controls +// META: script=../support/send-file-formdata-helper.js + "use strict"; + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-NUL-[\0].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-BS-[\b].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-VT-[\v].txt", + }); + + // These have characters that undergo processing in name=, + // filename=, and/or value; formDataPostFileUploadTest postprocesses + // expectedEncodedBaseName for these internally. 
+ + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-LF-[\n].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-LF-CR-[\n\r].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-CR-[\r].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-CR-LF-[\r\n].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-HT-[\t].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-FF-[\f].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-DEL-[\x7F].txt", + }); + + // The rest should be passed through unmodified: + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-ESC-[\x1B].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-SPACE-[ ].txt", + }); diff --git a/test/wpt/tests/FileAPI/file/send-file-formdata-punctuation.any.js b/test/wpt/tests/FileAPI/file/send-file-formdata-punctuation.any.js new file mode 100644 index 0000000..987dba3 --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-formdata-punctuation.any.js @@ -0,0 +1,144 @@ +// META: title=FormData: FormData: Upload files named using punctuation +// META: script=../support/send-file-formdata-helper.js + "use strict"; + + // These have characters that undergo processing in name=, + // filename=, and/or value; formDataPostFileUploadTest postprocesses + // expectedEncodedBaseName for these internally. 
+ + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-QUOTATION-MARK-[\x22].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: '"file-for-upload-in-form-double-quoted.txt"', + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-REVERSE-SOLIDUS-[\\].txt", + }); + + // The rest should be passed through unmodified: + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-EXCLAMATION-MARK-[!].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-DOLLAR-SIGN-[$].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-PERCENT-SIGN-[%].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-AMPERSAND-[&].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-APOSTROPHE-['].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-LEFT-PARENTHESIS-[(].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-RIGHT-PARENTHESIS-[)].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-ASTERISK-[*].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-PLUS-SIGN-[+].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-COMMA-[,].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-FULL-STOP-[.].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-SOLIDUS-[/].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-COLON-[:].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-SEMICOLON-[;].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-EQUALS-SIGN-[=].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-QUESTION-MARK-[?].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-CIRCUMFLEX-ACCENT-[^].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-LEFT-SQUARE-BRACKET-[[].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-RIGHT-SQUARE-BRACKET-[]].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-LEFT-CURLY-BRACKET-[{].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-VERTICAL-LINE-[|].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-RIGHT-CURLY-BRACKET-[}].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form-TILDE-[~].txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: 
"'file-for-upload-in-form-single-quoted.txt'", + }); diff --git a/test/wpt/tests/FileAPI/file/send-file-formdata-utf-8.any.js b/test/wpt/tests/FileAPI/file/send-file-formdata-utf-8.any.js new file mode 100644 index 0000000..b8bd74c --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-formdata-utf-8.any.js @@ -0,0 +1,33 @@ +// META: title=FormData: FormData: Upload files in UTF-8 fetch() +// META: script=../support/send-file-formdata-helper.js + "use strict"; + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form.txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "x-user-defined", + fileBaseName: "file-for-upload-in-form-\uF7F0\uF793\uF783\uF7A0.txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "windows-1252", + fileBaseName: "file-for-upload-in-form-☺😂.txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "JIS X 0201 and JIS X 0208", + fileBaseName: "file-for-upload-in-form-★星★.txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "Unicode", + fileBaseName: "file-for-upload-in-form-☺😂.txt", + }); + + formDataPostFileUploadTest({ + fileNameSource: "Unicode", + fileBaseName: `file-for-upload-in-form-${kTestChars}.txt`, + }); diff --git a/test/wpt/tests/FileAPI/file/send-file-formdata.any.js b/test/wpt/tests/FileAPI/file/send-file-formdata.any.js new file mode 100644 index 0000000..e13a348 --- /dev/null +++ b/test/wpt/tests/FileAPI/file/send-file-formdata.any.js @@ -0,0 +1,8 @@ +// META: title=FormData: Upload ASCII-named file in UTF-8 form +// META: script=../support/send-file-formdata-helper.js + "use strict"; + + formDataPostFileUploadTest({ + fileNameSource: "ASCII", + fileBaseName: "file-for-upload-in-form.txt", + }); diff --git a/test/wpt/tests/FileAPI/fileReader.any.js b/test/wpt/tests/FileAPI/fileReader.any.js new file mode 100644 index 0000000..2876dcb --- /dev/null +++ b/test/wpt/tests/FileAPI/fileReader.any.js @@ -0,0 +1,59 @@ +// META: title=FileReader States + +'use strict'; + +test(function () { + assert_true( + "FileReader" in globalThis, + "globalThis should have a FileReader property.", + ); +}, "FileReader interface object"); + +test(function () { + var fileReader = new FileReader(); + assert_true(fileReader instanceof FileReader); +}, "no-argument FileReader constructor"); + +var t_abort = async_test("FileReader States -- abort"); +t_abort.step(function () { + var fileReader = new FileReader(); + assert_equals(fileReader.readyState, 0); + assert_equals(fileReader.readyState, FileReader.EMPTY); + + var blob = new Blob(); + fileReader.readAsArrayBuffer(blob); + assert_equals(fileReader.readyState, 1); + assert_equals(fileReader.readyState, FileReader.LOADING); + + fileReader.onabort = this.step_func(function (e) { + assert_equals(fileReader.readyState, 2); + assert_equals(fileReader.readyState, FileReader.DONE); + t_abort.done(); + }); + fileReader.abort(); + fileReader.onabort = this.unreached_func("abort event should fire sync"); +}); + +var t_event = async_test("FileReader States -- events"); +t_event.step(function () { + var fileReader = new FileReader(); + + var blob = new Blob(); + fileReader.readAsArrayBuffer(blob); + + fileReader.onloadstart = this.step_func(function (e) { + assert_equals(fileReader.readyState, 1); + assert_equals(fileReader.readyState, FileReader.LOADING); + }); + + fileReader.onprogress = this.step_func(function (e) { + assert_equals(fileReader.readyState, 1); + assert_equals(fileReader.readyState, FileReader.LOADING); + }); + + fileReader.onloadend = 
this.step_func(function (e) { + assert_equals(fileReader.readyState, 2); + assert_equals(fileReader.readyState, FileReader.DONE); + t_event.done(); + }); +}); diff --git a/test/wpt/tests/FileAPI/filelist-section/filelist.html b/test/wpt/tests/FileAPI/filelist-section/filelist.html new file mode 100644 index 0000000..b97dcde --- /dev/null +++ b/test/wpt/tests/FileAPI/filelist-section/filelist.html @@ -0,0 +1,57 @@ + + + + + FileAPI Test: filelist + + + + + + + + + +
+ +
+
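As context for the FileReader state assertions above and the FileList tests that follow, here is a minimal sketch of reading a user-selected file and watching readyState move through EMPTY, LOADING and DONE; the input element and its id are illustrative assumptions, not taken from these tests:

    // Sketch only: assumes <input type="file" id="upload"> on the page.
    const input = document.getElementById('upload');
    input.addEventListener('change', () => {
      if (input.files.length === 0) return;                    // FileList from the picker
      const reader = new FileReader();
      console.log(reader.readyState === FileReader.EMPTY);     // true (0): nothing read yet
      reader.onloadend = () => {
        console.log(reader.readyState === FileReader.DONE);    // true (2): read finished
        console.log(reader.result.slice(0, 80));               // text of the file
      };
      reader.readAsText(input.files[0]);
      console.log(reader.readyState === FileReader.LOADING);   // true (1): set synchronously
    });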
+ + + + + diff --git a/test/wpt/tests/FileAPI/filelist-section/filelist_multiple_selected_files-manual.html b/test/wpt/tests/FileAPI/filelist-section/filelist_multiple_selected_files-manual.html new file mode 100644 index 0000000..2efaa05 --- /dev/null +++ b/test/wpt/tests/FileAPI/filelist-section/filelist_multiple_selected_files-manual.html @@ -0,0 +1,64 @@ + + + + + FileAPI Test: filelist_multiple_selected_files + + + + + + + + + +
+ +
+
+

Test steps:

  1. Download upload.txt, upload.zip to local.
  2. Select the local two files (upload.txt, upload.zip) to run the test.
+
+ +
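The manual test above relies on the tester picking both files; for illustration, a small sketch of how the resulting FileList can be inspected once a multi-file selection has been made (the input element and its id are assumptions):

    // Sketch only: assumes <input type="file" id="picker" multiple> on the page.
    const picker = document.getElementById('picker');
    picker.addEventListener('change', () => {
      const files = picker.files;                   // FileList
      console.log(files.length);                    // 2 when upload.txt and upload.zip are chosen
      for (const file of files) {                   // FileList exposes a value iterator
        console.log(file instanceof File, file.name, file.size, file.type);
      }
      console.log(files.item(0) === files[0]);      // item() and indexed access agree
    });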
+ + + + diff --git a/test/wpt/tests/FileAPI/filelist-section/filelist_selected_file-manual.html b/test/wpt/tests/FileAPI/filelist-section/filelist_selected_file-manual.html new file mode 100644 index 0000000..966aadd --- /dev/null +++ b/test/wpt/tests/FileAPI/filelist-section/filelist_selected_file-manual.html @@ -0,0 +1,64 @@ + + + + + FileAPI Test: filelist_selected_file + + + + + + + + + +
+ +
+
+

Test steps:

  1. Download upload.txt to local.
  2. Select the local upload.txt file to run the test.
+
+ +
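Where a test cannot wait for a manual selection, an equivalent File can be constructed directly; a minimal sketch that mirrors the upload.txt fixture below (the lastModified timestamp is an arbitrary illustrative value):

    // Sketch only: builds a stand-in for the manually selected upload.txt.
    const file = new File(
      ['Hello, this is test file for file upload.\n'],
      'upload.txt',
      { type: 'text/plain', lastModified: 1600000000000 });
    console.log(file.name, file.size, file.type, file.lastModified);
    file.text().then(text => console.log(text.startsWith('Hello')));  // true; File inherits Blob.text()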
+ + + + diff --git a/test/wpt/tests/FileAPI/filelist-section/support/upload.txt b/test/wpt/tests/FileAPI/filelist-section/support/upload.txt new file mode 100644 index 0000000..f45965b --- /dev/null +++ b/test/wpt/tests/FileAPI/filelist-section/support/upload.txt @@ -0,0 +1 @@ +Hello, this is test file for file upload. diff --git a/test/wpt/tests/FileAPI/filelist-section/support/upload.zip b/test/wpt/tests/FileAPI/filelist-section/support/upload.zip new file mode 100644 index 0000000..a933d6a Binary files /dev/null and b/test/wpt/tests/FileAPI/filelist-section/support/upload.zip differ diff --git a/test/wpt/tests/FileAPI/historical.https.html b/test/wpt/tests/FileAPI/historical.https.html new file mode 100644 index 0000000..4f841f1 --- /dev/null +++ b/test/wpt/tests/FileAPI/historical.https.html @@ -0,0 +1,65 @@ + + + + + Historical features + + + + + +
+ + + diff --git a/test/wpt/tests/FileAPI/idlharness-manual.html b/test/wpt/tests/FileAPI/idlharness-manual.html new file mode 100644 index 0000000..c1d8b0c --- /dev/null +++ b/test/wpt/tests/FileAPI/idlharness-manual.html @@ -0,0 +1,45 @@ + + + + + File API manual IDL tests + + + + + + + + +

File API manual IDL tests

+ +

Either download upload.txt and select it below or select an arbitrary local file.

+ +
+ +
+ +
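The manual IDL page presumably hands the user-selected objects to the same harness used by the automated tests below; a hedged sketch of what that wiring could look like, reusing the idl_test/add_objects pattern from idlharness.any.js (the input id and the expressions are assumptions):

    // Sketch only: the expressions are evaluated by idlharness after a file is chosen.
    idl_test(
      ['FileAPI'],
      ['dom', 'html', 'url'],
      idl_array => {
        idl_array.add_objects({
          FileList: ['document.getElementById("fileChooser").files'],   // assumed element id
          File: ['document.getElementById("fileChooser").files[0]'],
        });
      }
    );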
+ + + + diff --git a/test/wpt/tests/FileAPI/idlharness.any.js b/test/wpt/tests/FileAPI/idlharness.any.js new file mode 100644 index 0000000..1744242 --- /dev/null +++ b/test/wpt/tests/FileAPI/idlharness.any.js @@ -0,0 +1,19 @@ +// META: script=/resources/WebIDLParser.js +// META: script=/resources/idlharness.js +// META: timeout=long + +'use strict'; + +// https://w3c.github.io/FileAPI/ + +idl_test( + ['FileAPI'], + ['dom', 'html', 'url'], + idl_array => { + idl_array.add_objects({ + Blob: ['new Blob(["TEST"])'], + File: ['new File(["myFileBits"], "myFileName")'], + FileReader: ['new FileReader()'] + }); + } +); diff --git a/test/wpt/tests/FileAPI/idlharness.html b/test/wpt/tests/FileAPI/idlharness.html new file mode 100644 index 0000000..45e8684 --- /dev/null +++ b/test/wpt/tests/FileAPI/idlharness.html @@ -0,0 +1,37 @@ + + + + + File API automated IDL tests (requiring dom) + + + + + + + + +

File API automated IDL tests

+ +
+ +
+ +
+ + + + + diff --git a/test/wpt/tests/FileAPI/idlharness.worker.js b/test/wpt/tests/FileAPI/idlharness.worker.js new file mode 100644 index 0000000..002aaed --- /dev/null +++ b/test/wpt/tests/FileAPI/idlharness.worker.js @@ -0,0 +1,17 @@ +importScripts("/resources/testharness.js"); +importScripts("/resources/WebIDLParser.js", "/resources/idlharness.js"); + +'use strict'; + +// https://w3c.github.io/FileAPI/ + +idl_test( + ['FileAPI'], + ['dom', 'html', 'url'], + idl_array => { + idl_array.add_objects({ + FileReaderSync: ['new FileReaderSync()'] + }); + } +); +done(); diff --git a/test/wpt/tests/FileAPI/progress-manual.html b/test/wpt/tests/FileAPI/progress-manual.html new file mode 100644 index 0000000..b2e03b3 --- /dev/null +++ b/test/wpt/tests/FileAPI/progress-manual.html @@ -0,0 +1,49 @@ + + +Process Events for FileReader + + + + +Please choose one file through this input below.
+ +
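The progress-manual page above asks the tester to choose a file and observes the FileReader events it dispatches; as a rough sketch of that event sequence and the loaded/total bookkeeping (the `file` variable stands for any Blob or File and is an assumption):

    // Sketch only: `file` is assumed to be a Blob or File obtained elsewhere.
    const reader = new FileReader();
    reader.onloadstart = e => console.log('loadstart', e.loaded, e.total);
    reader.onprogress = e => {
      if (e.lengthComputable) {                        // true for blob reads
        console.log(`progress ${e.loaded}/${e.total}`);
      }
    };
    reader.onload = () => console.log('load: result is ready');
    reader.onloadend = () => console.log('loadend: always dispatched last');
    reader.readAsArrayBuffer(file);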
+ diff --git a/test/wpt/tests/FileAPI/reading-data-section/Determining-Encoding.any.js b/test/wpt/tests/FileAPI/reading-data-section/Determining-Encoding.any.js new file mode 100644 index 0000000..5b69f7e --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/Determining-Encoding.any.js @@ -0,0 +1,81 @@ +// META: title=FileAPI Test: Blob Determining Encoding + +var t = async_test("Blob Determing Encoding with encoding argument"); +t.step(function() { + // string 'hello' + var data = [0xFE,0xFF,0x00,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F]; + var blob = new Blob([new Uint8Array(data)]); + var reader = new FileReader(); + + reader.onloadend = t.step_func_done (function(event) { + assert_equals(this.result, "hello", "The FileReader should read the ArrayBuffer through UTF-16BE.") + }, reader); + + reader.readAsText(blob, "UTF-16BE"); +}); + +var t = async_test("Blob Determing Encoding with type attribute"); +t.step(function() { + var data = [0xFE,0xFF,0x00,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F]; + var blob = new Blob([new Uint8Array(data)], {type:"text/plain;charset=UTF-16BE"}); + var reader = new FileReader(); + + reader.onloadend = t.step_func_done (function(event) { + assert_equals(this.result, "hello", "The FileReader should read the ArrayBuffer through UTF-16BE.") + }, reader); + + reader.readAsText(blob); +}); + + +var t = async_test("Blob Determing Encoding with UTF-8 BOM"); +t.step(function() { + var data = [0xEF,0xBB,0xBF,0x68,0x65,0x6C,0x6C,0xC3,0xB6]; + var blob = new Blob([new Uint8Array(data)]); + var reader = new FileReader(); + + reader.onloadend = t.step_func_done (function(event) { + assert_equals(this.result, "hellö", "The FileReader should read the blob with UTF-8."); + }, reader); + + reader.readAsText(blob); +}); + +var t = async_test("Blob Determing Encoding without anything implying charset."); +t.step(function() { + var data = [0x68,0x65,0x6C,0x6C,0xC3,0xB6]; + var blob = new Blob([new Uint8Array(data)]); + var reader = new FileReader(); + + reader.onloadend = t.step_func_done (function(event) { + assert_equals(this.result, "hellö", "The FileReader should read the blob by default with UTF-8."); + }, reader); + + reader.readAsText(blob); +}); + +var t = async_test("Blob Determing Encoding with UTF-16BE BOM"); +t.step(function() { + var data = [0xFE,0xFF,0x00,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F]; + var blob = new Blob([new Uint8Array(data)]); + var reader = new FileReader(); + + reader.onloadend = t.step_func_done (function(event) { + assert_equals(this.result, "hello", "The FileReader should read the ArrayBuffer through UTF-16BE."); + }, reader); + + reader.readAsText(blob); +}); + +var t = async_test("Blob Determing Encoding with UTF-16LE BOM"); +t.step(function() { + var data = [0xFF,0xFE,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F,0x00]; + var blob = new Blob([new Uint8Array(data)]); + var reader = new FileReader(); + + reader.onloadend = t.step_func_done (function(event) { + assert_equals(this.result, "hello", "The FileReader should read the ArrayBuffer through UTF-16LE."); + }, reader); + + reader.readAsText(blob); +}); diff --git a/test/wpt/tests/FileAPI/reading-data-section/FileReader-event-handler-attributes.any.js b/test/wpt/tests/FileAPI/reading-data-section/FileReader-event-handler-attributes.any.js new file mode 100644 index 0000000..fc71c64 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/FileReader-event-handler-attributes.any.js @@ -0,0 +1,17 @@ +// META: title=FileReader event handler attributes + +var 
attributes = [ + "onloadstart", + "onprogress", + "onload", + "onabort", + "onerror", + "onloadend", +]; +attributes.forEach(function(a) { + test(function() { + var reader = new FileReader(); + assert_equals(reader[a], null, + "event handler attribute should initially be null"); + }, "FileReader." + a + ": initial value"); +}); diff --git a/test/wpt/tests/FileAPI/reading-data-section/FileReader-multiple-reads.any.js b/test/wpt/tests/FileAPI/reading-data-section/FileReader-multiple-reads.any.js new file mode 100644 index 0000000..4b19c69 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/FileReader-multiple-reads.any.js @@ -0,0 +1,81 @@ +// META: title=FileReader: starting new reads while one is in progress + +test(function() { + var blob_1 = new Blob(['TEST000000001']) + var blob_2 = new Blob(['TEST000000002']) + var reader = new FileReader(); + reader.readAsText(blob_1) + assert_equals(reader.readyState, FileReader.LOADING, "readyState Must be LOADING") + assert_throws_dom("InvalidStateError", function () { + reader.readAsText(blob_2) + }) +}, 'test FileReader InvalidStateError exception for readAsText'); + +test(function() { + var blob_1 = new Blob(['TEST000000001']) + var blob_2 = new Blob(['TEST000000002']) + var reader = new FileReader(); + reader.readAsDataURL(blob_1) + assert_equals(reader.readyState, FileReader.LOADING, "readyState Must be LOADING") + assert_throws_dom("InvalidStateError", function () { + reader.readAsDataURL(blob_2) + }) +}, 'test FileReader InvalidStateError exception for readAsDataURL'); + +test(function() { + var blob_1 = new Blob(['TEST000000001']) + var blob_2 = new Blob(['TEST000000002']) + var reader = new FileReader(); + reader.readAsArrayBuffer(blob_1) + assert_equals(reader.readyState, FileReader.LOADING, "readyState Must be LOADING") + assert_throws_dom("InvalidStateError", function () { + reader.readAsArrayBuffer(blob_2) + }) +}, 'test FileReader InvalidStateError exception for readAsArrayBuffer'); + +async_test(function() { + var blob_1 = new Blob(['TEST000000001']) + var blob_2 = new Blob(['TEST000000002']) + var reader = new FileReader(); + var triggered = false; + reader.onloadstart = this.step_func_done(function() { + assert_false(triggered, "Only one loadstart event should be dispatched"); + triggered = true; + assert_equals(reader.readyState, FileReader.LOADING, + "readyState must be LOADING") + assert_throws_dom("InvalidStateError", function () { + reader.readAsArrayBuffer(blob_2) + }) + }); + reader.readAsArrayBuffer(blob_1) + assert_equals(reader.readyState, FileReader.LOADING, "readyState Must be LOADING") +}, 'test FileReader InvalidStateError exception in onloadstart event for readAsArrayBuffer'); + +async_test(function() { + var blob_1 = new Blob(['TEST000000001']) + var blob_2 = new Blob(['TEST000000002']) + var reader = new FileReader(); + reader.onloadend = this.step_func_done(function() { + assert_equals(reader.readyState, FileReader.DONE, + "readyState must be DONE") + reader.readAsArrayBuffer(blob_2) + assert_equals(reader.readyState, FileReader.LOADING, "readyState Must be LOADING") + }); + reader.readAsArrayBuffer(blob_1) + assert_equals(reader.readyState, FileReader.LOADING, "readyState Must be LOADING") +}, 'test FileReader no InvalidStateError exception in loadend event handler for readAsArrayBuffer'); + +async_test(function() { + var blob_1 = new Blob([new Uint8Array(0x414141)]); + var blob_2 = new Blob(['TEST000000002']); + var reader = new FileReader(); + reader.onloadstart = this.step_func(function() { + 
reader.abort(); + reader.onloadstart = null; + reader.onloadend = this.step_func_done(function() { + assert_equals('TEST000000002', reader.result); + }); + reader.readAsText(blob_2); + }); + reader.readAsText(blob_1); +}, 'test abort and restart in onloadstart event for readAsText'); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_abort.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_abort.any.js new file mode 100644 index 0000000..c778ae5 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_abort.any.js @@ -0,0 +1,38 @@ +// META: title=FileAPI Test: filereader_abort + + test(function() { + var readerNoRead = new FileReader(); + readerNoRead.abort(); + assert_equals(readerNoRead.readyState, readerNoRead.EMPTY); + assert_equals(readerNoRead.result, null); + }, "Aborting before read"); + + promise_test(t => { + var blob = new Blob(["TEST THE ABORT METHOD"]); + var readerAbort = new FileReader(); + + var eventWatcher = new EventWatcher(t, readerAbort, + ['abort', 'loadstart', 'loadend', 'error', 'load']); + + // EventWatcher doesn't let us inspect the state after the abort event, + // so add an extra event handler for that. + readerAbort.addEventListener('abort', t.step_func(e => { + assert_equals(readerAbort.readyState, readerAbort.DONE); + })); + + readerAbort.readAsText(blob); + return eventWatcher.wait_for('loadstart') + .then(() => { + assert_equals(readerAbort.readyState, readerAbort.LOADING); + // 'abort' and 'loadend' events are dispatched synchronously, so + // call wait_for before calling abort. + var nextEvent = eventWatcher.wait_for(['abort', 'loadend']); + readerAbort.abort(); + return nextEvent; + }) + .then(() => { + // https://www.w3.org/Bugs/Public/show_bug.cgi?id=24401 + assert_equals(readerAbort.result, null); + assert_equals(readerAbort.readyState, readerAbort.DONE); + }); + }, "Aborting after read"); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_error.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_error.any.js new file mode 100644 index 0000000..9845962 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_error.any.js @@ -0,0 +1,19 @@ +// META: title=FileAPI Test: filereader_error + + async_test(function() { + var blob = new Blob(["TEST THE ERROR ATTRIBUTE AND ERROR EVENT"]); + var reader = new FileReader(); + assert_equals(reader.error, null, "The error is null when no error occurred"); + + reader.onload = this.step_func(function(evt) { + assert_unreached("Should not dispatch the load event"); + }); + + reader.onloadend = this.step_func(function(evt) { + assert_equals(reader.result, null, "The result is null"); + this.done(); + }); + + reader.readAsText(blob); + reader.abort(); + }); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_events.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_events.any.js new file mode 100644 index 0000000..ac69290 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_events.any.js @@ -0,0 +1,19 @@ +promise_test(async t => { + var reader = new FileReader(); + var eventWatcher = new EventWatcher(t, reader, ['loadstart', 'progress', 'abort', 'error', 'load', 'loadend']); + reader.readAsText(new Blob([])); + await eventWatcher.wait_for('loadstart'); + // No progress event for an empty blob, as no data is loaded. 
+ await eventWatcher.wait_for('load'); + await eventWatcher.wait_for('loadend'); +}, 'events are dispatched in the correct order for an empty blob'); + +promise_test(async t => { + var reader = new FileReader(); + var eventWatcher = new EventWatcher(t, reader, ['loadstart', 'progress', 'abort', 'error', 'load', 'loadend']); + reader.readAsText(new Blob(['a'])); + await eventWatcher.wait_for('loadstart'); + await eventWatcher.wait_for('progress'); + await eventWatcher.wait_for('load'); + await eventWatcher.wait_for('loadend'); +}, 'events are dispatched in the correct order for a non-empty blob'); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_file-manual.html b/test/wpt/tests/FileAPI/reading-data-section/filereader_file-manual.html new file mode 100644 index 0000000..702ca9a --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_file-manual.html @@ -0,0 +1,69 @@ + + + + + FileAPI Test: filereader_file + + + + + + + +
+  Test step:
+  1. Download blue-100x100.png to local.
+  3. Select the local file (blue-100x100.png) to run the test.
+ + + diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_file_img-manual.html b/test/wpt/tests/FileAPI/reading-data-section/filereader_file_img-manual.html new file mode 100644 index 0000000..fca42c7 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_file_img-manual.html @@ -0,0 +1,47 @@ + + + + + FileAPI Test: filereader_file_img + + + + + + + +
+  Test step:
+  1. Download blue-100x100.png to local.
+  3. Select the local file (blue-100x100.png) to run the test.
+ + + diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsArrayBuffer.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsArrayBuffer.any.js new file mode 100644 index 0000000..d06e317 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsArrayBuffer.any.js @@ -0,0 +1,23 @@ +// META: title=FileAPI Test: filereader_readAsArrayBuffer + + async_test(function() { + var blob = new Blob(["TEST"]); + var reader = new FileReader(); + + reader.onload = this.step_func(function(evt) { + assert_equals(reader.result.byteLength, 4, "The byteLength is 4"); + assert_true(reader.result instanceof ArrayBuffer, "The result is instanceof ArrayBuffer"); + assert_equals(reader.readyState, reader.DONE); + this.done(); + }); + + reader.onloadstart = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.LOADING); + }); + + reader.onprogress = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.LOADING); + }); + + reader.readAsArrayBuffer(blob); + }); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsBinaryString.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsBinaryString.any.js new file mode 100644 index 0000000..e69ff15 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsBinaryString.any.js @@ -0,0 +1,23 @@ +// META: title=FileAPI Test: filereader_readAsBinaryString + +async_test(t => { + const blob = new Blob(["σ"]); + const reader = new FileReader(); + + reader.onload = t.step_func_done(() => { + assert_equals(typeof reader.result, "string", "The result is string"); + assert_equals(reader.result.length, 2, "The result length is 2"); + assert_equals(reader.result, "\xcf\x83", "The result is \xcf\x83"); + assert_equals(reader.readyState, reader.DONE); + }); + + reader.onloadstart = t.step_func(() => { + assert_equals(reader.readyState, reader.LOADING); + }); + + reader.onprogress = t.step_func(() => { + assert_equals(reader.readyState, reader.LOADING); + }); + + reader.readAsBinaryString(blob); +}); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsDataURL.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsDataURL.any.js new file mode 100644 index 0000000..4f9dbf7 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsDataURL.any.js @@ -0,0 +1,54 @@ +// META: title=FileAPI Test: FileReader.readAsDataURL + +async_test(function(testCase) { + var blob = new Blob(["TEST"]); + var reader = new FileReader(); + + reader.onload = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.DONE); + testCase.done(); + }); + reader.onloadstart = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.LOADING); + }); + reader.onprogress = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.LOADING); + }); + + reader.readAsDataURL(blob); +}, 'FileReader readyState during readAsDataURL'); + +async_test(function(testCase) { + var blob = new Blob(["TEST"], { type: 'text/plain' }); + var reader = new FileReader(); + + reader.onload = this.step_func(function() { + assert_equals(reader.result, "data:text/plain;base64,VEVTVA=="); + testCase.done(); + }); + reader.readAsDataURL(blob); +}, 'readAsDataURL result for Blob with specified MIME type'); + +async_test(function(testCase) { + var blob = new Blob(["TEST"]); + var reader = new FileReader(); + + reader.onload = this.step_func(function() { + assert_equals(reader.result, + 
"data:application/octet-stream;base64,VEVTVA=="); + testCase.done(); + }); + reader.readAsDataURL(blob); +}, 'readAsDataURL result for Blob with unspecified MIME type'); + +async_test(function(testCase) { + var blob = new Blob([]); + var reader = new FileReader(); + + reader.onload = this.step_func(function() { + assert_equals(reader.result, + "data:application/octet-stream;base64,"); + testCase.done(); + }); + reader.readAsDataURL(blob); +}, 'readAsDataURL result for empty Blob'); \ No newline at end of file diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsText.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsText.any.js new file mode 100644 index 0000000..4d0fa11 --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_readAsText.any.js @@ -0,0 +1,36 @@ +// META: title=FileAPI Test: filereader_readAsText + + async_test(function() { + var blob = new Blob(["TEST"]); + var reader = new FileReader(); + + reader.onload = this.step_func(function(evt) { + assert_equals(typeof reader.result, "string", "The result is typeof string"); + assert_equals(reader.result, "TEST", "The result is TEST"); + this.done(); + }); + + reader.onloadstart = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.LOADING, "The readyState"); + }); + + reader.onprogress = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.LOADING); + }); + + reader.readAsText(blob); + }, "readAsText should correctly read UTF-8."); + + async_test(function() { + var blob = new Blob(["TEST"]); + var reader = new FileReader(); + var reader_UTF16 = new FileReader(); + reader_UTF16.onload = this.step_func(function(evt) { + // "TEST" in UTF-8 is 0x54 0x45 0x53 0x54. + // Decoded as utf-16 (little-endian), we get 0x4554 0x5453. 
+ assert_equals(reader_UTF16.readyState, reader.DONE, "The readyState"); + assert_equals(reader_UTF16.result, "\u4554\u5453", "The result is not TEST"); + this.done(); + }); + reader_UTF16.readAsText(blob, "UTF-16"); + }, "readAsText should correctly read UTF-16."); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_readystate.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_readystate.any.js new file mode 100644 index 0000000..3cb36ab --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_readystate.any.js @@ -0,0 +1,19 @@ +// META: title=FileAPI Test: filereader_readystate + + async_test(function() { + var blob = new Blob(["THIS TEST THE READYSTATE WHEN READ BLOB"]); + var reader = new FileReader(); + + assert_equals(reader.readyState, reader.EMPTY); + + reader.onloadstart = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.LOADING); + }); + + reader.onloadend = this.step_func(function(evt) { + assert_equals(reader.readyState, reader.DONE); + this.done(); + }); + + reader.readAsDataURL(blob); + }); diff --git a/test/wpt/tests/FileAPI/reading-data-section/filereader_result.any.js b/test/wpt/tests/FileAPI/reading-data-section/filereader_result.any.js new file mode 100644 index 0000000..28c068b --- /dev/null +++ b/test/wpt/tests/FileAPI/reading-data-section/filereader_result.any.js @@ -0,0 +1,82 @@ +// META: title=FileAPI Test: filereader_result + + var blob, blob2; + setup(function() { + blob = new Blob(["This test the result attribute"]); + blob2 = new Blob(["This is a second blob"]); + }); + + async_test(function() { + var readText = new FileReader(); + assert_equals(readText.result, null); + + readText.onloadend = this.step_func(function(evt) { + assert_equals(typeof readText.result, "string", "The result type is string"); + assert_equals(readText.result, "This test the result attribute", "The result is correct"); + this.done(); + }); + + readText.readAsText(blob); + }, "readAsText"); + + async_test(function() { + var readDataURL = new FileReader(); + assert_equals(readDataURL.result, null); + + readDataURL.onloadend = this.step_func(function(evt) { + assert_equals(typeof readDataURL.result, "string", "The result type is string"); + assert_true(readDataURL.result.indexOf("VGhpcyB0ZXN0IHRoZSByZXN1bHQgYXR0cmlidXRl") != -1, "return the right base64 string"); + this.done(); + }); + + readDataURL.readAsDataURL(blob); + }, "readAsDataURL"); + + async_test(function() { + var readArrayBuffer = new FileReader(); + assert_equals(readArrayBuffer.result, null); + + readArrayBuffer.onloadend = this.step_func(function(evt) { + assert_true(readArrayBuffer.result instanceof ArrayBuffer, "The result is instanceof ArrayBuffer"); + this.done(); + }); + + readArrayBuffer.readAsArrayBuffer(blob); + }, "readAsArrayBuffer"); + + async_test(function() { + var readBinaryString = new FileReader(); + assert_equals(readBinaryString.result, null); + + readBinaryString.onloadend = this.step_func(function(evt) { + assert_equals(typeof readBinaryString.result, "string", "The result type is string"); + assert_equals(readBinaryString.result, "This test the result attribute", "The result is correct"); + this.done(); + }); + + readBinaryString.readAsBinaryString(blob); + }, "readAsBinaryString"); + + + for (let event of ['loadstart', 'progress']) { + for (let method of ['readAsText', 'readAsDataURL', 'readAsArrayBuffer', 'readAsBinaryString']) { + promise_test(async function(t) { + var reader = new FileReader(); + assert_equals(reader.result, null, 
'result is null before read'); + + var eventWatcher = new EventWatcher(t, reader, + [event, 'loadend']); + + reader[method](blob); + assert_equals(reader.result, null, 'result is null after first read call'); + await eventWatcher.wait_for(event); + assert_equals(reader.result, null, 'result is null during event'); + await eventWatcher.wait_for('loadend'); + assert_not_equals(reader.result, null); + reader[method](blob); + assert_equals(reader.result, null, 'result is null after second read call'); + await eventWatcher.wait_for(event); + assert_equals(reader.result, null, 'result is null during second read event'); + }, 'result is null during "' + event + '" event for ' + method); + } + } diff --git a/test/wpt/tests/FileAPI/reading-data-section/support/blue-100x100.png b/test/wpt/tests/FileAPI/reading-data-section/support/blue-100x100.png new file mode 100644 index 0000000..5748719 Binary files /dev/null and b/test/wpt/tests/FileAPI/reading-data-section/support/blue-100x100.png differ diff --git a/test/wpt/tests/FileAPI/support/Blob.js b/test/wpt/tests/FileAPI/support/Blob.js new file mode 100644 index 0000000..2c24974 --- /dev/null +++ b/test/wpt/tests/FileAPI/support/Blob.js @@ -0,0 +1,70 @@ +'use strict' + +self.test_blob = (fn, expectations) => { + var expected = expectations.expected, + type = expectations.type, + desc = expectations.desc; + + var t = async_test(desc); + t.step(function() { + var blob = fn(); + assert_true(blob instanceof Blob); + assert_false(blob instanceof File); + assert_equals(blob.type, type); + assert_equals(blob.size, expected.length); + + var fr = new FileReader(); + fr.onload = t.step_func_done(function(event) { + assert_equals(this.result, expected); + }, fr); + fr.onerror = t.step_func(function(e) { + assert_unreached("got error event on FileReader"); + }); + fr.readAsText(blob, "UTF-8"); + }); +} + +self.test_blob_binary = (fn, expectations) => { + var expected = expectations.expected, + type = expectations.type, + desc = expectations.desc; + + var t = async_test(desc); + t.step(function() { + var blob = fn(); + assert_true(blob instanceof Blob); + assert_false(blob instanceof File); + assert_equals(blob.type, type); + assert_equals(blob.size, expected.length); + + var fr = new FileReader(); + fr.onload = t.step_func_done(function(event) { + assert_true(this.result instanceof ArrayBuffer, + "Result should be an ArrayBuffer"); + assert_array_equals(new Uint8Array(this.result), expected); + }, fr); + fr.onerror = t.step_func(function(e) { + assert_unreached("got error event on FileReader"); + }); + fr.readAsArrayBuffer(blob); + }); +} + +// Assert that two TypedArray objects have the same byte values +self.assert_equals_typed_array = (array1, array2) => { + const [view1, view2] = [array1, array2].map((array) => { + assert_true(array.buffer instanceof ArrayBuffer, + 'Expect input ArrayBuffers to contain field `buffer`'); + return new DataView(array.buffer, array.byteOffset, array.byteLength); + }); + + assert_equals(view1.byteLength, view2.byteLength, + 'Expect both arrays to be of the same byte length'); + + const byteLength = view1.byteLength; + + for (let i = 0; i < byteLength; ++i) { + assert_equals(view1.getUint8(i), view2.getUint8(i), + `Expect byte at buffer position ${i} to be equal`); + } +} diff --git a/test/wpt/tests/FileAPI/support/document-domain-setter.sub.html b/test/wpt/tests/FileAPI/support/document-domain-setter.sub.html new file mode 100644 index 0000000..61aebdf --- /dev/null +++ 
b/test/wpt/tests/FileAPI/support/document-domain-setter.sub.html @@ -0,0 +1,7 @@ + +Relevant/current/blob source page used as a test helper + + diff --git a/test/wpt/tests/FileAPI/support/empty-document.html b/test/wpt/tests/FileAPI/support/empty-document.html new file mode 100644 index 0000000..b9cd130 --- /dev/null +++ b/test/wpt/tests/FileAPI/support/empty-document.html @@ -0,0 +1,3 @@ + + + diff --git a/test/wpt/tests/FileAPI/support/historical-serviceworker.js b/test/wpt/tests/FileAPI/support/historical-serviceworker.js new file mode 100644 index 0000000..8bd89a2 --- /dev/null +++ b/test/wpt/tests/FileAPI/support/historical-serviceworker.js @@ -0,0 +1,5 @@ +importScripts('/resources/testharness.js'); + +test(() => { + assert_false('FileReaderSync' in self); +}, '"FileReaderSync" should not be supported in service workers'); diff --git a/test/wpt/tests/FileAPI/support/incumbent.sub.html b/test/wpt/tests/FileAPI/support/incumbent.sub.html new file mode 100644 index 0000000..63a81cd --- /dev/null +++ b/test/wpt/tests/FileAPI/support/incumbent.sub.html @@ -0,0 +1,22 @@ + +Incumbent page used as a test helper + + + + + + diff --git a/test/wpt/tests/FileAPI/support/send-file-form-helper.js b/test/wpt/tests/FileAPI/support/send-file-form-helper.js new file mode 100644 index 0000000..d6adf21 --- /dev/null +++ b/test/wpt/tests/FileAPI/support/send-file-form-helper.js @@ -0,0 +1,282 @@ +'use strict'; + +// See /FileAPI/file/resources/echo-content-escaped.py +function escapeString(string) { + return string.replace(/\\/g, "\\\\").replace( + /[^\x20-\x7E]/g, + (x) => { + let hex = x.charCodeAt(0).toString(16); + if (hex.length < 2) hex = "0" + hex; + return `\\x${hex}`; + }, + ).replace(/\\x0d\\x0a/g, "\r\n"); +} + +// Rationale for this particular test character sequence, which is +// used in filenames and also in file contents: +// +// - ABC~ ensures the string starts with something we can read to +// ensure it is from the correct source; ~ is used because even +// some 1-byte otherwise-ASCII-like parts of ISO-2022-JP +// interpret it differently. +// - ‾¥ are inside a single-byte range of ISO-2022-JP and help +// diagnose problems due to filesystem encoding or locale +// - ≈ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - ¤ is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale; it is also the "simplest" case +// needing substitution in ISO-2022-JP +// - ï½¥ is inside a single-byte range of ISO-2022-JP in some variants +// and helps diagnose problems due to filesystem encoding or locale; +// on the web it is distinct when decoding but unified when encoding +// - ・ is inside a double-byte range of ISO-2022-JP and helps +// diagnose problems due to filesystem encoding or locale +// - • is inside Windows-1252 and helps diagnose problems due to +// filesystem encoding or locale and also ensures these aren't +// accidentally turned into e.g. control codes +// - ∙ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - · is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale and also ensures HTML named +// character references (e.g. ·) are not used +// - ☼ is inside IBM437 shadowing C0 and helps diagnose problems due to +// filesystem encoding or locale and also ensures these aren't +// accidentally turned into e.g. 
control codes +// - ★ is inside ISO-2022-JP on a non-Kanji page and makes correct +// output easier to spot +// - 星 is inside ISO-2022-JP on a Kanji page and makes correct +// output easier to spot +// - 🌟 is outside the BMP and makes incorrect surrogate pair +// substitution detectable and ensures substitutions work +// correctly immediately after Kanji 2-byte ISO-2022-JP +// - 星 repeated here ensures the correct codec state is used +// after a non-BMP substitution +// - ★ repeated here also makes correct output easier to spot +// - ☼ is inside IBM437 shadowing C0 and helps diagnose problems due to +// filesystem encoding or locale and also ensures these aren't +// accidentally turned into e.g. control codes and also ensures +// substitutions work correctly immediately after non-Kanji +// 2-byte ISO-2022-JP +// - · is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale and also ensures HTML named +// character references (e.g. ·) are not used +// - ∙ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - • is inside Windows-1252 and again helps diagnose problems +// due to filesystem encoding or locale +// - ・ is inside a double-byte range of ISO-2022-JP and helps +// diagnose problems due to filesystem encoding or locale +// - ï½¥ is inside a single-byte range of ISO-2022-JP in some variants +// and helps diagnose problems due to filesystem encoding or locale; +// on the web it is distinct when decoding but unified when encoding +// - ¤ is inside Latin-1 and helps diagnose problems due to +// filesystem encoding or locale; again it is a "simple" +// substitution case +// - ≈ is inside IBM437 and helps diagnose problems due to filesystem +// encoding or locale +// - ¥‾ are inside a single-byte range of ISO-2022-JP and help +// diagnose problems due to filesystem encoding or locale +// - ~XYZ ensures earlier errors don't lead to misencoding of +// simple ASCII +// +// Overall the near-symmetry makes common I18N mistakes like +// off-by-1-after-non-BMP easier to spot. All the characters +// are also allowed in Windows Unicode filenames. +const kTestChars = 'ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ'; + +// The kTestFallback* strings represent the expected byte sequence from +// encoding kTestChars with the given encoding with "html" replacement +// mode, isomorphic-decoded. That means, characters that can't be +// encoded in that encoding get HTML-escaped, but no further +// `escapeString`-like escapes are needed. 
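+//
+// Concretely: in the Encoding Standard's "html" error mode, a code point the
+// target encoding cannot represent is emitted as a decimal numeric character
+// reference. For example, encoding '★' (U+2605) as windows-1252 produces the
+// bytes for "&#9733;", which is why kTestFallbackWindows1252 below contains
+// "&#9733;" where kTestChars has '★' — modelled here by the
+// `&#${codePointAt(0)};` replacements over the unencodable ranges.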
+const kTestFallbackUtf8 = ( + "ABC~\xE2\x80\xBE\xC2\xA5\xE2\x89\x88\xC2\xA4\xEF\xBD\xA5\xE3\x83\xBB\xE2" + + "\x80\xA2\xE2\x88\x99\xC2\xB7\xE2\x98\xBC\xE2\x98\x85\xE6\x98\x9F\xF0\x9F" + + "\x8C\x9F\xE6\x98\x9F\xE2\x98\x85\xE2\x98\xBC\xC2\xB7\xE2\x88\x99\xE2\x80" + + "\xA2\xE3\x83\xBB\xEF\xBD\xA5\xC2\xA4\xE2\x89\x88\xC2\xA5\xE2\x80\xBE~XYZ" +); + +const kTestFallbackIso2022jp = ( + ("ABC~\x1B(J~\\≈¤\x1B$B!&!&\x1B(B•∙·☼\x1B$B!z@1\x1B(B🌟" + + "\x1B$B@1!z\x1B(B☼·∙•\x1B$B!&!&\x1B(B¤≈\x1B(J\\~\x1B(B~XYZ") + .replace(/[^\0-\x7F]/gu, (x) => `&#${x.codePointAt(0)};`) +); + +const kTestFallbackWindows1252 = ( + "ABC~‾\xA5≈\xA4・・\x95∙\xB7☼★星🌟星★☼\xB7∙\x95・・\xA4≈\xA5‾~XYZ".replace( + /[^\0-\xFF]/gu, + (x) => `&#${x.codePointAt(0)};`, + ) +); + +const kTestFallbackXUserDefined = kTestChars.replace( + /[^\0-\x7F]/gu, + (x) => `&#${x.codePointAt(0)};`, +); + +// formPostFileUploadTest - verifies multipart upload structure and +// numeric character reference replacement for filenames, field names, +// and field values using form submission. +// +// Uses /FileAPI/file/resources/echo-content-escaped.py to echo the +// upload POST with controls and non-ASCII bytes escaped. This is done +// because navigations whose response body contains [\0\b\v] may get +// treated as a download, which is not what we want. Use the +// `escapeString` function to replicate that kind of escape (note that +// it takes an isomorphic-decoded string, not a byte sequence). +// +// Fields in the parameter object: +// +// - fileNameSource: purely explanatory and gives a clue about which +// character encoding is the source for the non-7-bit-ASCII parts of +// the fileBaseName, or Unicode if no smaller-than-Unicode source +// contains all the characters. Used in the test name. +// - fileBaseName: the not-necessarily-just-7-bit-ASCII file basename +// used for the constructed test file. Used in the test name. +// - formEncoding: the acceptCharset of the form used to submit the +// test file. Used in the test name. +// - expectedEncodedBaseName: the expected formEncoding-encoded +// version of fileBaseName, isomorphic-decoded. That means, characters +// that can't be encoded in that encoding get HTML-escaped, but no +// further `escapeString`-like escapes are needed. +const formPostFileUploadTest = ({ + fileNameSource, + fileBaseName, + formEncoding, + expectedEncodedBaseName, +}) => { + promise_test(async testCase => { + + if (document.readyState !== 'complete') { + await new Promise(resolve => addEventListener('load', resolve)); + } + + const formTargetFrame = Object.assign(document.createElement('iframe'), { + name: 'formtargetframe', + }); + document.body.append(formTargetFrame); + testCase.add_cleanup(() => { + document.body.removeChild(formTargetFrame); + }); + + const form = Object.assign(document.createElement('form'), { + acceptCharset: formEncoding, + action: '/FileAPI/file/resources/echo-content-escaped.py', + method: 'POST', + enctype: 'multipart/form-data', + target: formTargetFrame.name, + }); + document.body.append(form); + testCase.add_cleanup(() => { + document.body.removeChild(form); + }); + + // Used to verify that the browser agrees with the test about + // which form charset is used. + form.append(Object.assign(document.createElement('input'), { + type: 'hidden', + name: '_charset_', + })); + + // Used to verify that the browser agrees with the test about + // field value replacement and encoding independently of file system + // idiosyncracies. 
+ form.append(Object.assign(document.createElement('input'), { + type: 'hidden', + name: 'filename', + value: fileBaseName, + })); + + // Same, but with name and value reversed to ensure field names + // get the same treatment. + form.append(Object.assign(document.createElement('input'), { + type: 'hidden', + name: fileBaseName, + value: 'filename', + })); + + const fileInput = Object.assign(document.createElement('input'), { + type: 'file', + name: 'file', + }); + form.append(fileInput); + + // Removes c:\fakepath\ or other pseudofolder and returns just the + // final component of filePath; allows both / and \ as segment + // delimiters. + const baseNameOfFilePath = filePath => filePath.split(/[\/\\]/).pop(); + await new Promise(resolve => { + const dataTransfer = new DataTransfer; + dataTransfer.items.add( + new File([kTestChars], fileBaseName, {type: 'text/plain'})); + fileInput.files = dataTransfer.files; + // For historical reasons .value will be prefixed with + // c:\fakepath\, but the basename should match the file name + // exposed through the newer .files[0].name API. This check + // verifies that assumption. + assert_equals( + baseNameOfFilePath(fileInput.files[0].name), + baseNameOfFilePath(fileInput.value), + `The basename of the field's value should match its files[0].name`); + form.submit(); + formTargetFrame.onload = resolve; + }); + + const formDataText = formTargetFrame.contentDocument.body.textContent; + const formDataLines = formDataText.split('\n'); + if (formDataLines.length && !formDataLines[formDataLines.length - 1]) { + --formDataLines.length; + } + assert_greater_than( + formDataLines.length, + 2, + `${fileBaseName}: multipart form data must have at least 3 lines: ${ + JSON.stringify(formDataText) + }`); + const boundary = formDataLines[0]; + assert_equals( + formDataLines[formDataLines.length - 1], + boundary + '--', + `${fileBaseName}: multipart form data must end with ${boundary}--: ${ + JSON.stringify(formDataText) + }`); + + const asValue = expectedEncodedBaseName.replace(/\r\n?|\n/g, "\r\n"); + const asName = asValue.replace(/[\r\n"]/g, encodeURIComponent); + const asFilename = expectedEncodedBaseName.replace(/[\r\n"]/g, encodeURIComponent); + + // The response body from echo-content-escaped.py has controls and non-ASCII + // bytes escaped, so any caller-provided field that might contain such bytes + // must be passed to `escapeString`, after any other expected + // transformations. + const expectedText = [ + boundary, + 'Content-Disposition: form-data; name="_charset_"', + '', + formEncoding, + boundary, + 'Content-Disposition: form-data; name="filename"', + '', + // Unlike for names and filenames, multipart/form-data values don't escape + // \r\n linebreaks, and when they're read from an iframe they become \n. 
+ escapeString(asValue).replace(/\r\n/g, "\n"), + boundary, + `Content-Disposition: form-data; name="${escapeString(asName)}"`, + '', + 'filename', + boundary, + `Content-Disposition: form-data; name="file"; ` + + `filename="${escapeString(asFilename)}"`, + 'Content-Type: text/plain', + '', + escapeString(kTestFallbackUtf8), + boundary + '--', + ].join('\n'); + + assert_true( + formDataText.startsWith(expectedText), + `Unexpected multipart-shaped form data received:\n${ + formDataText + }\nExpected:\n${expectedText}`); + }, `Upload ${fileBaseName} (${fileNameSource}) in ${formEncoding} form`); +}; diff --git a/test/wpt/tests/FileAPI/support/send-file-formdata-helper.js b/test/wpt/tests/FileAPI/support/send-file-formdata-helper.js new file mode 100644 index 0000000..53c8cca --- /dev/null +++ b/test/wpt/tests/FileAPI/support/send-file-formdata-helper.js @@ -0,0 +1,99 @@ +"use strict"; + +const kTestChars = "ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ"; + +// formDataPostFileUploadTest - verifies multipart upload structure and +// numeric character reference replacement for filenames, field names, +// and field values using FormData and fetch(). +// +// Uses /fetch/api/resources/echo-content.py to echo the upload +// POST (unlike in send-file-form-helper.js, here we expect all +// multipart/form-data request bodies to be UTF-8, so we don't need to +// escape controls and non-ASCII bytes). +// +// Fields in the parameter object: +// +// - fileNameSource: purely explanatory and gives a clue about which +// character encoding is the source for the non-7-bit-ASCII parts of +// the fileBaseName, or Unicode if no smaller-than-Unicode source +// contains all the characters. Used in the test name. +// - fileBaseName: the not-necessarily-just-7-bit-ASCII file basename +// used for the constructed test file. Used in the test name. +const formDataPostFileUploadTest = ({ + fileNameSource, + fileBaseName, +}) => { + promise_test(async (testCase) => { + const formData = new FormData(); + let file = new Blob([kTestChars], { type: "text/plain" }); + try { + // Switch to File in browsers that allow this + file = new File([file], fileBaseName, { type: file.type }); + } catch (ignoredException) { + } + + // Used to verify that the browser agrees with the test about + // field value replacement and encoding independently of file system + // idiosyncracies. + formData.append("filename", fileBaseName); + + // Same, but with name and value reversed to ensure field names + // get the same treatment. 
+ formData.append(fileBaseName, "filename"); + + formData.append("file", file, fileBaseName); + + const formDataText = await (await fetch( + `/fetch/api/resources/echo-content.py`, + { + method: "POST", + body: formData, + }, + )).text(); + const formDataLines = formDataText.split("\r\n"); + if (formDataLines.length && !formDataLines[formDataLines.length - 1]) { + --formDataLines.length; + } + assert_greater_than( + formDataLines.length, + 2, + `${fileBaseName}: multipart form data must have at least 3 lines: ${ + JSON.stringify(formDataText) + }`, + ); + const boundary = formDataLines[0]; + assert_equals( + formDataLines[formDataLines.length - 1], + boundary + "--", + `${fileBaseName}: multipart form data must end with ${boundary}--: ${ + JSON.stringify(formDataText) + }`, + ); + + const asValue = fileBaseName.replace(/\r\n?|\n/g, "\r\n"); + const asName = asValue.replace(/[\r\n"]/g, encodeURIComponent); + const asFilename = fileBaseName.replace(/[\r\n"]/g, encodeURIComponent); + const expectedText = [ + boundary, + 'Content-Disposition: form-data; name="filename"', + "", + asValue, + boundary, + `Content-Disposition: form-data; name="${asName}"`, + "", + "filename", + boundary, + `Content-Disposition: form-data; name="file"; ` + + `filename="${asFilename}"`, + "Content-Type: text/plain", + "", + kTestChars, + boundary + "--", + ].join("\r\n"); + + assert_true( + formDataText.startsWith(expectedText), + `Unexpected multipart-shaped form data received:\n${formDataText}\nExpected:\n${expectedText}`, + ); + }, `Upload ${fileBaseName} (${fileNameSource}) in fetch with FormData`); +}; diff --git a/test/wpt/tests/FileAPI/support/upload.txt b/test/wpt/tests/FileAPI/support/upload.txt new file mode 100644 index 0000000..5ab2f8a --- /dev/null +++ b/test/wpt/tests/FileAPI/support/upload.txt @@ -0,0 +1 @@ +Hello \ No newline at end of file diff --git a/test/wpt/tests/FileAPI/support/url-origin.html b/test/wpt/tests/FileAPI/support/url-origin.html new file mode 100644 index 0000000..6375511 --- /dev/null +++ b/test/wpt/tests/FileAPI/support/url-origin.html @@ -0,0 +1,6 @@ + + diff --git a/test/wpt/tests/FileAPI/unicode.html b/test/wpt/tests/FileAPI/unicode.html new file mode 100644 index 0000000..ce3e357 --- /dev/null +++ b/test/wpt/tests/FileAPI/unicode.html @@ -0,0 +1,46 @@ + + +Blob/Unicode interaction: normalization and encoding + + + diff --git a/test/wpt/tests/FileAPI/url/cross-global-revoke.sub.html b/test/wpt/tests/FileAPI/url/cross-global-revoke.sub.html new file mode 100644 index 0000000..ce9d680 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/cross-global-revoke.sub.html @@ -0,0 +1,62 @@ + + + + + + + diff --git a/test/wpt/tests/FileAPI/url/multi-global-origin-serialization.sub.html b/test/wpt/tests/FileAPI/url/multi-global-origin-serialization.sub.html new file mode 100644 index 0000000..0052b26 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/multi-global-origin-serialization.sub.html @@ -0,0 +1,26 @@ + + +Blob URL serialization (specifically the origin) in multi-global situations + + + + + + + + + + + diff --git a/test/wpt/tests/FileAPI/url/resources/create-helper.html b/test/wpt/tests/FileAPI/url/resources/create-helper.html new file mode 100644 index 0000000..fa6cf4e --- /dev/null +++ b/test/wpt/tests/FileAPI/url/resources/create-helper.html @@ -0,0 +1,7 @@ + + \ No newline at end of file diff --git a/test/wpt/tests/FileAPI/url/resources/create-helper.js b/test/wpt/tests/FileAPI/url/resources/create-helper.js new file mode 100644 index 0000000..e6344f7 --- /dev/null +++ 
b/test/wpt/tests/FileAPI/url/resources/create-helper.js @@ -0,0 +1,4 @@ +self.addEventListener('message', e => { + let url = URL.createObjectURL(e.data.blob); + self.postMessage({url: url}); +}); diff --git a/test/wpt/tests/FileAPI/url/resources/fetch-tests.js b/test/wpt/tests/FileAPI/url/resources/fetch-tests.js new file mode 100644 index 0000000..a81ea1e --- /dev/null +++ b/test/wpt/tests/FileAPI/url/resources/fetch-tests.js @@ -0,0 +1,71 @@ +// This method generates a number of tests verifying fetching of blob URLs, +// allowing the same tests to be used both with fetch() and XMLHttpRequest. +// +// |fetch_method| is only used in test names, and should describe the +// (javascript) method being used by the other two arguments (i.e. 'fetch' or 'XHR'). +// +// |fetch_should_succeed| is a callback that is called with the Test and a URL. +// Fetching the URL is expected to succeed. The callback should return a promise +// resolved with whatever contents were fetched. +// +// |fetch_should_fail| similarly is a callback that is called with the Test, a URL +// to fetch, and optionally a method to use to do the fetch. If no method is +// specified the callback should use the 'GET' method. Fetching of these URLs is +// expected to fail, and the callback should return a promise that resolves iff +// fetching did indeed fail. +function fetch_tests(fetch_method, fetch_should_succeed, fetch_should_fail) { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_succeed(t, url).then(text => { + assert_equals(text, blob_contents); + }); + }, 'Blob URLs can be used in ' + fetch_method); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_succeed(t, url + '#fragment').then(text => { + assert_equals(text, blob_contents); + }); + }, fetch_method + ' with a fragment should succeed'); + + promise_test(t => { + const url = URL.createObjectURL(blob); + URL.revokeObjectURL(url); + + return fetch_should_fail(t, url); + }, fetch_method + ' of a revoked URL should fail'); + + promise_test(t => { + const url = URL.createObjectURL(blob); + URL.revokeObjectURL(url + '#fragment'); + + return fetch_should_succeed(t, url).then(text => { + assert_equals(text, blob_contents); + }); + }, 'Only exact matches should revoke URLs, using ' + fetch_method); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_fail(t, url + '?querystring'); + }, 'Appending a query string should cause ' + fetch_method + ' to fail'); + + promise_test(t => { + const url = URL.createObjectURL(blob); + + return fetch_should_fail(t, url + '/path'); + }, 'Appending a path should cause ' + fetch_method + ' to fail'); + + for (const method of ['HEAD', 'POST', 'DELETE', 'OPTIONS', 'PUT', 'CUSTOM']) { + const url = URL.createObjectURL(blob); + + promise_test(t => { + return fetch_should_fail(t, url, method); + }, fetch_method + ' with method "' + method + '" should fail'); + } +} \ No newline at end of file diff --git a/test/wpt/tests/FileAPI/url/resources/revoke-helper.html b/test/wpt/tests/FileAPI/url/resources/revoke-helper.html new file mode 100644 index 0000000..adf5a01 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/resources/revoke-helper.html @@ -0,0 +1,7 @@ + + \ No newline at end of file diff --git a/test/wpt/tests/FileAPI/url/resources/revoke-helper.js b/test/wpt/tests/FileAPI/url/resources/revoke-helper.js new file mode 100644 index 0000000..c3e05b6 --- 
/dev/null +++ b/test/wpt/tests/FileAPI/url/resources/revoke-helper.js @@ -0,0 +1,9 @@ +self.addEventListener('message', e => { + URL.revokeObjectURL(e.data.url); + // Registering a new object URL will make absolutely sure that the revocation + // has propagated. Without this at least in chrome it is possible for the + // below postMessage to arrive at its destination before the revocation has + // been fully processed. + URL.createObjectURL(new Blob([])); + self.postMessage('revoked'); +}); diff --git a/test/wpt/tests/FileAPI/url/sandboxed-iframe.html b/test/wpt/tests/FileAPI/url/sandboxed-iframe.html new file mode 100644 index 0000000..a52939a --- /dev/null +++ b/test/wpt/tests/FileAPI/url/sandboxed-iframe.html @@ -0,0 +1,32 @@ + + +FileAPI Test: Verify behavior of Blob URL in unique origins + + + + + + + diff --git a/test/wpt/tests/FileAPI/url/unicode-origin.sub.html b/test/wpt/tests/FileAPI/url/unicode-origin.sub.html new file mode 100644 index 0000000..2c4921c --- /dev/null +++ b/test/wpt/tests/FileAPI/url/unicode-origin.sub.html @@ -0,0 +1,23 @@ + + +FileAPI Test: Verify origin of Blob URL + + + + diff --git a/test/wpt/tests/FileAPI/url/url-charset.window.js b/test/wpt/tests/FileAPI/url/url-charset.window.js new file mode 100644 index 0000000..777709b --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url-charset.window.js @@ -0,0 +1,34 @@ +async_test(t => { + // This could be detected as ISO-2022-JP, in which case there would be no + // bbb` + ], + {type: 'text/html;charset=utf-8'}); + const url = URL.createObjectURL(blob); + const win = window.open(url); + t.add_cleanup(() => { + win.close(); + }); + + win.onload = t.step_func_done(() => { + assert_equals(win.document.charset, 'UTF-8'); + }); +}, 'Blob charset should override any auto-detected charset.'); + +async_test(t => { + const blob = new Blob( + [`\n`], + {type: 'text/html;charset=utf-8'}); + const url = URL.createObjectURL(blob); + const win = window.open(url); + t.add_cleanup(() => { + win.close(); + }); + + win.onload = t.step_func_done(() => { + assert_equals(win.document.charset, 'UTF-8'); + }); +}, 'Blob charset should override .'); diff --git a/test/wpt/tests/FileAPI/url/url-format.any.js b/test/wpt/tests/FileAPI/url/url-format.any.js new file mode 100644 index 0000000..69c5111 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url-format.any.js @@ -0,0 +1,70 @@ +// META: timeout=long +const blob = new Blob(['test']); +const file = new File(['test'], 'name'); + +test(t => { + const url_count = 5000; + let list = []; + + t.add_cleanup(() => { + for (let url of list) { + URL.revokeObjectURL(url); + } + }); + + for (let i = 0; i < url_count; ++i) + list.push(URL.createObjectURL(blob)); + + list.sort(); + + for (let i = 1; i < list.length; ++i) + assert_not_equals(list[i], list[i-1], 'generated Blob URLs should be unique'); +}, 'Generated Blob URLs are unique'); + +test(() => { + const url = URL.createObjectURL(blob); + assert_equals(typeof url, 'string'); + assert_true(url.startsWith('blob:')); +}, 'Blob URL starts with "blob:"'); + +test(() => { + const url = URL.createObjectURL(file); + assert_equals(typeof url, 'string'); + assert_true(url.startsWith('blob:')); +}, 'Blob URL starts with "blob:" for Files'); + +test(() => { + const url = URL.createObjectURL(blob); + assert_equals(new URL(url).origin, location.origin); + if (location.origin !== 'null') { + assert_true(url.includes(location.origin)); + assert_true(url.startsWith('blob:' + location.protocol)); + } +}, 'Origin of Blob URL matches our origin'); + +test(() => { + 
const url = URL.createObjectURL(blob); + const url_record = new URL(url); + assert_equals(url_record.protocol, 'blob:'); + assert_equals(url_record.origin, location.origin); + assert_equals(url_record.host, '', 'host should be an empty string'); + assert_equals(url_record.port, '', 'port should be an empty string'); + const uuid_path_re = /\/[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i; + assert_true(uuid_path_re.test(url_record.pathname), 'Path must end with a valid UUID'); + if (location.origin !== 'null') { + const nested_url = new URL(url_record.pathname); + assert_equals(nested_url.origin, location.origin); + assert_equals(nested_url.pathname.search(uuid_path_re), 0, 'Path must be a valid UUID'); + assert_true(url.includes(location.origin)); + assert_true(url.startsWith('blob:' + location.protocol)); + } +}, 'Blob URL parses correctly'); + +test(() => { + const url = URL.createObjectURL(file); + assert_equals(new URL(url).origin, location.origin); + if (location.origin !== 'null') { + assert_true(url.includes(location.origin)); + assert_true(url.startsWith('blob:' + location.protocol)); + } +}, 'Origin of Blob URL matches our origin for Files'); diff --git a/test/wpt/tests/FileAPI/url/url-in-tags-revoke.window.js b/test/wpt/tests/FileAPI/url/url-in-tags-revoke.window.js new file mode 100644 index 0000000..1cdad79 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url-in-tags-revoke.window.js @@ -0,0 +1,115 @@ +// META: timeout=long +async_test(t => { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + URL.revokeObjectURL(url); + + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); +}, 'Fetching a blob URL immediately before revoking it works in an iframe.'); + +async_test(t => { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', '/common/blank.html'); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + + frame.onload = t.step_func(() => { + frame.contentWindow.location = url; + URL.revokeObjectURL(url); + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); + }); +}, 'Fetching a blob URL immediately before revoking it works in an iframe navigation.'); + +async_test(t => { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + const win = window.open(url); + URL.revokeObjectURL(url); + add_completion_callback(() => { win.close(); }); + + win.onload = t.step_func_done(() => { + assert_equals(win.test_result, run_result); + }); +}, 'Opening a blob URL in a new window immediately before revoking it works.'); + +function receive_message_on_channel(t, channel_name) { + const channel = new BroadcastChannel(channel_name); + return new Promise(resolve => { + channel.addEventListener('message', t.step_func(e => { + resolve(e.data); + })); + }); +} + +function 
window_contents_for_channel(channel_name) { + return '\n' + + ''; +} + +async_test(t => { + const channel_name = 'noopener-window-test'; + const blob = new Blob([window_contents_for_channel(channel_name)], {type: 'text/html'}); + receive_message_on_channel(t, channel_name).then(t.step_func_done(t => { + assert_equals(t, 'foobar'); + })); + const url = URL.createObjectURL(blob); + const win = window.open(); + win.opener = null; + win.location = url; + URL.revokeObjectURL(url); +}, 'Opening a blob URL in a noopener about:blank window immediately before revoking it works.'); + +async_test(t => { + const run_result = 'test_script_OK'; + const blob_contents = 'window.script_test_result = "' + run_result + '";'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + + const e = document.createElement('script'); + e.setAttribute('src', url); + e.onload = t.step_func_done(() => { + assert_equals(window.script_test_result, run_result); + }); + + document.body.appendChild(e); + URL.revokeObjectURL(url); +}, 'Fetching a blob URL immediately before revoking it works in '; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); +}, 'Blob URLs can be used in iframes, and are treated same origin'); + +async_test(t => { + const blob_contents = '\n\n' + + '\n' + + '\n' + + '
\n' + + '
'; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url + '#block2'); + document.body.appendChild(frame); + frame.contentWindow.onscroll = t.step_func_done(() => { + assert_equals(frame.contentWindow.scrollY, 5000); + }); +}, 'Blob URL fragment is implemented.'); diff --git a/test/wpt/tests/FileAPI/url/url-lifetime.html b/test/wpt/tests/FileAPI/url/url-lifetime.html new file mode 100644 index 0000000..ad5d667 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url-lifetime.html @@ -0,0 +1,56 @@ + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/FileAPI/url/url-reload.window.js b/test/wpt/tests/FileAPI/url/url-reload.window.js new file mode 100644 index 0000000..d333b3a --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url-reload.window.js @@ -0,0 +1,36 @@ +function blob_url_reload_test(t, revoke_before_reload) { + const run_result = 'test_frame_OK'; + const blob_contents = '\n\n' + + ''; + const blob = new Blob([blob_contents], {type: 'text/html'}); + const url = URL.createObjectURL(blob); + + const frame = document.createElement('iframe'); + frame.setAttribute('src', url); + frame.setAttribute('style', 'display:none;'); + document.body.appendChild(frame); + + frame.onload = t.step_func(() => { + if (revoke_before_reload) + URL.revokeObjectURL(url); + assert_equals(frame.contentWindow.test_result, run_result); + frame.contentWindow.test_result = null; + frame.onload = t.step_func_done(() => { + assert_equals(frame.contentWindow.test_result, run_result); + }); + // Slight delay before reloading to ensure revoke actually has had a chance + // to be processed. + t.step_timeout(() => { + frame.contentWindow.location.reload(); + }, 250); + }); +} + +async_test(t => { + blob_url_reload_test(t, false); +}, 'Reloading a blob URL succeeds.'); + + +async_test(t => { + blob_url_reload_test(t, true); +}, 'Reloading a blob URL succeeds even if the URL was revoked.'); diff --git a/test/wpt/tests/FileAPI/url/url-with-fetch.any.js b/test/wpt/tests/FileAPI/url/url-with-fetch.any.js new file mode 100644 index 0000000..54e6a3d --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url-with-fetch.any.js @@ -0,0 +1,72 @@ +// META: script=resources/fetch-tests.js +// META: script=/common/gc.js + +function fetch_should_succeed(test, request) { + return fetch(request).then(response => response.text()); +} + +function fetch_should_fail(test, url, method = 'GET') { + return promise_rejects_js(test, TypeError, fetch(url, {method: method})); +} + +fetch_tests('fetch', fetch_should_succeed, fetch_should_fail); + +promise_test(t => { + const blob_contents = 'test blob contents'; + const blob_type = 'image/png'; + const blob = new Blob([blob_contents], {type: blob_type}); + const url = URL.createObjectURL(blob); + + return fetch(url).then(response => { + assert_equals(response.headers.get('Content-Type'), blob_type); + }); +}, 'fetch should return Content-Type from Blob'); + +promise_test(t => { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + const request = new Request(url); + + // Revoke the object URL. Request should take a reference to the blob as + // soon as it receives it in open(), so the request succeeds even though we + // revoke the URL before calling fetch(). 
+ URL.revokeObjectURL(url); + + return fetch_should_succeed(t, request).then(text => { + assert_equals(text, blob_contents); + }); +}, 'Revoke blob URL after creating Request, will fetch'); + +promise_test(async t => { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + let request = new Request(url); + + // Revoke the object URL. Request should take a reference to the blob as + // soon as it receives it in open(), so the request succeeds even though we + // revoke the URL before calling fetch(). + URL.revokeObjectURL(url); + + request = request.clone(); + await garbageCollect(); + + const text = await fetch_should_succeed(t, request); + assert_equals(text, blob_contents); +}, 'Revoke blob URL after creating Request, then clone Request, will fetch'); + +promise_test(function(t) { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + + const result = fetch_should_succeed(t, url).then(text => { + assert_equals(text, blob_contents); + }); + + // Revoke the object URL. fetch should have already resolved the blob URL. + URL.revokeObjectURL(url); + + return result; +}, 'Revoke blob URL after calling fetch, fetch should succeed'); diff --git a/test/wpt/tests/FileAPI/url/url-with-xhr.any.js b/test/wpt/tests/FileAPI/url/url-with-xhr.any.js new file mode 100644 index 0000000..29d8308 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url-with-xhr.any.js @@ -0,0 +1,68 @@ +// META: script=resources/fetch-tests.js + +function xhr_should_succeed(test, url) { + return new Promise((resolve, reject) => { + const xhr = new XMLHttpRequest(); + xhr.open('GET', url); + xhr.onload = test.step_func(() => { + assert_equals(xhr.status, 200); + assert_equals(xhr.statusText, 'OK'); + resolve(xhr.response); + }); + xhr.onerror = () => reject('Got unexpected error event'); + xhr.send(); + }); +} + +function xhr_should_fail(test, url, method = 'GET') { + const xhr = new XMLHttpRequest(); + xhr.open(method, url); + const result1 = new Promise((resolve, reject) => { + xhr.onload = () => reject('Got unexpected load event'); + xhr.onerror = resolve; + }); + const result2 = new Promise(resolve => { + xhr.onreadystatechange = test.step_func(() => { + if (xhr.readyState !== xhr.DONE) return; + assert_equals(xhr.status, 0); + resolve(); + }); + }); + xhr.send(); + return Promise.all([result1, result2]); +} + +fetch_tests('XHR', xhr_should_succeed, xhr_should_fail); + +async_test(t => { + const blob_contents = 'test blob contents'; + const blob_type = 'image/png'; + const blob = new Blob([blob_contents], {type: blob_type}); + const url = URL.createObjectURL(blob); + const xhr = new XMLHttpRequest(); + xhr.open('GET', url); + xhr.onloadend = t.step_func_done(() => { + assert_equals(xhr.getResponseHeader('Content-Type'), blob_type); + }); + xhr.send(); +}, 'XHR should return Content-Type from Blob'); + +async_test(t => { + const blob_contents = 'test blob contents'; + const blob = new Blob([blob_contents]); + const url = URL.createObjectURL(blob); + const xhr = new XMLHttpRequest(); + xhr.open('GET', url); + + // Revoke the object URL. XHR should take a reference to the blob as soon as + // it receives it in open(), so the request succeeds even though we revoke the + // URL before calling send(). 
+ URL.revokeObjectURL(url); + + xhr.onload = t.step_func_done(() => { + assert_equals(xhr.response, blob_contents); + }); + xhr.onerror = t.unreached_func('Got unexpected error event'); + + xhr.send(); +}, 'Revoke blob URL after open(), will fetch'); diff --git a/test/wpt/tests/FileAPI/url/url_createobjecturl_file-manual.html b/test/wpt/tests/FileAPI/url/url_createobjecturl_file-manual.html new file mode 100644 index 0000000..7ae3251 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url_createobjecturl_file-manual.html @@ -0,0 +1,45 @@ + + +FileAPI Test: Creating Blob URL with File + + + + + + +
+  Test steps:
+  1. Download blue96x96.png to local.
+  3. Select the local file (blue96x96.png) to run the test.
+ + + diff --git a/test/wpt/tests/FileAPI/url/url_createobjecturl_file_img-manual.html b/test/wpt/tests/FileAPI/url/url_createobjecturl_file_img-manual.html new file mode 100644 index 0000000..534c1de --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url_createobjecturl_file_img-manual.html @@ -0,0 +1,28 @@ + + +FileAPI Test: Creating Blob URL with File as image source + + + +
+  Test steps:
+  1. Download blue96x96.png to local.
+  3. Select the local file (blue96x96.png) to run the test.
+
+  Pass/fail criteria:
+  Test passes if there is a filled blue square.
+ + + diff --git a/test/wpt/tests/FileAPI/url/url_xmlhttprequest_img-ref.html b/test/wpt/tests/FileAPI/url/url_xmlhttprequest_img-ref.html new file mode 100644 index 0000000..7d73904 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url_xmlhttprequest_img-ref.html @@ -0,0 +1,12 @@ + + +FileAPI Reference File + + + +

+  Test passes if there is a filled blue square.
+ diff --git a/test/wpt/tests/FileAPI/url/url_xmlhttprequest_img.html b/test/wpt/tests/FileAPI/url/url_xmlhttprequest_img.html new file mode 100644 index 0000000..468dcb0 --- /dev/null +++ b/test/wpt/tests/FileAPI/url/url_xmlhttprequest_img.html @@ -0,0 +1,27 @@ + + + +FileAPI Test: Creating Blob URL via XMLHttpRequest as image source + + + + +

+  Test passes if there is a filled blue square.
+ + + + diff --git a/test/wpt/tests/LICENSE.md b/test/wpt/tests/LICENSE.md new file mode 100644 index 0000000..39c46d0 --- /dev/null +++ b/test/wpt/tests/LICENSE.md @@ -0,0 +1,11 @@ +# The 3-Clause BSD License + +Copyright © web-platform-tests contributors + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/test/wpt/tests/README.md b/test/wpt/tests/README.md new file mode 100644 index 0000000..7e8e994 --- /dev/null +++ b/test/wpt/tests/README.md @@ -0,0 +1,124 @@ +The web-platform-tests Project +============================== + +[![Taskcluster CI Status](https://community-tc.services.mozilla.com/api/github/v1/repository/web-platform-tests/wpt/master/badge.svg)](https://community-tc.services.mozilla.com/api/github/v1/repository/web-platform-tests/wpt/master/latest) [![documentation](https://github.com/web-platform-tests/wpt/workflows/documentation/badge.svg)](https://github.com/web-platform-tests/wpt/actions?query=workflow%3Adocumentation+branch%3Amaster) [![manifest](https://github.com/web-platform-tests/wpt/workflows/manifest/badge.svg)](https://github.com/web-platform-tests/wpt/actions?query=workflow%3Amanifest+branch%3Amaster) [![Python 3](https://pyup.io/repos/github/web-platform-tests/wpt/python-3-shield.svg)](https://pyup.io/repos/github/web-platform-tests/wpt/) + +The web-platform-tests Project is a cross-browser test suite for the +Web-platform stack. Writing tests in a way that allows them to be run in all +browsers gives browser projects confidence that they are shipping software that +is compatible with other implementations, and that later implementations will +be compatible with their implementations. This in turn gives Web +authors/developers confidence that they can actually rely on the Web platform +to deliver on the promise of working across browsers and devices without +needing extra layers of abstraction to paper over the gaps left by +specification editors and implementors. 
+ +The most important sources of information and activity are: + +- [github.com/web-platform-tests/wpt](https://github.com/web-platform-tests/wpt): + the canonical location of the project's source code revision history and the + discussion forum for changes to the code +- [web-platform-tests.org](https://web-platform-tests.org): the documentation + website; details how to set up the project, how to write tests, how to give + and receive peer review, how to serve as an administrator, and more +- [wpt.live](https://wpt.live): a public deployment of the test suite, + allowing anyone to run the tests by visiting from an + Internet-enabled browser of their choice +- [wpt.fyi](https://wpt.fyi): an archive of test results collected from an + array of web browsers on a regular basis +- [Real-time chat room](https://app.element.io/#/room/#wpt:matrix.org): the + `wpt:matrix.org` matrix channel; includes participants located + around the world, but busiest during the European working day. +- [Mailing list](https://lists.w3.org/Archives/Public/public-test-infra/): a + public and low-traffic discussion list +- [RFCs](https://github.com/web-platform-tests/rfcs): a repo for requesting + comments on substantial changes that would impact other stakeholders or + users; people who work on WPT infra are encouraged to watch the repo. + +**If you'd like clarification about anything**, don't hesitate to ask in the +chat room or on the mailing list. + +Setting Up the Repo +=================== + +Clone or otherwise get https://github.com/web-platform-tests/wpt. + +Note: because of the frequent creation and deletion of branches in this +repo, it is recommended to "prune" stale branches when fetching updates, +i.e. use `git pull --prune` (or `git fetch -p && git merge`). + +Running the Tests +================= + +See the [documentation website](https://web-platform-tests.org/running-tests/) +and in particular the +[system setup for running tests locally](https://web-platform-tests.org/running-tests/from-local-system.html#system-setup). + +Command Line Tools +================== + +The `wpt` command provides a frontend to a variety of tools for +working with and running web-platform-tests. Some of the most useful +commands are: + +* `wpt serve` - For starting the wpt http server +* `wpt run` - For running tests in a browser +* `wpt lint` - For running the lint against all tests +* `wpt manifest` - For updating or generating a `MANIFEST.json` test manifest +* `wpt install` - For installing the latest release of a browser or + webdriver server on the local machine. +* `wpt serve-wave` - For starting the wpt http server and the WAVE test runner. +For more details on how to use the WAVE test runner see the [documentation](./tools/wave/docs/usage/usage.md). + +Windows Notes +============================================= + +On Windows `wpt` commands must be prefixed with `python` or the path +to the python binary (if `python` is not in your `%PATH%`). + +```bash +python wpt [command] +``` + +Alternatively, you may also use +[Bash on Ubuntu on Windows](https://msdn.microsoft.com/en-us/commandline/wsl/about) +in the Windows 10 Anniversary Update build, then access your windows +partition from there to launch `wpt` commands. + +Please make sure git and your text editor do not automatically convert +line endings, as it will cause lint errors. For git, please set +`git config core.autocrlf false` in your working tree. 
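+
+For example, a typical local session with the `wpt` frontend described above
+(a sketch only; it assumes a Unix-like shell and a locally installed Firefox)
+might look like:
+
+```bash
+./wpt manifest              # generate or update MANIFEST.json
+./wpt lint                  # run the lint over the tree
+./wpt run firefox FileAPI/  # run the FileAPI tests in Firefox
+```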
+ +Publication +=========== + +The master branch is automatically synced to [wpt.live](https://wpt.live/) and +[w3c-test.org](https://w3c-test.org/). + +Contributing +============ + +Save the Web, Write Some Tests! + +Absolutely everyone is welcome to contribute to test development. No +test is too small or too simple, especially if it corresponds to +something for which you've noted an interoperability bug in a browser. + +The way to contribute is just as usual: + +* Fork this repository (and make sure you're still relatively in sync + with it if you forked a while ago). +* Create a branch for your changes: + `git checkout -b topic`. +* Make your changes. +* Run `./wpt lint` as described above. +* Commit locally and push that to your repo. +* Create a pull request based on the above. + +Issues with web-platform-tests +------------------------------ + +If you spot an issue with a test and are not comfortable providing a +pull request per above to fix it, please +[file a new issue](https://github.com/web-platform-tests/wpt/issues/new). +Thank you! diff --git a/test/wpt/tests/common/CustomCorsResponse.py b/test/wpt/tests/common/CustomCorsResponse.py new file mode 100644 index 0000000..fc4d122 --- /dev/null +++ b/test/wpt/tests/common/CustomCorsResponse.py @@ -0,0 +1,30 @@ +import json + +def main(request, response): + '''Handler for getting an HTTP response customised by the given query + parameters. + + The returned response will have + - HTTP headers defined by the 'headers' query parameter + - Must be a serialized JSON dictionary mapping header names to header + values + - HTTP status code defined by the 'status' query parameter + - Must be a positive serialized JSON integer like the string '200' + - Response content defined by the 'content' query parameter + - Must be a serialized JSON string representing the desired response body + ''' + def query_parameter_or_default(param, default): + return request.GET.first(param) if param in request.GET else default + + headers = json.loads(query_parameter_or_default(b'headers', b'"{}"')) + for k, v in headers.items(): + response.headers.set(k, v) + + # Note that, in order to have out-of-the-box support for tests that don't call + # setup({'allow_uncaught_exception': true}) + # we return a no-op JS payload. This approach will avoid syntax errors in + # script resources that would otherwise cause the test harness to fail. + response.content = json.loads(query_parameter_or_default(b'content', + b'"/* CustomCorsResponse.py content */"')) + response.status_code = json.loads(query_parameter_or_default(b'status', + b'200')) diff --git a/test/wpt/tests/common/META.yml b/test/wpt/tests/common/META.yml new file mode 100644 index 0000000..ca4d2e5 --- /dev/null +++ b/test/wpt/tests/common/META.yml @@ -0,0 +1,3 @@ +suggested_reviewers: + - zqzhang + - deniak diff --git a/test/wpt/tests/common/PrefixedLocalStorage.js b/test/wpt/tests/common/PrefixedLocalStorage.js new file mode 100644 index 0000000..2f4e7b6 --- /dev/null +++ b/test/wpt/tests/common/PrefixedLocalStorage.js @@ -0,0 +1,116 @@ +/** + * Supports pseudo-"namespacing" localStorage for a given test + * by generating and using a unique prefix for keys. Why trounce on other + * tests' localStorage items when you can keep it "separated"? + * + * PrefixedLocalStorageTest: Instantiate in testharness.js tests to generate + * a new unique-ish prefix + * PrefixedLocalStorageResource: Instantiate in supporting test resource + * files to use/share a prefix generated by a test. 
+ */ +var PrefixedLocalStorage = function () { + this.prefix = ''; // Prefix for localStorage keys + this.param = 'prefixedLocalStorage'; // Param to use in querystrings +}; + +PrefixedLocalStorage.prototype.clear = function () { + if (this.prefix === '') { return; } + Object.keys(localStorage).forEach(sKey => { + if (sKey.indexOf(this.prefix) === 0) { + localStorage.removeItem(sKey); + } + }); +}; + +/** + * Append/replace prefix parameter and value in URI querystring + * Use to generate URLs to resource files that will share the prefix. + */ +PrefixedLocalStorage.prototype.url = function (uri) { + function updateUrlParameter (uri, key, value) { + var i = uri.indexOf('#'); + var hash = (i === -1) ? '' : uri.substr(i); + uri = (i === -1) ? uri : uri.substr(0, i); + var re = new RegExp(`([?&])${key}=.*?(&|$)`, 'i'); + var separator = uri.indexOf('?') !== -1 ? '&' : '?'; + uri = (uri.match(re)) ? uri.replace(re, `$1${key}=${value}$2`) : + `${uri}${separator}${key}=${value}`; + return uri + hash; + } + return updateUrlParameter(uri, this.param, this.prefix); +}; + +PrefixedLocalStorage.prototype.prefixedKey = function (baseKey) { + return `${this.prefix}${baseKey}`; +}; + +PrefixedLocalStorage.prototype.setItem = function (baseKey, value) { + localStorage.setItem(this.prefixedKey(baseKey), value); +}; + +/** + * Listen for `storage` events pertaining to a particular key, + * prefixed with this object's prefix. Ignore when value is being set to null + * (i.e. removeItem). + */ +PrefixedLocalStorage.prototype.onSet = function (baseKey, fn) { + window.addEventListener('storage', e => { + var match = this.prefixedKey(baseKey); + if (e.newValue !== null && e.key.indexOf(match) === 0) { + fn.call(this, e); + } + }); +}; + +/***************************************************************************** + * Use in a testharnessjs test to generate a new key prefix. + * async_test(t => { + * var prefixedStorage = new PrefixedLocalStorageTest(); + * t.add_cleanup(() => prefixedStorage.cleanup()); + * /... + * }); + */ +var PrefixedLocalStorageTest = function () { + PrefixedLocalStorage.call(this); + this.prefix = `${document.location.pathname}-${Math.random()}-${Date.now()}-`; +}; +PrefixedLocalStorageTest.prototype = Object.create(PrefixedLocalStorage.prototype); +PrefixedLocalStorageTest.prototype.constructor = PrefixedLocalStorageTest; + +/** + * Use in a cleanup function to clear out prefixed entries in localStorage + */ +PrefixedLocalStorageTest.prototype.cleanup = function () { + this.setItem('closeAll', 'true'); + this.clear(); +}; + +/***************************************************************************** + * Use in test resource files to share a prefix generated by a + * PrefixedLocalStorageTest. Will look in URL querystring for prefix. + * Setting `close_on_cleanup` opt truthy will make this script's window listen + * for storage `closeAll` event from controlling test and close itself. + * + * var PrefixedLocalStorageResource({ close_on_cleanup: true }); + */ +var PrefixedLocalStorageResource = function (options) { + PrefixedLocalStorage.call(this); + this.options = Object.assign({}, { + close_on_cleanup: false + }, options || {}); + // Check URL querystring for prefix to use + var regex = new RegExp(`[?&]${this.param}(=([^&#]*)|&|#|$)`), + results = regex.exec(document.location.href); + if (results && results[2]) { + this.prefix = results[2]; + } + // Optionally have this window close itself when the PrefixedLocalStorageTest + // sets a `closeAll` item. 
+ if (this.options.close_on_cleanup) { + this.onSet('closeAll', () => { + window.close(); + }); + } +}; +PrefixedLocalStorageResource.prototype = Object.create(PrefixedLocalStorage.prototype); +PrefixedLocalStorageResource.prototype.constructor = PrefixedLocalStorageResource; diff --git a/test/wpt/tests/common/PrefixedLocalStorage.js.headers b/test/wpt/tests/common/PrefixedLocalStorage.js.headers new file mode 100644 index 0000000..6805c32 --- /dev/null +++ b/test/wpt/tests/common/PrefixedLocalStorage.js.headers @@ -0,0 +1 @@ +Content-Type: text/javascript; charset=utf-8 diff --git a/test/wpt/tests/common/PrefixedPostMessage.js b/test/wpt/tests/common/PrefixedPostMessage.js new file mode 100644 index 0000000..674b528 --- /dev/null +++ b/test/wpt/tests/common/PrefixedPostMessage.js @@ -0,0 +1,100 @@ +/** + * Supports pseudo-"namespacing" for window-posted messages for a given test + * by generating and using a unique prefix that gets wrapped into message + * objects. This makes it more feasible to have multiple tests that use + * `window.postMessage` in a single test file. Basically, make it possible + * for the each test to listen for only the messages that are pertinent to it. + * + * 'Prefix' not an elegant term to use here but this models itself after + * PrefixedLocalStorage. + * + * PrefixedMessageTest: Instantiate in testharness.js tests to generate + * a new unique-ish prefix that can be used by other test support files + * PrefixedMessageResource: Instantiate in supporting test resource + * files to use/share a prefix generated by a test. + */ +var PrefixedMessage = function () { + this.prefix = ''; + this.param = 'prefixedMessage'; // Param to use in querystrings +}; + +/** + * Generate a URL that adds/replaces param with this object's prefix + * Use to link to test support files that make use of + * PrefixedMessageResource. + */ +PrefixedMessage.prototype.url = function (uri) { + function updateUrlParameter (uri, key, value) { + var i = uri.indexOf('#'); + var hash = (i === -1) ? '' : uri.substr(i); + uri = (i === -1) ? uri : uri.substr(0, i); + var re = new RegExp(`([?&])${key}=.*?(&|$)`, 'i'); + var separator = uri.indexOf('?') !== -1 ? '&' : '?'; + uri = (uri.match(re)) ? uri.replace(re, `$1${key}=${value}$2`) : + `${uri}${separator}${key}=${value}`; + return uri + hash; + } + return updateUrlParameter(uri, this.param, this.prefix); +}; + +/** + * Add an eventListener on `message` but only invoke the given callback + * for messages whose object contains this object's prefix. Remove the + * event listener once the anticipated message has been received. + */ +PrefixedMessage.prototype.onMessage = function (fn) { + window.addEventListener('message', e => { + if (typeof e.data === 'object' && e.data.hasOwnProperty('prefix')) { + if (e.data.prefix === this.prefix) { + // Only invoke callback when `data` is an object containing + // a `prefix` key with this object's prefix value + // Note fn is invoked with "unwrapped" data first, then the event `e` + // (which contains the full, wrapped e.data should it be needed) + fn.call(this, e.data.data, e); + window.removeEventListener('message', fn); + } + } + }); +}; + +/** + * Instantiate in a test file (e.g. 
during `setup`) to create a unique-ish + * prefix that can be shared by support files + */ +var PrefixedMessageTest = function () { + PrefixedMessage.call(this); + this.prefix = `${document.location.pathname}-${Math.random()}-${Date.now()}-`; +}; +PrefixedMessageTest.prototype = Object.create(PrefixedMessage.prototype); +PrefixedMessageTest.prototype.constructor = PrefixedMessageTest; + +/** + * Instantiate in a test support script to use a "prefix" generated by a + * PrefixedMessageTest in a controlling test file. It will look for + * the prefix in a URL param (see also PrefixedMessage#url) + */ +var PrefixedMessageResource = function () { + PrefixedMessage.call(this); + // Check URL querystring for prefix to use + var regex = new RegExp(`[?&]${this.param}(=([^&#]*)|&|#|$)`), + results = regex.exec(document.location.href); + if (results && results[2]) { + this.prefix = results[2]; + } +}; +PrefixedMessageResource.prototype = Object.create(PrefixedMessage.prototype); +PrefixedMessageResource.prototype.constructor = PrefixedMessageResource; + +/** + * This is how a test resource document can "send info" to its + * opener context. It will whatever message is being sent (`data`) in + * an object that injects the prefix. + */ +PrefixedMessageResource.prototype.postToOpener = function (data) { + if (window.opener) { + window.opener.postMessage({ + prefix: this.prefix, + data: data + }, '*'); + } +}; diff --git a/test/wpt/tests/common/PrefixedPostMessage.js.headers b/test/wpt/tests/common/PrefixedPostMessage.js.headers new file mode 100644 index 0000000..6805c32 --- /dev/null +++ b/test/wpt/tests/common/PrefixedPostMessage.js.headers @@ -0,0 +1 @@ +Content-Type: text/javascript; charset=utf-8 diff --git a/test/wpt/tests/common/README.md b/test/wpt/tests/common/README.md new file mode 100644 index 0000000..9aef19c --- /dev/null +++ b/test/wpt/tests/common/README.md @@ -0,0 +1,10 @@ +The files in this directory are non-infrastructure support files that can be used by tests. + +* `blank.html` - An empty HTML document. +* `domain-setter.sub.html` - An HTML document that sets `document.domain`. +* `dummy.xhtml` - An XHTML document. +* `dummy.xml` - An XML document. +* `text-plain.txt` - A text/plain document. +* `*.js` - Utility scripts. These are documented in the source. +* `*.py` - wptserve [Python Handlers](https://web-platform-tests.org/writing-tests/python-handlers/). These are documented in the source. +* `security-features` - Documented in `security-features/README.md`. diff --git a/test/wpt/tests/common/__init__.py b/test/wpt/tests/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/common/arrays.js b/test/wpt/tests/common/arrays.js new file mode 100644 index 0000000..2b31bb4 --- /dev/null +++ b/test/wpt/tests/common/arrays.js @@ -0,0 +1,31 @@ +/** + * Callback for checking equality of c and d. + * + * @callback equalityCallback + * @param {*} c + * @param {*} d + * @returns {boolean} + */ + +/** + * Returns true if the given arrays are equal. Optionally can pass an equality function. 
+ * @param {Array} a + * @param {Array} b + * @param {equalityCallback} callbackFunction - defaults to `c === d` + * @returns {boolean} + */ +export function areArraysEqual(a, b, equalityFunction = (c, d) => { return c === d; }) { + try { + if (a.length !== b.length) + return false; + + for (let i = 0; i < a.length; i++) { + if (!equalityFunction(a[i], b[i])) + return false; + } + } catch (ex) { + return false; + } + + return true; +} diff --git a/test/wpt/tests/common/blank-with-cors.html b/test/wpt/tests/common/blank-with-cors.html new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/common/blank-with-cors.html.headers b/test/wpt/tests/common/blank-with-cors.html.headers new file mode 100644 index 0000000..cb762ef --- /dev/null +++ b/test/wpt/tests/common/blank-with-cors.html.headers @@ -0,0 +1 @@ +Access-Control-Allow-Origin: * diff --git a/test/wpt/tests/common/blank.html b/test/wpt/tests/common/blank.html new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/common/custom-cors-response.js b/test/wpt/tests/common/custom-cors-response.js new file mode 100644 index 0000000..be9c7ce --- /dev/null +++ b/test/wpt/tests/common/custom-cors-response.js @@ -0,0 +1,32 @@ +const custom_cors_response = (payload, base_url) => { + base_url = base_url || new URL(location.href); + + // Clone the given `payload` so that, as we modify it, we won't be mutating + // the caller's value in unexpected ways. + payload = Object.assign({}, payload); + payload.headers = payload.headers || {}; + // Note that, in order to have out-of-the-box support for tests that don't + // call `setup({'allow_uncaught_exception': true})` we return a no-op JS + // payload. This approach will avoid hitting syntax errors if the resource is + // interpreted as script. Without this workaround, the SyntaxError would be + // caught by the test harness and trigger a test failure. + payload.content = payload.content || '/* custom-cors-response.js content */'; + payload.status_code = payload.status_code || 200; + + // Assume that we'll be doing a CORS-enabled fetch so we'll need to set ACAO. + const acao = "Access-Control-Allow-Origin"; + if (!(acao in payload.headers)) { + payload.headers[acao] = '*'; + } + + if (!("Content-Type" in payload.headers)) { + payload.headers["Content-Type"] = "text/javascript"; + } + + let ret = new URL("/common/CustomCorsResponse.py", base_url); + for (const key in payload) { + ret.searchParams.append(key, JSON.stringify(payload[key])); + } + + return ret; +}; diff --git a/test/wpt/tests/common/dispatcher/README.md b/test/wpt/tests/common/dispatcher/README.md new file mode 100644 index 0000000..cfaafb6 --- /dev/null +++ b/test/wpt/tests/common/dispatcher/README.md @@ -0,0 +1,228 @@ +# `RemoteContext`: API for script execution in another context + +`RemoteContext` in `/common/dispatcher/dispatcher.js` provides an interface to +execute JavaScript in another global object (page or worker, the "executor"), +based on: + +- [WPT RFC 88: context IDs from uuid searchParams in URL](https://github.com/web-platform-tests/rfcs/pull/88), +- [WPT RFC 89: execute_script](https://github.com/web-platform-tests/rfcs/pull/89) and +- [WPT RFC 91: RemoteContext](https://github.com/web-platform-tests/rfcs/pull/91). 
+ +Tests can send arbitrary javascript to executors to evaluate in its global +object, like: + +``` +// injector.html +const argOnLocalContext = ...; + +async function execute() { + window.open('executor.html?uuid=' + uuid); + const ctx = new RemoteContext(uuid); + await ctx.execute_script( + (arg) => functionOnRemoteContext(arg), + [argOnLocalContext]); +}; +``` + +and on executor: + +``` +// executor.html +function functionOnRemoteContext(arg) { ... } + +const uuid = new URLSearchParams(window.location.search).get('uuid'); +const executor = new Executor(uuid); +``` + +For concrete examples, see +[events.html](../../html/browsers/browsing-the-web/back-forward-cache/events.html) +and +[executor.html](../../html/browsers/browsing-the-web/back-forward-cache/resources/executor.html) +in back-forward cache tests. + +Note that `executor*` files under `/common/dispatcher/` are NOT for +`RemoteContext.execute_script()`. Use `remote-executor.html` instead. + +This is universal and avoids introducing many specific `XXX-helper.html` +resources. +Moreover, tests are easier to read, because the whole logic of the test can be +defined in a single file. + +## `new RemoteContext(uuid)` + +- `uuid` is a UUID string that identifies the remote context and should match + with the `uuid` parameter of the URL of the remote context. +- Callers should create the remote context outside this constructor (e.g. + `window.open('executor.html?uuid=' + uuid)`). + +## `RemoteContext.execute_script(fn, args)` + +- `fn` is a JavaScript function to execute on the remote context, which is + converted to a string using `toString()` and sent to the remote context. +- `args` is null or an array of arguments to pass to the function on the + remote context. Arguments are passed as JSON. +- If the return value of `fn` when executed in the remote context is a promise, + the promise returned by `execute_script` resolves to the resolved value of + that promise. Otherwise the `execute_script` promise resolves to the return + value of `fn`. + +Note that `fn` is evaluated on the remote context (`executor.html` in the +example above), while `args` are evaluated on the caller context +(`injector.html`) and then passed to the remote context. + +## Return value of injected functions and `execute_script()` + +If the return value of the injected function when executed in the remote +context is a promise, the promise returned by `execute_script` resolves to the +resolved value of that promise. Otherwise the `execute_script` promise resolves +to the return value of the function. + +When the return value of an injected script is a Promise, it should be resolved +before any navigation starts on the remote context. For example, it shouldn't +be resolved after navigating out and navigating back to the page again. +It's fine to create a Promise to be resolved after navigations, if it's not the +return value of the injected function. + +## Calling timing of `execute_script()` + +When `RemoteContext.execute_script()` is called when the remote context is not +active (for example before it is created, before navigation to the page, or +during the page is in back-forward cache), the injected script is evaluated +after the remote context becomes active. + +Multiple calls to `RemoteContext.execute_script()` will result in multiple scripts +being executed in remote context and ordering will be maintained. + +## Errors from `execute_script()` + +Errors from `execute_script()` will result in promise rejections, so it is +important to await the result. 
This can be `await ctx.execute_script(...)` for +every call but if there are multiple scripts to executed, it may be preferable +to wait on them in parallel to avoid incurring full round-trip time for each, +e.g. + +```js +await Promise.all( + ctx1.execute_script(...), + ctx1.execute_script(...), + ctx2.execute_script(...), + ctx2.execute_script(...), + ... +) +``` + +## Evaluation timing of injected functions + +The script injected by `RemoteContext.execute_script()` can be evaluated any +time during the remote context is active. +For example, even before DOMContentLoaded events or even during navigation. +It's the responsibility of test-specific code/helpers to ensure evaluation +timing constraints (which can be also test-specific), if any needed. + +### Ensuring evaluation timing around page load + +For example, to ensure that injected functions (`mainFunction` below) are +evaluated after the first `pageshow` event, we can use pure JavaScript code +like below: + +``` +// executor.html +window.pageShowPromise = new Promise(resolve => + window.addEventListener('pageshow', resolve, {once: true})); + + +// injector.html +const waitForPageShow = async () => { + while (!window.pageShowPromise) { + await new Promise(resolve => setTimeout(resolve, 100)); + } + await window.pageShowPromise; +}; + +await ctx.execute(waitForPageShow); +await ctx.execute(mainFunction); +``` + +### Ensuring evaluation timing around navigation out/unloading + +It can be important to ensure there are no injected functions nor code behind +`RemoteContext` (such as Fetch APIs accessing server-side stash) running after +navigation is initiated, for example in the case of back-forward cache testing. + +To ensure this, + +- Do not call the next `RemoteContext.execute()` for the remote context after + triggering the navigation, until we are sure that the remote context is not + active (e.g. after we confirm that the new page is loaded). +- Call `Executor.suspend(callback)` synchronously within the injected script. + This suspends executor-related code, and calls `callback` when it is ready + to start navigation. + +The code on the injector side would be like: + +``` +// injector.html +await ctx.execute_script(() => { + executor.suspend(() => { + location.href = 'new-url.html'; + }); +}); +``` + +## Future Work: Possible integration with `test_driver` + +Currently `RemoteContext` is implemented by JavaScript and WPT-server-side +stash, and not integrated with `test_driver` nor `testharness`. +There is a proposal of `test_driver`-integrated version (see the RFCs listed +above). + +The API semantics and guidelines in this document are designed to be applicable +to both the current stash-based `RemoteContext` and `test_driver`-based +version, and thus the tests using `RemoteContext` will be migrated with minimum +modifications (mostly in `/common/dispatcher/dispatcher.js` and executors), for +example in a +[draft CL](https://chromium-review.googlesource.com/c/chromium/src/+/3082215/). + + +# `send()`/`receive()` Message passing APIs + +`dispatcher.js` (and its server-side backend `dispatcher.py`) provides a +universal queue-based message passing API. +Each queue is identified by a UUID, and accessed via the following APIs: + +- `send(uuid, message)` pushes a string `message` to the queue `uuid`. +- `receive(uuid)` pops the first item from the queue `uuid`. +- `showRequestHeaders(origin, uuid)` and + `cacheableShowRequestHeaders(origin, uuid)` return URLs, that push request + headers to the queue `uuid` upon fetching. 
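+
+For example (a minimal sketch; the `uuid` value and the message text are purely
+illustrative), two contexts that share a queue id can exchange a string:
+
+```js
+// Context A: push a message onto the queue identified by `uuid`.
+await send(uuid, 'ready');
+
+// Context B (possibly in another origin or browsing context group):
+// pop the first queued message.
+const message = await receive(uuid); // resolves to 'ready'
+```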
+ +It works cross-origin, and even access different browser context groups. + +Messages are queued, this means one doesn't need to wait for the receiver to +listen, before sending the first message +(but still need to wait for the resolution of the promise returned by `send()` +to ensure the order between `send()`s). + +## Executors + +Similar to `RemoteContext.execute_script()`, `send()`/`receive()` can be used +for sending arbitrary javascript to be evaluated in another page or worker. + +- `executor.html` (as a Document), +- `executor-worker.js` (as a Web Worker), and +- `executor-service-worker.js` (as a Service Worker) + +are examples of executors. +Note that these executors are NOT compatible with +`RemoteContext.execute_script()`. + +## Future Work + +`send()`, `receive()` and the executors below are kept for COEP/COOP tests. + +For remote script execution, new tests should use +`RemoteContext.execute_script()` instead. + +For message passing, +[WPT RFC 90](https://github.com/web-platform-tests/rfcs/pull/90) is still under +discussion. diff --git a/test/wpt/tests/common/dispatcher/dispatcher.js b/test/wpt/tests/common/dispatcher/dispatcher.js new file mode 100644 index 0000000..a0f9f43 --- /dev/null +++ b/test/wpt/tests/common/dispatcher/dispatcher.js @@ -0,0 +1,256 @@ +// Define a universal message passing API. It works cross-origin and across +// browsing context groups. +const dispatcher_path = "/common/dispatcher/dispatcher.py"; +const dispatcher_url = new URL(dispatcher_path, location.href).href; + +// Return a promise, limiting the number of concurrent accesses to a shared +// resources to |max_concurrent_access|. +const concurrencyLimiter = (max_concurrency) => { + let pending = 0; + let waiting = []; + return async (task) => { + pending++; + if (pending > max_concurrency) + await new Promise(resolve => waiting.push(resolve)); + let result = await task(); + pending--; + waiting.shift()?.(); + return result; + }; +} + +// Wait for a random amount of time in the range [10ms,100ms]. +const randomDelay = () => { + return new Promise(resolve => setTimeout(resolve, 10 + 90*Math.random())); +} + +// Sending too many requests in parallel causes congestion. Limiting it improves +// throughput. +// +// Note: The following table has been determined on the test: +// ../cache-storage.tentative.https.html +// using Chrome with a 64 core CPU / 64GB ram, in release mode: +// ┌───────────┬───┬───┬───┬───┬───┬───┬───┬───┬───┬───┬───┬────┠+// │concurrency│ 1 │ 2 │ 3 │ 4 │ 5 │ 6 │ 10│ 15│ 20│ 30│ 50│ 100│ +// ├───────────┼───┼───┼───┼───┼───┼───┼───┼───┼───┼───┼───┼────┤ +// │time (s) │ 54│ 38│ 31│ 29│ 26│ 24│ 22│ 22│ 22│ 22│ 34│ 36 │ +// └───────────┴───┴───┴───┴───┴───┴───┴───┴───┴───┴───┴───┴────┘ +const limiter = concurrencyLimiter(6); + +// While requests to different remote contexts can go in parallel, we need to +// ensure that requests to each remote context are done in order. This maps a +// uuid to a queue of requests to send. A queue is processed until it is empty +// and then is deleted from the map. +const sendQueues = new Map(); + +// Sends a single item (with rate-limiting) and calls the associated resolver +// when it is successfully sent. +const sendItem = async function (uuid, resolver, message) { + await limiter(async () => { + // Requests might be dropped. Retry until getting a confirmation it has been + // processed. 
+ while(1) { + try { + let response = await fetch(dispatcher_url + `?uuid=${uuid}`, { + method: 'POST', + body: message + }) + if (await response.text() == "done") { + resolver(); + return; + } + } catch (fetch_error) {} + await randomDelay(); + }; + }); +} + +// While the queue is non-empty, send the next item. This is async and new items +// may be added to the queue while others are being sent. +const processQueue = async function (uuid, queue) { + while (queue.length) { + const [resolver, message] = queue.shift(); + await sendItem(uuid, resolver, message); + } + // The queue is empty, delete it. + sendQueues.delete(uuid); +} + +const send = async function (uuid, message) { + const itemSentPromise = new Promise((resolve) => { + const item = [resolve, message]; + if (sendQueues.has(uuid)) { + // There is already a queue for `uuid`, just add to it and it will be processed. + sendQueues.get(uuid).push(item); + } else { + // There is no queue for `uuid`, create it and start processing. + const queue = [item]; + sendQueues.set(uuid, queue); + processQueue(uuid, queue); + } + }); + // Wait until the item has been successfully sent. + await itemSentPromise; +} + +const receive = async function (uuid) { + while(1) { + let data = "not ready"; + try { + data = await limiter(async () => { + let response = await fetch(dispatcher_url + `?uuid=${uuid}`); + return await response.text(); + }); + } catch (fetch_error) {} + + if (data == "not ready") { + await randomDelay(); + continue; + } + + return data; + } +} + +// Returns an URL. When called, the server sends toward the `uuid` queue the +// request headers. Useful for determining if something was requested with +// Cookies. +const showRequestHeaders = function(origin, uuid) { + return origin + dispatcher_path + `?uuid=${uuid}&show-headers`; +} + +// Same as above, except for the response is cacheable. +const cacheableShowRequestHeaders = function(origin, uuid) { + return origin + dispatcher_path + `?uuid=${uuid}&cacheable&show-headers`; +} + +// This script requires +// - `/common/utils.js` for `token()`. + +// Returns the URL of a document that can be used as a `RemoteContext`. +// +// `uuid` should be a UUID uniquely identifying the given remote context. +// `options` has the following shape: +// +// { +// host: (optional) Sets the returned URL's `host` property. Useful for +// cross-origin executors. +// protocol: (optional) Sets the returned URL's `protocol` property. +// } +function remoteExecutorUrl(uuid, options) { + const url = new URL("/common/dispatcher/remote-executor.html", location); + url.searchParams.set("uuid", uuid); + + if (options?.host) { + url.host = options.host; + } + + if (options?.protocol) { + url.protocol = options.protocol; + } + + return url; +} + +// Represents a remote executor. For more detailed explanation see `README.md`. +class RemoteContext { + // `uuid` is a UUID string that identifies the remote context and should + // match with the `uuid` parameter of the URL of the remote context. + constructor(uuid) { + this.context_id = uuid; + } + + // Evaluates the script `expr` on the executor. + // - If `expr` is evaluated to a Promise that is resolved with a value: + // `execute_script()` returns a Promise resolved with the value. + // - If `expr` is evaluated to a non-Promise value: + // `execute_script()` returns a Promise resolved with the value. + // - If `expr` throws an error or is evaluated to a Promise that is rejected: + // `execute_script()` returns a rejected Promise with the error's + // `message`. 
+ // Note that currently the type of error (e.g. DOMException) is not + // preserved, except for `TypeError`. + // The values should be able to be serialized by JSON.stringify(). + async execute_script(fn, args) { + const receiver = token(); + await this.send({receiver: receiver, fn: fn.toString(), args: args}); + const response = JSON.parse(await receive(receiver)); + if (response.status === 'success') { + return response.value; + } + + // exception + if (response.name === 'TypeError') { + throw new TypeError(response.value); + } + throw new Error(response.value); + } + + async send(msg) { + return await send(this.context_id, JSON.stringify(msg)); + } +}; + +class Executor { + constructor(uuid) { + this.uuid = uuid; + + // If `suspend_callback` is not `null`, the executor should be suspended + // when there are no ongoing tasks. + this.suspend_callback = null; + + this.execute(); + } + + // Wait until there are no ongoing tasks nor fetch requests for polling + // tasks, and then suspend the executor and call `callback()`. + // Navigation from the executor page should be triggered inside `callback()`, + // to avoid conflict with in-flight fetch requests. + suspend(callback) { + this.suspend_callback = callback; + } + + resume() { + } + + async execute() { + while(true) { + if (this.suspend_callback !== null) { + this.suspend_callback(); + this.suspend_callback = null; + // Wait for `resume()` to be called. + await new Promise(resolve => this.resume = resolve); + + // Workaround for https://crbug.com/1244230. + // Without this workaround, the executor is resumed and the fetch + // request to poll the next task is initiated synchronously from + // pageshow event after the page restored from BFCache, and the fetch + // request promise is never resolved (and thus the test results in + // timeout) due to https://crbug.com/1244230. The root cause is not yet + // known, but setTimeout() with 0ms causes the resume triggered on + // another task and seems to resolve the issue. + await new Promise(resolve => setTimeout(resolve, 0)); + + continue; + } + + const task = JSON.parse(await receive(this.uuid)); + + let response; + try { + const value = await eval(task.fn).apply(null, task.args); + response = JSON.stringify({ + status: 'success', + value: value + }); + } catch(e) { + response = JSON.stringify({ + status: 'exception', + name: e.name, + value: e.message + }); + } + await send(task.receiver, response); + } + } +} diff --git a/test/wpt/tests/common/dispatcher/dispatcher.py b/test/wpt/tests/common/dispatcher/dispatcher.py new file mode 100644 index 0000000..9fe7a38 --- /dev/null +++ b/test/wpt/tests/common/dispatcher/dispatcher.py @@ -0,0 +1,53 @@ +import json +from wptserve.utils import isomorphic_decode + +# A server used to store and retrieve arbitrary data. +# This is used by: ./dispatcher.js +def main(request, response): + # This server is configured so that is accept to receive any requests and + # any cookies the web browser is willing to send. 
+ response.headers.set(b"Access-Control-Allow-Credentials", b"true") + response.headers.set(b'Access-Control-Allow-Methods', b'OPTIONS, GET, POST') + response.headers.set(b'Access-Control-Allow-Headers', b'Content-Type') + response.headers.set(b"Access-Control-Allow-Origin", request.headers.get(b"origin") or '*') + + if b"cacheable" in request.GET: + response.headers.set(b"Cache-Control", b"max-age=31536000") + else: + response.headers.set(b'Cache-Control', b'no-cache, no-store, must-revalidate') + + # CORS preflight + if request.method == u'OPTIONS': + return b'' + + uuid = request.GET[b'uuid'] + stash = request.server.stash; + + # The stash is accessed concurrently by many clients. A lock is used to + # avoid unterleaved read/write from different clients. + with stash.lock: + queue = stash.take(uuid, '/common/dispatcher') or []; + + # Push into the |uuid| queue, the requested headers. + if b"show-headers" in request.GET: + headers = {}; + for key, value in request.headers.items(): + headers[isomorphic_decode(key)] = isomorphic_decode(request.headers[key]) + headers = json.dumps(headers); + queue.append(headers); + ret = b''; + + # Push into the |uuid| queue, the posted data. + elif request.method == u'POST': + queue.append(request.body) + ret = b'done' + + # Pull from the |uuid| queue, the posted data. + else: + if len(queue) == 0: + ret = b'not ready' + else: + ret = queue.pop(0) + + stash.put(uuid, queue, '/common/dispatcher') + return ret; diff --git a/test/wpt/tests/common/dispatcher/executor-service-worker.js b/test/wpt/tests/common/dispatcher/executor-service-worker.js new file mode 100644 index 0000000..0b47d66 --- /dev/null +++ b/test/wpt/tests/common/dispatcher/executor-service-worker.js @@ -0,0 +1,24 @@ +importScripts('./dispatcher.js'); + +const params = new URLSearchParams(location.search); +const uuid = params.get('uuid'); + +// The fetch handler must be registered before parsing the main script response. +// So do it here, for future use. +fetchHandler = () => {} +addEventListener('fetch', e => { + fetchHandler(e); +}); + +// Force ServiceWorker to immediately activate itself. 
+addEventListener('install', event => { + skipWaiting(); +}); + +let executeOrders = async function() { + while(true) { + let task = await receive(uuid); + eval(`(async () => {${task}})()`); + } +}; +executeOrders(); diff --git a/test/wpt/tests/common/dispatcher/executor-worker.js b/test/wpt/tests/common/dispatcher/executor-worker.js new file mode 100644 index 0000000..ea065a6 --- /dev/null +++ b/test/wpt/tests/common/dispatcher/executor-worker.js @@ -0,0 +1,12 @@ +importScripts('./dispatcher.js'); + +const params = new URLSearchParams(location.search); +const uuid = params.get('uuid'); + +let executeOrders = async function() { + while(true) { + let task = await receive(uuid); + eval(`(async () => {${task}})()`); + } +}; +executeOrders(); diff --git a/test/wpt/tests/common/dispatcher/executor.html b/test/wpt/tests/common/dispatcher/executor.html new file mode 100644 index 0000000..5fe6a95 --- /dev/null +++ b/test/wpt/tests/common/dispatcher/executor.html @@ -0,0 +1,15 @@ + + diff --git a/test/wpt/tests/common/dispatcher/remote-executor.html b/test/wpt/tests/common/dispatcher/remote-executor.html new file mode 100644 index 0000000..8b00303 --- /dev/null +++ b/test/wpt/tests/common/dispatcher/remote-executor.html @@ -0,0 +1,12 @@ + + + + + + + + diff --git a/test/wpt/tests/common/domain-setter.sub.html b/test/wpt/tests/common/domain-setter.sub.html new file mode 100644 index 0000000..ad3b9f8 --- /dev/null +++ b/test/wpt/tests/common/domain-setter.sub.html @@ -0,0 +1,8 @@ + + +A page that will likely be same-origin-domain but not same-origin + + diff --git a/test/wpt/tests/common/dummy.xhtml b/test/wpt/tests/common/dummy.xhtml new file mode 100644 index 0000000..dba6945 --- /dev/null +++ b/test/wpt/tests/common/dummy.xhtml @@ -0,0 +1,2 @@ + +Dummy XHTML document diff --git a/test/wpt/tests/common/dummy.xml b/test/wpt/tests/common/dummy.xml new file mode 100644 index 0000000..4a60c30 --- /dev/null +++ b/test/wpt/tests/common/dummy.xml @@ -0,0 +1 @@ +Dummy XML document diff --git a/test/wpt/tests/common/echo.py b/test/wpt/tests/common/echo.py new file mode 100644 index 0000000..911b54a --- /dev/null +++ b/test/wpt/tests/common/echo.py @@ -0,0 +1,6 @@ +def main(request, response): + # Without X-XSS-Protection to disable non-standard XSS protection the functionality this + # resource offers is useless + response.headers.set(b"X-XSS-Protection", b"0") + response.headers.set(b"Content-Type", b"text/html") + response.content = request.GET.first(b"content") diff --git a/test/wpt/tests/common/gc.js b/test/wpt/tests/common/gc.js new file mode 100644 index 0000000..ac43a4c --- /dev/null +++ b/test/wpt/tests/common/gc.js @@ -0,0 +1,52 @@ +/** + * Does a best-effort attempt at invoking garbage collection. Attempts to use + * the standardized `TestUtils.gc()` function, but falls back to other + * environment-specific nonstandard functions, with a final result of just + * creating a lot of garbage (in which case you will get a console warning). + * + * This should generally only be used to attempt to trigger bugs and crashes + * inside tests, i.e. cases where if garbage collection happened, then this + * should not trigger some misbehavior. You cannot rely on garbage collection + * successfully trigger, or that any particular unreachable object will be + * collected. + * + * @returns {Promise} A promise you should await to ensure garbage + * collection has had a chance to complete. 
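+ *
+ * @example
+ * // Illustrative use inside an async test body:
+ * await garbageCollect();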
+ */ +self.garbageCollect = async () => { + // https://testutils.spec.whatwg.org/#the-testutils-namespace + if (self.TestUtils?.gc) { + return TestUtils.gc(); + } + + // Use --expose_gc for V8 (and Node.js) + // to pass this flag at chrome launch use: --js-flags="--expose-gc" + // Exposed in SpiderMonkey shell as well + if (self.gc) { + return self.gc(); + } + + // Present in some WebKit development environments + if (self.GCController) { + return GCController.collect(); + } + + console.warn( + 'Tests are running without the ability to do manual garbage collection. ' + + 'They will still work, but coverage will be suboptimal.'); + + for (var i = 0; i < 1000; i++) { + gcRec(10); + } + + function gcRec(n) { + if (n < 1) { + return {}; + } + + let temp = { i: "ab" + i + i / 100000 }; + temp += "foo"; + + gcRec(n - 1); + } +}; diff --git a/test/wpt/tests/common/get-host-info.sub.js b/test/wpt/tests/common/get-host-info.sub.js new file mode 100644 index 0000000..9b8c2b5 --- /dev/null +++ b/test/wpt/tests/common/get-host-info.sub.js @@ -0,0 +1,63 @@ +/** + * Host information for cross-origin tests. + * @returns {Object} with properties for different host information. + */ +function get_host_info() { + + var HTTP_PORT = '{{ports[http][0]}}'; + var HTTP_PORT2 = '{{ports[http][1]}}'; + var HTTPS_PORT = '{{ports[https][0]}}'; + var HTTPS_PORT2 = '{{ports[https][1]}}'; + var PROTOCOL = self.location.protocol; + var IS_HTTPS = (PROTOCOL == "https:"); + var PORT = IS_HTTPS ? HTTPS_PORT : HTTP_PORT; + var PORT2 = IS_HTTPS ? HTTPS_PORT2 : HTTP_PORT2; + var HTTP_PORT_ELIDED = HTTP_PORT == "80" ? "" : (":" + HTTP_PORT); + var HTTP_PORT2_ELIDED = HTTP_PORT2 == "80" ? "" : (":" + HTTP_PORT2); + var HTTPS_PORT_ELIDED = HTTPS_PORT == "443" ? "" : (":" + HTTPS_PORT); + var PORT_ELIDED = IS_HTTPS ? HTTPS_PORT_ELIDED : HTTP_PORT_ELIDED; + var ORIGINAL_HOST = '{{host}}'; + var REMOTE_HOST = (ORIGINAL_HOST === 'localhost') ? '127.0.0.1' : ('www1.' + ORIGINAL_HOST); + var OTHER_HOST = '{{domains[www2]}}'; + var NOTSAMESITE_HOST = (ORIGINAL_HOST === 'localhost') ? '127.0.0.1' : ('{{hosts[alt][]}}'); + + return { + HTTP_PORT: HTTP_PORT, + HTTP_PORT2: HTTP_PORT2, + HTTPS_PORT: HTTPS_PORT, + HTTPS_PORT2: HTTPS_PORT2, + PORT: PORT, + PORT2: PORT2, + ORIGINAL_HOST: ORIGINAL_HOST, + REMOTE_HOST: REMOTE_HOST, + + ORIGIN: PROTOCOL + "//" + ORIGINAL_HOST + PORT_ELIDED, + HTTP_ORIGIN: 'http://' + ORIGINAL_HOST + HTTP_PORT_ELIDED, + HTTPS_ORIGIN: 'https://' + ORIGINAL_HOST + HTTPS_PORT_ELIDED, + HTTPS_ORIGIN_WITH_CREDS: 'https://foo:bar@' + ORIGINAL_HOST + HTTPS_PORT_ELIDED, + HTTP_ORIGIN_WITH_DIFFERENT_PORT: 'http://' + ORIGINAL_HOST + HTTP_PORT2_ELIDED, + REMOTE_ORIGIN: PROTOCOL + "//" + REMOTE_HOST + PORT_ELIDED, + OTHER_ORIGIN: PROTOCOL + "//" + OTHER_HOST + PORT_ELIDED, + HTTP_REMOTE_ORIGIN: 'http://' + REMOTE_HOST + HTTP_PORT_ELIDED, + HTTP_NOTSAMESITE_ORIGIN: 'http://' + NOTSAMESITE_HOST + HTTP_PORT_ELIDED, + HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT: 'http://' + REMOTE_HOST + HTTP_PORT2_ELIDED, + HTTPS_REMOTE_ORIGIN: 'https://' + REMOTE_HOST + HTTPS_PORT_ELIDED, + HTTPS_REMOTE_ORIGIN_WITH_CREDS: 'https://foo:bar@' + REMOTE_HOST + HTTPS_PORT_ELIDED, + HTTPS_NOTSAMESITE_ORIGIN: 'https://' + NOTSAMESITE_HOST + HTTPS_PORT_ELIDED, + UNAUTHENTICATED_ORIGIN: 'http://' + OTHER_HOST + HTTP_PORT_ELIDED, + AUTHENTICATED_ORIGIN: 'https://' + OTHER_HOST + HTTPS_PORT_ELIDED + }; +} + +/** + * When a default port is used, location.port returns the empty string. 
+ * This function attempts to provide an exact port, assuming we are running under wptserve. + * @param {*} loc - can be Location///URL, but assumes http/https only. + * @returns {string} The port number. + */ +function get_port(loc) { + if (loc.port) { + return loc.port; + } + return loc.protocol === 'https:' ? '443' : '80'; +} diff --git a/test/wpt/tests/common/get-host-info.sub.js.headers b/test/wpt/tests/common/get-host-info.sub.js.headers new file mode 100644 index 0000000..6805c32 --- /dev/null +++ b/test/wpt/tests/common/get-host-info.sub.js.headers @@ -0,0 +1 @@ +Content-Type: text/javascript; charset=utf-8 diff --git a/test/wpt/tests/common/media.js b/test/wpt/tests/common/media.js new file mode 100644 index 0000000..800593f --- /dev/null +++ b/test/wpt/tests/common/media.js @@ -0,0 +1,61 @@ +/** + * Returns the URL of a supported video source based on the user agent + * @param {string} base - media URL without file extension + * @returns {string} + */ +function getVideoURI(base) +{ + var extension = '.mp4'; + + var videotag = document.createElement("video"); + + if ( videotag.canPlayType ) + { + if (videotag.canPlayType('video/webm; codecs="vp9, opus"') ) + { + extension = '.webm'; + } else if ( videotag.canPlayType('video/ogg; codecs="theora, vorbis"') ) + { + extension = '.ogv'; + } + } + + return base + extension; +} + +/** + * Returns the URL of a supported audio source based on the user agent + * @param {string} base - media URL without file extension + * @returns {string} + */ +function getAudioURI(base) +{ + var extension = '.mp3'; + + var audiotag = document.createElement("audio"); + + if ( audiotag.canPlayType && + audiotag.canPlayType('audio/ogg') ) + { + extension = '.oga'; + } + + return base + extension; +} + +/** + * Returns the MIME type for a media URL based on the file extension. + * @param {string} url + * @returns {string} + */ +function getMediaContentType(url) { + var extension = new URL(url, location).pathname.split(".").pop(); + var map = { + "mp4" : "video/mp4", + "ogv" : "application/ogg", + "webm": "video/webm", + "mp3" : "audio/mp3", + "oga" : "application/ogg", + }; + return map[extension]; +} diff --git a/test/wpt/tests/common/media.js.headers b/test/wpt/tests/common/media.js.headers new file mode 100644 index 0000000..6805c32 --- /dev/null +++ b/test/wpt/tests/common/media.js.headers @@ -0,0 +1 @@ +Content-Type: text/javascript; charset=utf-8 diff --git a/test/wpt/tests/common/object-association.js b/test/wpt/tests/common/object-association.js new file mode 100644 index 0000000..669c17c --- /dev/null +++ b/test/wpt/tests/common/object-association.js @@ -0,0 +1,74 @@ +"use strict"; + +// This is for testing whether an object (e.g., a global property) is associated with Window, or +// with Document. Recall that Window and Document are 1:1 except when doing a same-origin navigation +// away from the initial about:blank. In that case the Window object gets reused for the new +// Document. +// +// So: +// - If something is per-Window, then it should maintain its identity across an about:blank +// navigation. +// - If something is per-Document, then it should be recreated across an about:blank navigation. 
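+//
+// Usage sketch (illustrative; `propertyName` names whatever global property the
+// calling test wants to check):
+//
+//   testIsPerWindow(propertyName);   // asserts frame[propertyName] keeps its identity
+//   testIsPerDocument(propertyName); // asserts frame[propertyName] is recreated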
+ +window.testIsPerWindow = propertyName => { + runTests(propertyName, assert_equals, "must not"); +}; + +window.testIsPerDocument = propertyName => { + runTests(propertyName, assert_not_equals, "must"); +}; + +function runTests(propertyName, equalityOrInequalityAsserter, mustOrMustNotReplace) { + async_test(t => { + const iframe = document.createElement("iframe"); + document.body.appendChild(iframe); + const frame = iframe.contentWindow; + + const before = frame[propertyName]; + assert_implements(before, `window.${propertyName} must be implemented`); + + iframe.onload = t.step_func_done(() => { + const after = frame[propertyName]; + equalityOrInequalityAsserter(after, before); + }); + + iframe.src = "/common/blank.html"; + }, `Navigating from the initial about:blank ${mustOrMustNotReplace} replace window.${propertyName}`); + + // Per spec, discarding a browsing context should not change any of the global objects. + test(() => { + const iframe = document.createElement("iframe"); + document.body.appendChild(iframe); + const frame = iframe.contentWindow; + + const before = frame[propertyName]; + assert_implements(before, `window.${propertyName} must be implemented`); + + iframe.remove(); + + const after = frame[propertyName]; + assert_equals(after, before, `window.${propertyName} should not change after iframe.remove()`); + }, `Discarding the browsing context must not change window.${propertyName}`); + + // Per spec, document.open() should not change any of the global objects. In historical versions + // of the spec, it did, so we test here. + async_test(t => { + const iframe = document.createElement("iframe"); + + iframe.onload = t.step_func_done(() => { + const frame = iframe.contentWindow; + const before = frame[propertyName]; + assert_implements(before, `window.${propertyName} must be implemented`); + + frame.document.open(); + + const after = frame[propertyName]; + assert_equals(after, before); + + frame.document.close(); + }); + + iframe.src = "/common/blank.html"; + document.body.appendChild(iframe); + }, `document.open() must not replace window.${propertyName}`); +} diff --git a/test/wpt/tests/common/object-association.js.headers b/test/wpt/tests/common/object-association.js.headers new file mode 100644 index 0000000..6805c32 --- /dev/null +++ b/test/wpt/tests/common/object-association.js.headers @@ -0,0 +1 @@ +Content-Type: text/javascript; charset=utf-8 diff --git a/test/wpt/tests/common/performance-timeline-utils.js b/test/wpt/tests/common/performance-timeline-utils.js new file mode 100644 index 0000000..b20241c --- /dev/null +++ b/test/wpt/tests/common/performance-timeline-utils.js @@ -0,0 +1,56 @@ +/* +author: W3C http://www.w3.org/ +help: http://www.w3.org/TR/navigation-timing/#sec-window.performance-attribute +*/ +var performanceNamespace = window.performance; +var namespace_check = false; +function wp_test(func, msg, properties) +{ + // only run the namespace check once + if (!namespace_check) + { + namespace_check = true; + + if (performanceNamespace === undefined || performanceNamespace == null) + { + // show a single error that window.performance is undefined + // The window.performance attribute provides a hosting area for performance related attributes. 
+ test(function() { assert_true(performanceNamespace !== undefined && performanceNamespace != null, "window.performance is defined and not null"); }, "window.performance is defined and not null."); + } + } + + test(func, msg, properties); +} + +function test_true(value, msg, properties) +{ + wp_test(function () { assert_true(value, msg); }, msg, properties); +} + +function test_equals(value, equals, msg, properties) +{ + wp_test(function () { assert_equals(value, equals, msg); }, msg, properties); +} + +// assert for every entry in `expectedEntries`, there is a matching entry _somewhere_ in `actualEntries` +function test_entries(actualEntries, expectedEntries) { + test_equals(actualEntries.length, expectedEntries.length) + expectedEntries.forEach(function (expectedEntry) { + var foundEntry = actualEntries.find(function (actualEntry) { + return typeof Object.keys(expectedEntry).find(function (key) { + return actualEntry[key] !== expectedEntry[key] + }) === 'undefined' + }) + test_true(!!foundEntry, `Entry ${JSON.stringify(expectedEntry)} could not be found.`) + if (foundEntry) { + assert_object_equals(foundEntry.toJSON(), expectedEntry) + } + }) +} + +function delayedLoadListener(callback) { + window.addEventListener('load', function() { + // TODO(cvazac) Remove this setTimeout when spec enforces sync entries. + step_timeout(callback, 0) + }) +} diff --git a/test/wpt/tests/common/performance-timeline-utils.js.headers b/test/wpt/tests/common/performance-timeline-utils.js.headers new file mode 100644 index 0000000..6805c32 --- /dev/null +++ b/test/wpt/tests/common/performance-timeline-utils.js.headers @@ -0,0 +1 @@ +Content-Type: text/javascript; charset=utf-8 diff --git a/test/wpt/tests/common/proxy-all.sub.pac b/test/wpt/tests/common/proxy-all.sub.pac new file mode 100644 index 0000000..de601e5 --- /dev/null +++ b/test/wpt/tests/common/proxy-all.sub.pac @@ -0,0 +1,3 @@ +function FindProxyForURL(url, host) { + return "PROXY {{host}}:{{ports[http][0]}}" +} diff --git a/test/wpt/tests/common/redirect-opt-in.py b/test/wpt/tests/common/redirect-opt-in.py new file mode 100644 index 0000000..b5e674a --- /dev/null +++ b/test/wpt/tests/common/redirect-opt-in.py @@ -0,0 +1,20 @@ +def main(request, response): + """Simple handler that causes redirection. + + The request should typically have two query parameters: + status - The status to use for the redirection. Defaults to 302. + location - The resource to redirect to. + """ + status = 302 + if b"status" in request.GET: + try: + status = int(request.GET.first(b"status")) + except ValueError: + pass + + response.status = status + + location = request.GET.first(b"location") + + response.headers.set(b"Location", location) + response.headers.set(b"Timing-Allow-Origin", b"*") diff --git a/test/wpt/tests/common/redirect.py b/test/wpt/tests/common/redirect.py new file mode 100644 index 0000000..f2fd1eb --- /dev/null +++ b/test/wpt/tests/common/redirect.py @@ -0,0 +1,19 @@ +def main(request, response): + """Simple handler that causes redirection. + + The request should typically have two query parameters: + status - The status to use for the redirection. Defaults to 302. + location - The resource to redirect to. 
+ """ + status = 302 + if b"status" in request.GET: + try: + status = int(request.GET.first(b"status")) + except ValueError: + pass + + response.status = status + + location = request.GET.first(b"location") + + response.headers.set(b"Location", location) diff --git a/test/wpt/tests/common/refresh.py b/test/wpt/tests/common/refresh.py new file mode 100644 index 0000000..0d30990 --- /dev/null +++ b/test/wpt/tests/common/refresh.py @@ -0,0 +1,11 @@ +def main(request, response): + """ + Respond with a blank HTML document and a `Refresh` header which describes + an immediate redirect to the URL specified by the requests `location` query + string parameter + """ + headers = [ + (b'Content-Type', b'text/html'), + (b'Refresh', b'0; URL=' + request.GET.first(b'location')) + ] + return (200, headers, b'') diff --git a/test/wpt/tests/common/reftest-wait.js b/test/wpt/tests/common/reftest-wait.js new file mode 100644 index 0000000..64fe9bf --- /dev/null +++ b/test/wpt/tests/common/reftest-wait.js @@ -0,0 +1,39 @@ +/** + * Remove the `reftest-wait` class on the document element. + * The reftest runner will wait with taking a screenshot while + * this class is present. + * + * See https://web-platform-tests.org/writing-tests/reftests.html#controlling-when-comparison-occurs + */ +function takeScreenshot() { + document.documentElement.classList.remove("reftest-wait"); +} + +/** + * Call `takeScreenshot()` after a delay of at least |timeout| milliseconds. + * @param {number} timeout - milliseconds + */ +function takeScreenshotDelayed(timeout) { + setTimeout(function() { + takeScreenshot(); + }, timeout); +} + +/** + * Ensure that a precondition is met before waiting for a screenshot. + * @param {bool} condition - Fail the test if this evaluates to false + * @param {string} msg - Error message to write to the screenshot + */ +function failIfNot(condition, msg) { + const fail = () => { + (document.body || document.documentElement).textContent = `Precondition Failed: ${msg}`; + takeScreenshot(); + }; + if (!condition) { + if (document.readyState == "interactive") { + fail(); + } else { + document.addEventListener("DOMContentLoaded", fail, false); + } + } +} diff --git a/test/wpt/tests/common/reftest-wait.js.headers b/test/wpt/tests/common/reftest-wait.js.headers new file mode 100644 index 0000000..6805c32 --- /dev/null +++ b/test/wpt/tests/common/reftest-wait.js.headers @@ -0,0 +1 @@ +Content-Type: text/javascript; charset=utf-8 diff --git a/test/wpt/tests/common/rendering-utils.js b/test/wpt/tests/common/rendering-utils.js new file mode 100644 index 0000000..46283bd --- /dev/null +++ b/test/wpt/tests/common/rendering-utils.js @@ -0,0 +1,19 @@ +"use strict"; + +/** + * Waits until we have at least one frame rendered, regardless of the engine. + * + * @returns {Promise} + */ +function waitForAtLeastOneFrame() { + return new Promise(resolve => { + // Different web engines work slightly different on this area but waiting + // for two requestAnimationFrames() to happen, one after another, should be + // sufficient to ensure at least one frame has been generated anywhere. 
+ window.requestAnimationFrame(() => { + window.requestAnimationFrame(() => { + resolve(); + }); + }); + }); +} diff --git a/test/wpt/tests/common/sab.js b/test/wpt/tests/common/sab.js new file mode 100644 index 0000000..a3ea610 --- /dev/null +++ b/test/wpt/tests/common/sab.js @@ -0,0 +1,21 @@ +const createBuffer = (() => { + // See https://github.com/whatwg/html/issues/5380 for why not `new SharedArrayBuffer()` + let sabConstructor; + try { + sabConstructor = new WebAssembly.Memory({ shared:true, initial:0, maximum:0 }).buffer.constructor; + } catch(e) { + sabConstructor = null; + } + return (type, length, opts) => { + if (type === "ArrayBuffer") { + return new ArrayBuffer(length, opts); + } else if (type === "SharedArrayBuffer") { + if (sabConstructor && sabConstructor.name !== "SharedArrayBuffer") { + throw new Error("WebAssembly.Memory does not support shared:true"); + } + return new sabConstructor(length, opts); + } else { + throw new Error("type has to be ArrayBuffer or SharedArrayBuffer"); + } + } +})(); diff --git a/test/wpt/tests/common/security-features/README.md b/test/wpt/tests/common/security-features/README.md new file mode 100644 index 0000000..f957541 --- /dev/null +++ b/test/wpt/tests/common/security-features/README.md @@ -0,0 +1,460 @@ +This directory contains the common infrastructure for the following tests (also referred below as projects). + +- referrer-policy/ +- mixed-content/ +- upgrade-insecure-requests/ + +Subdirectories: + +- `resources`: + Serves JavaScript test helpers. +- `subresource`: + Serves subresources, with support for redirects, stash, etc. + The subresource paths are managed by `subresourceMap` and + fetched in `requestVia*()` functions in `resources/common.js`. +- `scope`: + Serves nested contexts, such as iframe documents or workers. + Used from `invokeFrom*()` functions in `resources/common.js`. +- `tools`: + Scripts that generate test HTML files. Not used while running tests. +- `/referrer-policy/generic/subresource-test`: + Sanity checking tests for subresource invocation + (This is still placed outside common/) + +# Test generator + +The test generator ([common/security-features/tools/generate.py](tools/generate.py)) generates test HTML files from templates and a seed (`spec.src.json`) that defines all the test scenarios. + +The project (i.e. a WPT subdirectory, for example `referrer-policy/`) that uses the generator should define per-project data and invoke the common generator logic in `common/security-features/tools`. + +This is the overview of the project structure: + +``` +common/security-features/ +└── tools/ - the common test generator logic + ├── spec.src.json + └── template/ - the test files templates +project-directory/ (e.g. referrer-policy/) +├── spec.src.json +├── generic/ +│ ├── test-case.sub.js - Per-project test helper +│ ├── sanity-checker.js (Used by debug target only) +│ └── spec_json.js (Used by debug target only) +└── gen/ - generated tests +``` + +## Generating the tests + +Note: When the repository already contains generated tests, [remove all generated tests](#removing-all-generated-tests) first. + +```bash +# Install json5 module if needed. +pip install --user json5 + +# Generate the test files under gen/ (HTMLs and .headers files). +path/to/common/security-features/tools/generate.py --spec path/to/project-directory/ + +# Add all generated tests to the repo. 
+git add path/to/project-directory/gen/ && git commit -m "Add generated tests" +``` + +This will parse the spec JSON5 files and determine which tests to generate (or skip) while using templates. + +- The default spec JSON5: `common/security-features/tools/spec.src.json`. + - Describes common configurations, such as subresource types, source context types, etc. +- The per-project spec JSON5: `project-directory/spec.src.json`. + - Describes project-specific configurations, particularly those related to test generation patterns (`specification`), policy deliveries (e.g. `delivery_type`, `delivery_value`) and `expectation`. + +For how these two spec JSON5 files are merged, see the [Sub projects](#sub-projects) section. + +Note: `spec.src.json` is transitioning to JSON5 [#21710](https://github.com/web-platform-tests/wpt/issues/21710). + +During the generation, the spec is validated by ```common/security-features/tools/spec_validator.py```. This is especially important when you're making changes to `spec.src.json`. Make sure it's valid JSON (no comments or trailing commas). The validator reports specific errors (missing keys, etc.), if any. + +### Removing all generated tests + +Simply remove all files under `project-directory/gen/`. + +```bash +rm -r path/to/project-directory/gen/ +``` + +### Options for generating tests + +Note: this section is currently obsolete. Only the release template is working. + +The generator script has two targets: ```release``` and ```debug```. + +* Using **release** for the target will produce tests using a template optimized for size and performance. The release template is intended for the official web-platform-tests and possibly other test suites. No sanity checking is done in release mode. Use this option whenever you're checking into web-platform-tests. + +* When generating for ```debug```, the produced tests will contain more verbosity and sanity checks. Use this target to identify problems with the test suites when making changes locally. Make sure you don't check in tests generated with the debug target. + +Note that **release** is the default target when invoking ```generate.py```. + + +## Sub projects + +Projects can be nested, for example to reuse a single `spec.src.json` across similar but slightly different sets of generated tests. +The directory structure would look like: + +``` +project-directory/ (e.g. referrer-policy/) +├── spec.src.json - Parent project's spec JSON +├── generic/ +│ └── test-case.sub.js - Parent project's test helper +├── gen/ - parent project's generated tests +└── sub-project-directory/ (e.g. 4K) + ├── spec.src.json - Child project's spec JSON + ├── generic/ + │ └── test-case.sub.js - Child project's test helper + └── gen/ - child project's generated tests +``` + +`generate.py --spec project-directory/sub-project-directory` generates test files under `project-directory/sub-project-directory/gen`, based on `project-directory/spec.src.json` and `project-directory/sub-project-directory/spec.src.json`. + +- The child project's `spec.src.json` is merged into the parent project's `spec.src.json` (see the sketch below). + - Two spec JSON objects are merged recursively. + - If the same key exists in both objects, the child's value overwrites the parent's value. + - If both (child's and parent's) values are arrays, then the child's value is concatenated to the parent's value. + - For debugging, `generate.py` dumps the merged spec JSON object as `generic/debug-output.spec.src.json`.
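+
+The following minimal sketch illustrates the merge semantics described above; it assumes plain dicts and lists and uses a hypothetical `merge_spec()` helper, while the actual implementation is `merge_json()` in `tools/generate.py`:
+
+```python
+def merge_spec(base, child):
+    """Illustrative only: recursively merge a child spec into a base spec."""
+    for key, child_value in child.items():
+        if key not in base:
+            base[key] = child_value
+        elif isinstance(base[key], dict) and isinstance(child_value, dict):
+            merge_spec(base[key], child_value)   # merge recursively
+        elif isinstance(base[key], list) and isinstance(child_value, list):
+            base[key] = base[key] + child_value  # child's array is concatenated to the parent's
+        else:
+            base[key] = child_value              # child's value overwrites the parent's
+    return base
+```
+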
+- The child project's generated tests include both of the parent and child project's `test-case.sub.js`: + ```html + + + + ``` + + +## Updating the tests + +The main test logic lives in ```project-directory/generic/test-case.sub.js``` with helper functions defined in ```/common/security-features/resources/common.js``` so you should probably start there. + +For updating the test suites you will most likely do **a subset** of the following: + +* Add a new subresource type: + + * Add a new sub-resource python script to `/common/security-features/subresource/`. + * Add a sanity check test for a sub-resource to `referrer-policy/generic/subresource-test/`. + * Add a new entry to `subresourceMap` in `/common/security-features/resources/common.js`. + * Add a new entry to `valid_subresource_names` in `/common/security-features/tools/spec_validator.py`. + * Add a new entry to `subresource_schema` in `spec.src.json`. + * Update `source_context_schema` to specify in which source context the subresource can be used. + +* Add a new subresource redirection type + + * TODO: to be documented. Example: [https://github.com/web-platform-tests/wpt/pull/18939](https://github.com/web-platform-tests/wpt/pull/18939) + +* Add a new subresource origin type + + * TODO: to be documented. Example: [https://github.com/web-platform-tests/wpt/pull/18940](https://github.com/web-platform-tests/wpt/pull/18940) + +* Add a new source context (e.g. "module sharedworker global scope") + + * TODO: to be documented. Example: [https://github.com/web-platform-tests/wpt/pull/18904](https://github.com/web-platform-tests/wpt/pull/18904) + +* Add a new source context list (e.g. "subresource request from a dedicated worker in a ` + invoker: invokeFromIframe, + }, + "iframe": { // + invoker: invokeFromIframe, + }, + "iframe-blank": { // + invoker: invokeFromIframe, + }, + "worker-classic": { + // Classic dedicated worker loaded from same-origin. + invoker: invokeFromWorker.bind(undefined, "worker", false, {}), + }, + "worker-classic-data": { + // Classic dedicated worker loaded from data: URL. + invoker: invokeFromWorker.bind(undefined, "worker", true, {}), + }, + "worker-module": { + // Module dedicated worker loaded from same-origin. + invoker: invokeFromWorker.bind(undefined, "worker", false, {type: 'module'}), + }, + "worker-module-data": { + // Module dedicated worker loaded from data: URL. + invoker: invokeFromWorker.bind(undefined, "worker", true, {type: 'module'}), + }, + "sharedworker-classic": { + // Classic shared worker loaded from same-origin. + invoker: invokeFromWorker.bind(undefined, "sharedworker", false, {}), + }, + "sharedworker-classic-data": { + // Classic shared worker loaded from data: URL. + invoker: invokeFromWorker.bind(undefined, "sharedworker", true, {}), + }, + "sharedworker-module": { + // Module shared worker loaded from same-origin. + invoker: invokeFromWorker.bind(undefined, "sharedworker", false, {type: 'module'}), + }, + "sharedworker-module-data": { + // Module shared worker loaded from data: URL. + invoker: invokeFromWorker.bind(undefined, "sharedworker", true, {type: 'module'}), + }, + }; + + return sourceContextMap[sourceContextList[0].sourceContextType].invoker( + subresource, sourceContextList); +} + +// Quick hack to expose invokeRequest when common.sub.js is loaded either +// as a classic or module script. 
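+// (A module script's top-level declarations are scoped to the module rather
+// than the global object, so the function is attached to `self` explicitly;
+// for a classic script the assignment is redundant but harmless.)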
+self.invokeRequest = invokeRequest; + +/** + invokeFrom*() functions are helper functions with the same parameters + and return values as invokeRequest(), that are tied to specific types + of top-most environment settings objects. + For example, invokeFromIframe() is the helper function for the cases where + sourceContextList[0] is an iframe. +*/ + +/** + @param {string} workerType + "worker" (for dedicated worker) or "sharedworker". + @param {boolean} isDataUrl + true if the worker script is loaded from data: URL. + Otherwise, the script is loaded from same-origin. + @param {object} workerOptions + The `options` argument for Worker constructor. + + Other parameters and return values are the same as those of invokeRequest(). +*/ +function invokeFromWorker(workerType, isDataUrl, workerOptions, + subresource, sourceContextList) { + const currentSourceContext = sourceContextList[0]; + let workerUrl = + "/common/security-features/scope/worker.py?policyDeliveries=" + + encodeURIComponent(JSON.stringify( + currentSourceContext.policyDeliveries || [])); + if (workerOptions.type === 'module') { + workerUrl += "&type=module"; + } + + let promise; + if (isDataUrl) { + promise = fetch(workerUrl) + .then(r => r.text()) + .then(source => { + return 'data:text/javascript;base64,' + btoa(source); + }); + } else { + promise = Promise.resolve(workerUrl); + } + + return promise + .then(url => { + if (workerType === "worker") { + const worker = new Worker(url, workerOptions); + worker.postMessage({subresource: subresource, + sourceContextList: sourceContextList.slice(1)}); + return bindEvents2(worker, "message", worker, "error", window, "error"); + } else if (workerType === "sharedworker") { + const worker = new SharedWorker(url, workerOptions); + worker.port.start(); + worker.port.postMessage({subresource: subresource, + sourceContextList: sourceContextList.slice(1)}); + return bindEvents2(worker.port, "message", worker, "error", window, "error"); + } else { + throw new Error('Invalid worker type: ' + workerType); + } + }) + .then(event => { + if (event.data.error) + return Promise.reject(event.data.error); + return event.data; + }); +} + +function invokeFromIframe(subresource, sourceContextList) { + const currentSourceContext = sourceContextList[0]; + const frameUrl = + "/common/security-features/scope/document.py?policyDeliveries=" + + encodeURIComponent(JSON.stringify( + currentSourceContext.policyDeliveries || [])); + + let iframe; + let promise; + if (currentSourceContext.sourceContextType === 'srcdoc') { + promise = fetch(frameUrl) + .then(r => r.text()) + .then(srcdoc => { + iframe = createElement( + "iframe", {srcdoc: srcdoc}, document.body, true); + return iframe.eventPromise; + }); + } else if (currentSourceContext.sourceContextType === 'iframe') { + iframe = createElement("iframe", {src: frameUrl}, document.body, true); + promise = iframe.eventPromise; + } else if (currentSourceContext.sourceContextType === 'iframe-blank') { + let frameContent; + promise = fetch(frameUrl) + .then(r => r.text()) + .then(t => { + frameContent = t; + iframe = createElement("iframe", {}, document.body, true); + return iframe.eventPromise; + }) + .then(() => { + // Reinitialize `iframe.eventPromise` with a new promise + // that catches the load event for the document.write() below. 
+ bindEvents(iframe); + + iframe.contentDocument.write(frameContent); + iframe.contentDocument.close(); + return iframe.eventPromise; + }); + } + + return promise + .then(() => { + const promise = bindEvents2( + window, "message", iframe, "error", window, "error"); + iframe.contentWindow.postMessage( + {subresource: subresource, + sourceContextList: sourceContextList.slice(1)}, + "*"); + return promise; + }) + .then(event => { + if (event.data.error) + return Promise.reject(event.data.error); + return event.data; + }); +} + +// SanityChecker does nothing in release mode. See sanity-checker.js for debug +// mode. +function SanityChecker() {} +SanityChecker.prototype.checkScenario = function() {}; +SanityChecker.prototype.setFailTimeout = function(test, timeout) {}; +SanityChecker.prototype.checkSubresourceResult = function() {}; diff --git a/test/wpt/tests/common/security-features/resources/common.sub.js.headers b/test/wpt/tests/common/security-features/resources/common.sub.js.headers new file mode 100644 index 0000000..cb762ef --- /dev/null +++ b/test/wpt/tests/common/security-features/resources/common.sub.js.headers @@ -0,0 +1 @@ +Access-Control-Allow-Origin: * diff --git a/test/wpt/tests/common/security-features/scope/__init__.py b/test/wpt/tests/common/security-features/scope/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/common/security-features/scope/document.py b/test/wpt/tests/common/security-features/scope/document.py new file mode 100644 index 0000000..9a9f045 --- /dev/null +++ b/test/wpt/tests/common/security-features/scope/document.py @@ -0,0 +1,36 @@ +import os, sys, json + +from wptserve.utils import isomorphic_decode, isomorphic_encode + +import importlib +util = importlib.import_module("common.security-features.scope.util") + +def main(request, response): + policyDeliveries = json.loads(request.GET.first(b"policyDeliveries", b"[]")) + maybe_additional_headers = {} + meta = u'' + error = u'' + for delivery in policyDeliveries: + if delivery[u'deliveryType'] == u'meta': + if delivery[u'key'] == u'referrerPolicy': + meta += u'' % delivery[u'value'] + else: + error = u'invalid delivery key' + elif delivery[u'deliveryType'] == u'http-rp': + if delivery[u'key'] == u'referrerPolicy': + maybe_additional_headers[b'Referrer-Policy'] = isomorphic_encode(delivery[u'value']) + else: + error = u'invalid delivery key' + else: + error = u'invalid deliveryType' + + handler = lambda: util.get_template(u"document.html.template") % ({ + u"meta": meta, + u"error": error + }) + util.respond( + request, + response, + payload_generator=handler, + content_type=b"text/html", + maybe_additional_headers=maybe_additional_headers) diff --git a/test/wpt/tests/common/security-features/scope/template/document.html.template b/test/wpt/tests/common/security-features/scope/template/document.html.template new file mode 100644 index 0000000..37e29f8 --- /dev/null +++ b/test/wpt/tests/common/security-features/scope/template/document.html.template @@ -0,0 +1,30 @@ + + + + %(meta)s + + + + diff --git a/test/wpt/tests/common/security-features/scope/template/worker.js.template b/test/wpt/tests/common/security-features/scope/template/worker.js.template new file mode 100644 index 0000000..7a2a6e0 --- /dev/null +++ b/test/wpt/tests/common/security-features/scope/template/worker.js.template @@ -0,0 +1,29 @@ +%(import)s + +if ('DedicatedWorkerGlobalScope' in self && + self instanceof DedicatedWorkerGlobalScope) { + self.onmessage = event => onMessageFromParent(event, self); +} else if 
('SharedWorkerGlobalScope' in self && + self instanceof SharedWorkerGlobalScope) { + onconnect = event => { + const port = event.ports[0]; + port.onmessage = event => onMessageFromParent(event, port); + }; +} + +// Receive a message from the parent and start the test. +function onMessageFromParent(event, port) { + const configurationError = "%(error)s"; + if (configurationError.length > 0) { + port.postMessage({error: configurationError}); + return; + } + + invokeRequest(event.data.subresource, + event.data.sourceContextList) + .then(result => port.postMessage(result)) + .catch(e => { + const message = (e.error && e.error.stack) || e.message || "Error"; + port.postMessage({error: message}); + }); +} diff --git a/test/wpt/tests/common/security-features/scope/util.py b/test/wpt/tests/common/security-features/scope/util.py new file mode 100644 index 0000000..da5aacf --- /dev/null +++ b/test/wpt/tests/common/security-features/scope/util.py @@ -0,0 +1,43 @@ +import os + +from wptserve.utils import isomorphic_decode + +def get_template(template_basename): + script_directory = os.path.dirname(os.path.abspath(isomorphic_decode(__file__))) + template_directory = os.path.abspath( + os.path.join(script_directory, u"template")) + template_filename = os.path.join(template_directory, template_basename) + + with open(template_filename, "r") as f: + return f.read() + + +def __noop(request, response): + return u"" + + +def respond(request, + response, + status_code=200, + content_type=b"text/html", + payload_generator=__noop, + cache_control=b"no-cache; must-revalidate", + access_control_allow_origin=b"*", + maybe_additional_headers=None): + response.add_required_headers = False + response.writer.write_status(status_code) + + if access_control_allow_origin != None: + response.writer.write_header(b"access-control-allow-origin", + access_control_allow_origin) + response.writer.write_header(b"content-type", content_type) + response.writer.write_header(b"cache-control", cache_control) + + additional_headers = maybe_additional_headers or {} + for header, value in additional_headers.items(): + response.writer.write_header(header, value) + + response.writer.end_headers() + + payload = payload_generator() + response.writer.write(payload) diff --git a/test/wpt/tests/common/security-features/scope/worker.py b/test/wpt/tests/common/security-features/scope/worker.py new file mode 100644 index 0000000..6b321e7 --- /dev/null +++ b/test/wpt/tests/common/security-features/scope/worker.py @@ -0,0 +1,44 @@ +import os, sys, json + +from wptserve.utils import isomorphic_decode, isomorphic_encode +import importlib +util = importlib.import_module("common.security-features.scope.util") + +def main(request, response): + policyDeliveries = json.loads(request.GET.first(b'policyDeliveries', b'[]')) + worker_type = request.GET.first(b'type', b'classic') + commonjs_url = u'%s://%s:%s/common/security-features/resources/common.sub.js' % ( + request.url_parts.scheme, request.url_parts.hostname, + request.url_parts.port) + if worker_type == b'classic': + import_line = u'importScripts("%s");' % commonjs_url + else: + import_line = u'import "%s";' % commonjs_url + + maybe_additional_headers = {} + error = u'' + for delivery in policyDeliveries: + if delivery[u'deliveryType'] == u'meta': + error = u' cannot be used in WorkerGlobalScope' + elif delivery[u'deliveryType'] == u'http-rp': + if delivery[u'key'] == u'referrerPolicy': + maybe_additional_headers[b'Referrer-Policy'] = isomorphic_encode(delivery[u'value']) + elif delivery[u'key'] == 
u'mixedContent' and delivery[u'value'] == u'opt-in': + maybe_additional_headers[b'Content-Security-Policy'] = b'block-all-mixed-content' + elif delivery[u'key'] == u'upgradeInsecureRequests' and delivery[u'value'] == u'upgrade': + maybe_additional_headers[b'Content-Security-Policy'] = b'upgrade-insecure-requests' + else: + error = u'invalid delivery key for http-rp: %s' % delivery[u'key'] + else: + error = u'invalid deliveryType: %s' % delivery[u'deliveryType'] + + handler = lambda: util.get_template(u'worker.js.template') % ({ + u'import': import_line, + u'error': error + }) + util.respond( + request, + response, + payload_generator=handler, + content_type=b'text/javascript', + maybe_additional_headers=maybe_additional_headers) diff --git a/test/wpt/tests/common/security-features/subresource/__init__.py b/test/wpt/tests/common/security-features/subresource/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/common/security-features/subresource/audio.py b/test/wpt/tests/common/security-features/subresource/audio.py new file mode 100644 index 0000000..f16a0f7 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/audio.py @@ -0,0 +1,18 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(request, server_data): + file = os.path.join(request.doc_root, u"webaudio", u"resources", + u"sin_440Hz_-6dBFS_1s.wav") + return open(file, "rb").read() + + +def main(request, response): + handler = lambda data: generate_payload(request, data) + subresource.respond(request, + response, + payload_generator = handler, + access_control_allow_origin = b"*", + content_type = b"audio/wav") diff --git a/test/wpt/tests/common/security-features/subresource/document.py b/test/wpt/tests/common/security-features/subresource/document.py new file mode 100644 index 0000000..52b684a --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/document.py @@ -0,0 +1,12 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(server_data): + return subresource.get_template(u"document.html.template") % server_data + +def main(request, response): + subresource.respond(request, + response, + payload_generator = generate_payload) diff --git a/test/wpt/tests/common/security-features/subresource/empty.py b/test/wpt/tests/common/security-features/subresource/empty.py new file mode 100644 index 0000000..312e12c --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/empty.py @@ -0,0 +1,14 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(server_data): + return u'' + +def main(request, response): + subresource.respond(request, + response, + payload_generator = generate_payload, + access_control_allow_origin = b"*", + content_type = b"text/plain") diff --git a/test/wpt/tests/common/security-features/subresource/font.py b/test/wpt/tests/common/security-features/subresource/font.py new file mode 100644 index 0000000..7900079 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/font.py @@ -0,0 +1,76 @@ +import os, sys +from base64 import decodebytes + +from wptserve.utils import isomorphic_decode +import importlib 
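+# The "security-features" directory name contains a hyphen, so the helper module
+# cannot be reached with a regular import statement; importlib.import_module()
+# loads it from its dotted path given as a string instead.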
+subresource = importlib.import_module("common.security-features.subresource.subresource") + + +def generate_payload(request, server_data): + data = (u'{"headers": %(headers)s}') % server_data + if b"id" in request.GET: + request.server.stash.put(request.GET[b"id"], data) + # Simple base64 encoded .tff font + return decodebytes(b"AAEAAAANAIAAAwBQRkZUTU6u6MkAAAXcAAAAHE9TLzJWYW" + b"QKAAABWAAAAFZjbWFwAA8D7wAAAcAAAAFCY3Z0IAAhAnkA" + b"AAMEAAAABGdhc3D//wADAAAF1AAAAAhnbHlmCC6aTwAAAx" + b"QAAACMaGVhZO8ooBcAAADcAAAANmhoZWEIkAV9AAABFAAA" + b"ACRobXR4EZQAhQAAAbAAAAAQbG9jYQBwAFQAAAMIAAAACm" + b"1heHAASQA9AAABOAAAACBuYW1lehAVOgAAA6AAAAIHcG9z" + b"dP+uADUAAAWoAAAAKgABAAAAAQAAMhPyuV8PPPUACwPoAA" + b"AAAMU4Lm0AAAAAxTgubQAh/5wFeAK8AAAACAACAAAAAAAA" + b"AAEAAAK8/5wAWgXcAAAAAAV4AAEAAAAAAAAAAAAAAAAAAA" + b"AEAAEAAAAEAAwAAwAAAAAAAgAAAAEAAQAAAEAALgAAAAAA" + b"AQXcAfQABQAAAooCvAAAAIwCigK8AAAB4AAxAQIAAAIABg" + b"kAAAAAAAAAAAABAAAAAAAAAAAAAAAAUGZFZABAAEEAQQMg" + b"/zgAWgK8AGQAAAABAAAAAAAABdwAIQAAAAAF3AAABdwAZA" + b"AAAAMAAAADAAAAHAABAAAAAAA8AAMAAQAAABwABAAgAAAA" + b"BAAEAAEAAABB//8AAABB////wgABAAAAAAAAAQYAAAEAAA" + b"AAAAAAAQIAAAACAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAA" + b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwAAAAAAAA" + b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + b"AAAAAAAAAAAAAAAAAAAhAnkAAAAqACoAKgBGAAAAAgAhAA" + b"ABKgKaAAMABwAusQEALzyyBwQA7TKxBgXcPLIDAgDtMgCx" + b"AwAvPLIFBADtMrIHBgH8PLIBAgDtMjMRIREnMxEjIQEJ6M" + b"fHApr9ZiECWAAAAwBk/5wFeAK8AAMABwALAAABNSEVATUh" + b"FQE1IRUB9AH0/UQDhPu0BRQB9MjI/tTIyP7UyMgAAAAAAA" + b"4ArgABAAAAAAAAACYATgABAAAAAAABAAUAgQABAAAAAAAC" + b"AAYAlQABAAAAAAADACEA4AABAAAAAAAEAAUBDgABAAAAAA" + b"AFABABNgABAAAAAAAGAAUBUwADAAEECQAAAEwAAAADAAEE" + b"CQABAAoAdQADAAEECQACAAwAhwADAAEECQADAEIAnAADAA" + b"EECQAEAAoBAgADAAEECQAFACABFAADAAEECQAGAAoBRwBD" + b"AG8AcAB5AHIAaQBnAGgAdAAgACgAYwApACAAMgAwADAAOA" + b"AgAE0AbwB6AGkAbABsAGEAIABDAG8AcgBwAG8AcgBhAHQA" + b"aQBvAG4AAENvcHlyaWdodCAoYykgMjAwOCBNb3ppbGxhIE" + b"NvcnBvcmF0aW9uAABNAGEAcgBrAEEAAE1hcmtBAABNAGUA" + b"ZABpAHUAbQAATWVkaXVtAABGAG8AbgB0AEYAbwByAGcAZQ" + b"AgADIALgAwACAAOgAgAE0AYQByAGsAQQAgADoAIAA1AC0A" + b"MQAxAC0AMgAwADAAOAAARm9udEZvcmdlIDIuMCA6IE1hcm" + b"tBIDogNS0xMS0yMDA4AABNAGEAcgBrAEEAAE1hcmtBAABW" + b"AGUAcgBzAGkAbwBuACAAMAAwADEALgAwADAAMAAgAABWZX" + b"JzaW9uIDAwMS4wMDAgAABNAGEAcgBrAEEAAE1hcmtBAAAA" + b"AgAAAAAAAP+DADIAAAABAAAAAAAAAAAAAAAAAAAAAAAEAA" + b"AAAQACACQAAAAAAAH//wACAAAAAQAAAADEPovuAAAAAMU4" + b"Lm0AAAAAxTgubQ==") + +def generate_report_headers_payload(request, server_data): + stashed_data = request.server.stash.take(request.GET[b"id"]) + return stashed_data + +def main(request, response): + handler = lambda data: generate_payload(request, data) + content_type = b'application/x-font-truetype' + + if b"report-headers" in request.GET: + handler = lambda data: generate_report_headers_payload(request, data) + content_type = b'application/json' + + subresource.respond(request, + response, + payload_generator = handler, + content_type = content_type, + access_control_allow_origin = b"*") diff --git a/test/wpt/tests/common/security-features/subresource/image.py b/test/wpt/tests/common/security-features/subresource/image.py new file mode 100644 index 0000000..5c9a0c0 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/image.py @@ -0,0 +1,116 @@ +import os, sys, array, math + +from io import BytesIO + 
+from wptserve.utils import isomorphic_decode + +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +class Image: + """This class partially implements the interface of the PIL.Image.Image. + One day in the future WPT might support the PIL module or another imaging + library, so this hacky BMP implementation will no longer be required. + """ + def __init__(self, width, height): + self.width = width + self.height = height + self.img = bytearray([0 for i in range(3 * width * height)]) + + @staticmethod + def new(mode, size, color=0): + return Image(size[0], size[1]) + + def _int_to_bytes(self, number): + packed_bytes = [0, 0, 0, 0] + for i in range(4): + packed_bytes[i] = number & 0xFF + number >>= 8 + + return packed_bytes + + def putdata(self, color_data): + for y in range(self.height): + for x in range(self.width): + i = x + y * self.width + if i > len(color_data) - 1: + return + + self.img[i * 3: i * 3 + 3] = color_data[i][::-1] + + def save(self, f, type): + assert type == "BMP" + # 54 bytes of preambule + image color data. + filesize = 54 + 3 * self.width * self.height + # 14 bytes of header. + bmpfileheader = bytearray([ord('B'), ord('M')] + self._int_to_bytes(filesize) + + [0, 0, 0, 0, 54, 0, 0, 0]) + # 40 bytes of info. + bmpinfoheader = bytearray([40, 0, 0, 0] + + self._int_to_bytes(self.width) + + self._int_to_bytes(self.height) + + [1, 0, 24] + (25 * [0])) + + padlength = (4 - (self.width * 3) % 4) % 4 + bmppad = bytearray([0, 0, 0]) + padding = bmppad[0 : padlength] + + f.write(bmpfileheader) + f.write(bmpinfoheader) + + for i in range(self.height): + offset = self.width * (self.height - i - 1) * 3 + f.write(self.img[offset : offset + 3 * self.width]) + f.write(padding) + +def encode_string_as_bmp_image(string_data): + data_bytes = array.array("B", string_data.encode("utf-8")) + + num_bytes = len(data_bytes) + + # Encode data bytes to color data (RGB), one bit per channel. + # This is to avoid errors due to different color spaces used in decoding. + color_data = [] + for byte in data_bytes: + p = [int(x) * 255 for x in '{0:08b}'.format(byte)] + color_data.append((p[0], p[1], p[2])) + color_data.append((p[3], p[4], p[5])) + color_data.append((p[6], p[7], 0)) + + # Render image. + num_pixels = len(color_data) + sqrt = int(math.ceil(math.sqrt(num_pixels))) + img = Image.new("RGB", (sqrt, sqrt), "black") + img.putdata(color_data) + + # Flush image to string. 
+ f = BytesIO() + img.save(f, "BMP") + f.seek(0) + + return f.read() + +def generate_payload(request, server_data): + data = (u'{"headers": %(headers)s}') % server_data + if b"id" in request.GET: + request.server.stash.put(request.GET[b"id"], data) + data = encode_string_as_bmp_image(data) + return data + +def generate_report_headers_payload(request, server_data): + stashed_data = request.server.stash.take(request.GET[b"id"]) + return stashed_data + +def main(request, response): + handler = lambda data: generate_payload(request, data) + content_type = b'image/bmp' + + if b"report-headers" in request.GET: + handler = lambda data: generate_report_headers_payload(request, data) + content_type = b'application/json' + + subresource.respond(request, + response, + payload_generator = handler, + content_type = content_type, + access_control_allow_origin = b"*") diff --git a/test/wpt/tests/common/security-features/subresource/referrer.py b/test/wpt/tests/common/security-features/subresource/referrer.py new file mode 100644 index 0000000..e366314 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/referrer.py @@ -0,0 +1,4 @@ +def main(request, response): + referrer = request.headers.get(b"referer", b"") + response_headers = [(b"Content-Type", b"text/javascript")] + return (200, response_headers, b"window.referrer = '" + referrer + b"'") diff --git a/test/wpt/tests/common/security-features/subresource/script.py b/test/wpt/tests/common/security-features/subresource/script.py new file mode 100644 index 0000000..9701816 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/script.py @@ -0,0 +1,14 @@ +import os, sys +from wptserve.utils import isomorphic_decode + +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(server_data): + return subresource.get_template(u"script.js.template") % server_data + +def main(request, response): + subresource.respond(request, + response, + payload_generator = generate_payload, + content_type = b"application/javascript") diff --git a/test/wpt/tests/common/security-features/subresource/shared-worker.py b/test/wpt/tests/common/security-features/subresource/shared-worker.py new file mode 100644 index 0000000..bdfb61b --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/shared-worker.py @@ -0,0 +1,13 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(server_data): + return subresource.get_template(u"shared-worker.js.template") % server_data + +def main(request, response): + subresource.respond(request, + response, + payload_generator = generate_payload, + content_type = b"application/javascript") diff --git a/test/wpt/tests/common/security-features/subresource/static-import.py b/test/wpt/tests/common/security-features/subresource/static-import.py new file mode 100644 index 0000000..3c3a6f6 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/static-import.py @@ -0,0 +1,61 @@ +import os, sys, json +from urllib.parse import unquote + +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def get_csp_value(value): + ''' + Returns actual CSP header values (e.g. "worker-src 'self'") for the + given string used in PolicyDelivery's value (e.g. "worker-src-self"). 
+ ''' + + # script-src + # Test-related scripts like testharness.js and inline scripts containing + # test bodies. + # 'unsafe-inline' is added as a workaround here. This is probably not so + # bad, as it shouldn't intefere non-inline-script requests that we want to + # test. + if value == 'script-src-wildcard': + return "script-src * 'unsafe-inline'" + if value == 'script-src-self': + return "script-src 'self' 'unsafe-inline'" + # Workaround for "script-src 'none'" would be more complicated, because + # - "script-src 'none' 'unsafe-inline'" is handled somehow differently from + # "script-src 'none'", i.e. + # https://w3c.github.io/webappsec-csp/#match-url-to-source-list Step 3 + # handles the latter but not the former. + # - We need nonce- or path-based additional values to allow same-origin + # test scripts like testharness.js. + # Therefore, we disable 'script-src-none' tests for now in + # `/content-security-policy/spec.src.json`. + if value == 'script-src-none': + return "script-src 'none'" + + # worker-src + if value == 'worker-src-wildcard': + return 'worker-src *' + if value == 'worker-src-self': + return "worker-src 'self'" + if value == 'worker-src-none': + return "worker-src 'none'" + raise Exception('Invalid delivery_value: %s' % value) + +def generate_payload(request): + import_url = unquote(isomorphic_decode(request.GET[b'import_url'])) + return subresource.get_template(u"static-import.js.template") % { + u"import_url": import_url + } + +def main(request, response): + def payload_generator(_): return generate_payload(request) + maybe_additional_headers = {} + if b'contentSecurityPolicy' in request.GET: + csp = unquote(isomorphic_decode(request.GET[b'contentSecurityPolicy'])) + maybe_additional_headers[b'Content-Security-Policy'] = get_csp_value(csp) + subresource.respond(request, + response, + payload_generator = payload_generator, + content_type = b"application/javascript", + maybe_additional_headers = maybe_additional_headers) diff --git a/test/wpt/tests/common/security-features/subresource/stylesheet.py b/test/wpt/tests/common/security-features/subresource/stylesheet.py new file mode 100644 index 0000000..05db249 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/stylesheet.py @@ -0,0 +1,61 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(request, server_data): + data = (u'{"headers": %(headers)s}') % server_data + type = b'image' + if b"type" in request.GET: + type = request.GET[b"type"] + + if b"id" in request.GET: + request.server.stash.put(request.GET[b"id"], data) + + if type == b'image': + return subresource.get_template(u"image.css.template") % {u"id": isomorphic_decode(request.GET[b"id"])} + + elif type == b'font': + return subresource.get_template(u"font.css.template") % {u"id": isomorphic_decode(request.GET[b"id"])} + + elif type == b'svg': + return subresource.get_template(u"svg.css.template") % { + u"id": isomorphic_decode(request.GET[b"id"]), + u"property": isomorphic_decode(request.GET[b"property"])} + + # A `'stylesheet-only'`-type stylesheet has no nested resources; this is + # useful in tests that cover referrers for stylesheet fetches (e.g. fetches + # triggered by `@import` statements). 
+ elif type == b'stylesheet-only': + return u'' + +def generate_import_rule(request, server_data): + return u"@import url('%(url)s');" % { + u"url": subresource.create_url(request, swap_origin=True, + query_parameter_to_remove=u"import-rule") + } + +def generate_report_headers_payload(request, server_data): + stashed_data = request.server.stash.take(request.GET[b"id"]) + return stashed_data + +def main(request, response): + payload_generator = lambda data: generate_payload(request, data) + content_type = b"text/css" + referrer_policy = b"unsafe-url" + if b"import-rule" in request.GET: + payload_generator = lambda data: generate_import_rule(request, data) + + if b"report-headers" in request.GET: + payload_generator = lambda data: generate_report_headers_payload(request, data) + content_type = b'application/json' + + if b"referrer-policy" in request.GET: + referrer_policy = request.GET[b"referrer-policy"] + + subresource.respond( + request, + response, + payload_generator = payload_generator, + content_type = content_type, + maybe_additional_headers = { b"Referrer-Policy": referrer_policy }) diff --git a/test/wpt/tests/common/security-features/subresource/subresource.py b/test/wpt/tests/common/security-features/subresource/subresource.py new file mode 100644 index 0000000..b3c055a --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/subresource.py @@ -0,0 +1,199 @@ +import os, json +from urllib.parse import parse_qsl, SplitResult, urlencode, urlsplit, urlunsplit + +from wptserve.utils import isomorphic_decode, isomorphic_encode + +def get_template(template_basename): + script_directory = os.path.dirname(os.path.abspath(isomorphic_decode(__file__))) + template_directory = os.path.abspath(os.path.join(script_directory, + u"template")) + template_filename = os.path.join(template_directory, template_basename) + + with open(template_filename, "r") as f: + return f.read() + + +def redirect(url, response): + response.add_required_headers = False + response.writer.write_status(301) + response.writer.write_header(b"access-control-allow-origin", b"*") + response.writer.write_header(b"location", isomorphic_encode(url)) + response.writer.end_headers() + response.writer.write(u"") + + +# TODO(kristijanburnik): subdomain_prefix is a hardcoded value aligned with +# referrer-policy-test-case.js. The prefix should be configured in one place. +def __get_swapped_origin_netloc(netloc, subdomain_prefix = u"www1."): + if netloc.startswith(subdomain_prefix): + return netloc[len(subdomain_prefix):] + else: + return subdomain_prefix + netloc + + +# Creates a URL (typically a redirect target URL) that is the same as the +# current request URL `request.url`, except for: +# - When `swap_scheme` or `swap_origin` is True, its scheme/origin is changed +# to the other one. (http <-> https, ws <-> wss, etc.) +# - For `downgrade`, we redirect to a URL that would be successfully loaded +# if and only if upgrade-insecure-request is applied. +# - `query_parameter_to_remove` parameter is removed from query part. +# Its default is "redirection" to avoid redirect loops. 
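+# Example (hypothetical host and port): with swap_origin=True, a request to
+#   http://www1.example.test:8000/page?redirection=swap-origin&x=1
+# produces the redirect target
+#   http://example.test:8000/page?x=1
+# i.e. the "www1." prefix is toggled and the "redirection" parameter is removed.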
+def create_url(request, + swap_scheme=False, + swap_origin=False, + downgrade=False, + query_parameter_to_remove=u"redirection"): + parsed = urlsplit(request.url) + destination_netloc = parsed.netloc + + scheme = parsed.scheme + if swap_scheme: + scheme = u"http" if parsed.scheme == u"https" else u"https" + hostname = parsed.netloc.split(u':')[0] + port = request.server.config[u"ports"][scheme][0] + destination_netloc = u":".join([hostname, str(port)]) + + if downgrade: + # These rely on some unintuitive cleverness due to WPT's test setup: + # 'Upgrade-Insecure-Requests' does not upgrade the port number, + # so we use URLs in the form `http://[domain]:[https-port]`, + # which will be upgraded to `https://[domain]:[https-port]`. + # If the upgrade fails, the load will fail, as we don't serve HTTP over + # the secure port. + if parsed.scheme == u"https": + scheme = u"http" + elif parsed.scheme == u"wss": + scheme = u"ws" + else: + raise ValueError(u"Downgrade redirection: Invalid scheme '%s'" % + parsed.scheme) + hostname = parsed.netloc.split(u':')[0] + port = request.server.config[u"ports"][parsed.scheme][0] + destination_netloc = u":".join([hostname, str(port)]) + + if swap_origin: + destination_netloc = __get_swapped_origin_netloc(destination_netloc) + + parsed_query = parse_qsl(parsed.query, keep_blank_values=True) + parsed_query = [x for x in parsed_query if x[0] != query_parameter_to_remove] + + destination_url = urlunsplit(SplitResult( + scheme = scheme, + netloc = destination_netloc, + path = parsed.path, + query = urlencode(parsed_query), + fragment = None)) + + return destination_url + + +def preprocess_redirection(request, response): + if b"redirection" not in request.GET: + return False + + redirection = request.GET[b"redirection"] + + if redirection == b"no-redirect": + return False + elif redirection == b"keep-scheme": + redirect_url = create_url(request, swap_scheme=False) + elif redirection == b"swap-scheme": + redirect_url = create_url(request, swap_scheme=True) + elif redirection == b"downgrade": + redirect_url = create_url(request, downgrade=True) + elif redirection == b"keep-origin": + redirect_url = create_url(request, swap_origin=False) + elif redirection == b"swap-origin": + redirect_url = create_url(request, swap_origin=True) + else: + raise ValueError(u"Invalid redirection type '%s'" % isomorphic_decode(redirection)) + + redirect(redirect_url, response) + return True + + +def preprocess_stash_action(request, response): + if b"action" not in request.GET: + return False + + action = request.GET[b"action"] + + key = request.GET[b"key"] + stash = request.server.stash + path = request.GET[b"path"] if b"path" in request.GET \ + else isomorphic_encode(request.url.split(u'?')[0]) + + if action == b"put": + value = isomorphic_decode(request.GET[b"value"]) + stash.take(key=key, path=path) + stash.put(key=key, value=value, path=path) + response_data = json.dumps({u"status": u"success", u"result": isomorphic_decode(key)}) + elif action == b"purge": + value = stash.take(key=key, path=path) + return False + elif action == b"take": + value = stash.take(key=key, path=path) + if value is None: + status = u"allowed" + else: + status = u"blocked" + response_data = json.dumps({u"status": status, u"result": value}) + else: + return False + + response.add_required_headers = False + response.writer.write_status(200) + response.writer.write_header(b"content-type", b"text/javascript") + response.writer.write_header(b"cache-control", b"no-cache; must-revalidate") + 
response.writer.end_headers() + response.writer.write(response_data) + return True + + +def __noop(request, response): + return u"" + + +def respond(request, + response, + status_code = 200, + content_type = b"text/html", + payload_generator = __noop, + cache_control = b"no-cache; must-revalidate", + access_control_allow_origin = b"*", + maybe_additional_headers = None): + if preprocess_redirection(request, response): + return + + if preprocess_stash_action(request, response): + return + + response.add_required_headers = False + response.writer.write_status(status_code) + + if access_control_allow_origin != None: + response.writer.write_header(b"access-control-allow-origin", + access_control_allow_origin) + response.writer.write_header(b"content-type", content_type) + response.writer.write_header(b"cache-control", cache_control) + + additional_headers = maybe_additional_headers or {} + for header, value in additional_headers.items(): + response.writer.write_header(header, value) + + response.writer.end_headers() + + new_headers = {} + new_val = [] + for key, val in request.headers.items(): + if len(val) == 1: + new_val = isomorphic_decode(val[0]) + else: + new_val = [isomorphic_decode(x) for x in val] + new_headers[isomorphic_decode(key)] = new_val + + server_data = {u"headers": json.dumps(new_headers, indent = 4)} + + payload = payload_generator(server_data) + response.writer.write(payload) diff --git a/test/wpt/tests/common/security-features/subresource/svg.py b/test/wpt/tests/common/security-features/subresource/svg.py new file mode 100644 index 0000000..9c569e3 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/svg.py @@ -0,0 +1,37 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(request, server_data): + data = (u'{"headers": %(headers)s}') % server_data + if b"id" in request.GET: + with request.server.stash.lock: + request.server.stash.take(request.GET[b"id"]) + request.server.stash.put(request.GET[b"id"], data) + return u"" + +def generate_payload_embedded(request, server_data): + return subresource.get_template(u"svg.embedded.template") % { + u"id": isomorphic_decode(request.GET[b"id"]), + u"property": isomorphic_decode(request.GET[b"property"])} + +def generate_report_headers_payload(request, server_data): + stashed_data = request.server.stash.take(request.GET[b"id"]) + return stashed_data + +def main(request, response): + handler = lambda data: generate_payload(request, data) + content_type = b'image/svg+xml' + + if b"embedded-svg" in request.GET: + handler = lambda data: generate_payload_embedded(request, data) + + if b"report-headers" in request.GET: + handler = lambda data: generate_report_headers_payload(request, data) + content_type = b'application/json' + + subresource.respond(request, + response, + payload_generator = handler, + content_type = content_type) diff --git a/test/wpt/tests/common/security-features/subresource/template/document.html.template b/test/wpt/tests/common/security-features/subresource/template/document.html.template new file mode 100644 index 0000000..141711c --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/document.html.template @@ -0,0 +1,16 @@ + + + + This page reports back it's request details to the parent frame + + + + + diff --git a/test/wpt/tests/common/security-features/subresource/template/font.css.template 
b/test/wpt/tests/common/security-features/subresource/template/font.css.template new file mode 100644 index 0000000..9d1e9c4 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/font.css.template @@ -0,0 +1,9 @@ +@font-face { + font-family: 'wpt'; + font-style: normal; + font-weight: normal; + src: url(/common/security-features/subresource/font.py?id=%(id)s) format('truetype'); +} +body { + font-family: 'wpt'; +} diff --git a/test/wpt/tests/common/security-features/subresource/template/image.css.template b/test/wpt/tests/common/security-features/subresource/template/image.css.template new file mode 100644 index 0000000..dfe41f1 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/image.css.template @@ -0,0 +1,3 @@ +div.styled::before { + content:url(/common/security-features/subresource/image.py?id=%(id)s) +} diff --git a/test/wpt/tests/common/security-features/subresource/template/script.js.template b/test/wpt/tests/common/security-features/subresource/template/script.js.template new file mode 100644 index 0000000..e2edf21 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/script.js.template @@ -0,0 +1,3 @@ +postMessage({ + "headers": %(headers)s +}, "*"); diff --git a/test/wpt/tests/common/security-features/subresource/template/shared-worker.js.template b/test/wpt/tests/common/security-features/subresource/template/shared-worker.js.template new file mode 100644 index 0000000..c3f109e --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/shared-worker.js.template @@ -0,0 +1,5 @@ +onconnect = function(e) { + e.ports[0].postMessage({ + "headers": %(headers)s + }); +}; diff --git a/test/wpt/tests/common/security-features/subresource/template/static-import.js.template b/test/wpt/tests/common/security-features/subresource/template/static-import.js.template new file mode 100644 index 0000000..095459b --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/static-import.js.template @@ -0,0 +1 @@ +import '%(import_url)s'; diff --git a/test/wpt/tests/common/security-features/subresource/template/svg.css.template b/test/wpt/tests/common/security-features/subresource/template/svg.css.template new file mode 100644 index 0000000..c2e509c --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/svg.css.template @@ -0,0 +1,3 @@ +path { + %(property)s: url(/common/security-features/subresource/svg.py?id=%(id)s#invalidFragment); +} diff --git a/test/wpt/tests/common/security-features/subresource/template/svg.embedded.template b/test/wpt/tests/common/security-features/subresource/template/svg.embedded.template new file mode 100644 index 0000000..5986c48 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/svg.embedded.template @@ -0,0 +1,5 @@ + + + + + diff --git a/test/wpt/tests/common/security-features/subresource/template/worker.js.template b/test/wpt/tests/common/security-features/subresource/template/worker.js.template new file mode 100644 index 0000000..817dd8c --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/template/worker.js.template @@ -0,0 +1,3 @@ +postMessage({ + "headers": %(headers)s +}); diff --git a/test/wpt/tests/common/security-features/subresource/video.py b/test/wpt/tests/common/security-features/subresource/video.py new file mode 100644 index 0000000..7cfbbfa --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/video.py @@ -0,0 +1,17 @@ +import os, sys +from 
wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(request, server_data): + file = os.path.join(request.doc_root, u"media", u"movie_5.ogv") + return open(file, "rb").read() + + +def main(request, response): + handler = lambda data: generate_payload(request, data) + subresource.respond(request, + response, + payload_generator = handler, + access_control_allow_origin = b"*", + content_type = b"video/ogg") diff --git a/test/wpt/tests/common/security-features/subresource/worker.py b/test/wpt/tests/common/security-features/subresource/worker.py new file mode 100644 index 0000000..f655633 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/worker.py @@ -0,0 +1,13 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(server_data): + return subresource.get_template(u"worker.js.template") % server_data + +def main(request, response): + subresource.respond(request, + response, + payload_generator = generate_payload, + content_type = b"application/javascript") diff --git a/test/wpt/tests/common/security-features/subresource/xhr.py b/test/wpt/tests/common/security-features/subresource/xhr.py new file mode 100644 index 0000000..75921e9 --- /dev/null +++ b/test/wpt/tests/common/security-features/subresource/xhr.py @@ -0,0 +1,16 @@ +import os, sys +from wptserve.utils import isomorphic_decode +import importlib +subresource = importlib.import_module("common.security-features.subresource.subresource") + +def generate_payload(server_data): + data = (u'{"headers": %(headers)s}') % server_data + return data + +def main(request, response): + subresource.respond(request, + response, + payload_generator = generate_payload, + access_control_allow_origin = b"*", + content_type = b"application/json", + cache_control = b"no-store") diff --git a/test/wpt/tests/common/security-features/tools/format_spec_src_json.py b/test/wpt/tests/common/security-features/tools/format_spec_src_json.py new file mode 100644 index 0000000..d1bf581 --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/format_spec_src_json.py @@ -0,0 +1,24 @@ +import collections +import json +import os + + +def main(): + '''Formats spec.src.json.''' + script_directory = os.path.dirname(os.path.abspath(__file__)) + for dir in [ + 'mixed-content', 'referrer-policy', 'referrer-policy/4K-1', + 'referrer-policy/4K', 'referrer-policy/4K+1', + 'upgrade-insecure-requests' + ]: + filename = os.path.join(script_directory, '..', '..', '..', dir, + 'spec.src.json') + spec = json.load( + open(filename, 'r'), object_pairs_hook=collections.OrderedDict) + with open(filename, 'w') as f: + f.write(json.dumps(spec, indent=2, separators=(',', ': '))) + f.write('\n') + + +if __name__ == '__main__': + main() diff --git a/test/wpt/tests/common/security-features/tools/generate.py b/test/wpt/tests/common/security-features/tools/generate.py new file mode 100644 index 0000000..409b4f1 --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/generate.py @@ -0,0 +1,462 @@ +#!/usr/bin/env python3 + +import argparse +import collections +import copy +import json +import os +import sys + +import spec_validator +import util + + +def expand_pattern(expansion_pattern, test_expansion_schema): + expansion = {} + for artifact_key in expansion_pattern: + artifact_value = 
expansion_pattern[artifact_key] + if artifact_value == '*': + expansion[artifact_key] = test_expansion_schema[artifact_key] + elif isinstance(artifact_value, list): + expansion[artifact_key] = artifact_value + elif isinstance(artifact_value, dict): + # Flattened expansion. + expansion[artifact_key] = [] + values_dict = expand_pattern(artifact_value, + test_expansion_schema[artifact_key]) + for sub_key in values_dict.keys(): + expansion[artifact_key] += values_dict[sub_key] + else: + expansion[artifact_key] = [artifact_value] + + return expansion + + +def permute_expansion(expansion, + artifact_order, + selection={}, + artifact_index=0): + assert isinstance(artifact_order, list), "artifact_order should be a list" + + if artifact_index >= len(artifact_order): + yield selection + return + + artifact_key = artifact_order[artifact_index] + + for artifact_value in expansion[artifact_key]: + selection[artifact_key] = artifact_value + for next_selection in permute_expansion(expansion, artifact_order, + selection, artifact_index + 1): + yield next_selection + + +# Dumps the test config `selection` into a serialized JSON string. +def dump_test_parameters(selection): + return json.dumps( + selection, + indent=2, + separators=(',', ': '), + sort_keys=True, + cls=util.CustomEncoder) + + +def get_test_filename(spec_directory, spec_json, selection): + '''Returns the filname for the main test HTML file''' + + selection_for_filename = copy.deepcopy(selection) + # Use 'unset' rather than 'None' in test filenames. + if selection_for_filename['delivery_value'] is None: + selection_for_filename['delivery_value'] = 'unset' + + return os.path.join( + spec_directory, + spec_json['test_file_path_pattern'] % selection_for_filename) + + +def get_csp_value(value): + ''' + Returns actual CSP header values (e.g. "worker-src 'self'") for the + given string used in PolicyDelivery's value (e.g. "worker-src-self"). + ''' + + # script-src + # Test-related scripts like testharness.js and inline scripts containing + # test bodies. + # 'unsafe-inline' is added as a workaround here. This is probably not so + # bad, as it shouldn't intefere non-inline-script requests that we want to + # test. + if value == 'script-src-wildcard': + return "script-src * 'unsafe-inline'" + if value == 'script-src-self': + return "script-src 'self' 'unsafe-inline'" + # Workaround for "script-src 'none'" would be more complicated, because + # - "script-src 'none' 'unsafe-inline'" is handled somehow differently from + # "script-src 'none'", i.e. + # https://w3c.github.io/webappsec-csp/#match-url-to-source-list Step 3 + # handles the latter but not the former. + # - We need nonce- or path-based additional values to allow same-origin + # test scripts like testharness.js. + # Therefore, we disable 'script-src-none' tests for now in + # `/content-security-policy/spec.src.json`. + if value == 'script-src-none': + return "script-src 'none'" + + # worker-src + if value == 'worker-src-wildcard': + return 'worker-src *' + if value == 'worker-src-self': + return "worker-src 'self'" + if value == 'worker-src-none': + return "worker-src 'none'" + raise Exception('Invalid delivery_value: %s' % value) + +def handle_deliveries(policy_deliveries): + ''' + Generate elements and HTTP headers for the given list of + PolicyDelivery. + TODO(hiroshige): Merge duplicated code here, scope/document.py, etc. 
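+    Returns a dict with two keys: "meta" (markup to be injected into the
+    generated test HTML) and "headers" (a dict mapping HTTP response header
+    names to values).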
+ ''' + + meta = '' + headers = {} + + for delivery in policy_deliveries: + if delivery.value is None: + continue + if delivery.key == 'referrerPolicy': + if delivery.delivery_type == 'meta': + meta += \ + '' % delivery.value + elif delivery.delivery_type == 'http-rp': + headers['Referrer-Policy'] = delivery.value + # TODO(kristijanburnik): Limit to WPT origins. + headers['Access-Control-Allow-Origin'] = '*' + else: + raise Exception( + 'Invalid delivery_type: %s' % delivery.delivery_type) + elif delivery.key == 'mixedContent': + assert (delivery.value == 'opt-in') + if delivery.delivery_type == 'meta': + meta += '' + elif delivery.delivery_type == 'http-rp': + headers['Content-Security-Policy'] = 'block-all-mixed-content' + else: + raise Exception( + 'Invalid delivery_type: %s' % delivery.delivery_type) + elif delivery.key == 'contentSecurityPolicy': + csp_value = get_csp_value(delivery.value) + if delivery.delivery_type == 'meta': + meta += '' + elif delivery.delivery_type == 'http-rp': + headers['Content-Security-Policy'] = csp_value + else: + raise Exception( + 'Invalid delivery_type: %s' % delivery.delivery_type) + elif delivery.key == 'upgradeInsecureRequests': + # https://w3c.github.io/webappsec-upgrade-insecure-requests/#delivery + assert (delivery.value == 'upgrade') + if delivery.delivery_type == 'meta': + meta += '' + elif delivery.delivery_type == 'http-rp': + headers[ + 'Content-Security-Policy'] = 'upgrade-insecure-requests' + else: + raise Exception( + 'Invalid delivery_type: %s' % delivery.delivery_type) + else: + raise Exception('Invalid delivery_key: %s' % delivery.key) + return {"meta": meta, "headers": headers} + + +def generate_selection(spec_json, selection): + ''' + Returns a scenario object (with a top-level source_context_list entry, + which will be removed in generate_test_file() later). + ''' + + target_policy_delivery = util.PolicyDelivery(selection['delivery_type'], + selection['delivery_key'], + selection['delivery_value']) + del selection['delivery_type'] + del selection['delivery_key'] + del selection['delivery_value'] + + # Parse source context list and policy deliveries of source contexts. + # `util.ShouldSkip()` exceptions are raised if e.g. unsuppported + # combinations of source contexts and policy deliveries are used. + source_context_list_scheme = spec_json['source_context_list_schema'][ + selection['source_context_list']] + selection['source_context_list'] = [ + util.SourceContext.from_json(source_context, target_policy_delivery, + spec_json['source_context_schema']) + for source_context in source_context_list_scheme['sourceContextList'] + ] + + # Check if the subresource is supported by the innermost source context. + innermost_source_context = selection['source_context_list'][-1] + supported_subresource = spec_json['source_context_schema'][ + 'supported_subresource'][innermost_source_context.source_context_type] + if supported_subresource != '*': + if selection['subresource'] not in supported_subresource: + raise util.ShouldSkip() + + # Parse subresource policy deliveries. + selection[ + 'subresource_policy_deliveries'] = util.PolicyDelivery.list_from_json( + source_context_list_scheme['subresourcePolicyDeliveries'], + target_policy_delivery, spec_json['subresource_schema'] + ['supported_delivery_type'][selection['subresource']]) + + # Generate per-scenario test description. 
+ selection['test_description'] = spec_json[ + 'test_description_template'] % selection + + return selection + + +def generate_test_file(spec_directory, test_helper_filenames, + test_html_template_basename, test_filename, scenarios): + ''' + Generates a test HTML file (and possibly its associated .headers file) + from `scenarios`. + ''' + + # Scenarios for the same file should have the same `source_context_list`, + # including the top-level one. + # Note: currently, non-top-level source contexts aren't necessarily required + # to be the same, but we set this requirement as it will be useful e.g. when + # we e.g. reuse a worker among multiple scenarios. + for scenario in scenarios: + assert (scenario['source_context_list'] == scenarios[0] + ['source_context_list']) + + # We process the top source context below, and do not include it in + # the JSON objects (i.e. `scenarios`) in generated HTML files. + top_source_context = scenarios[0]['source_context_list'].pop(0) + assert (top_source_context.source_context_type == 'top') + for scenario in scenarios[1:]: + assert (scenario['source_context_list'].pop(0) == top_source_context) + + parameters = {} + + # Sort scenarios, to avoid unnecessary diffs due to different orders in + # `scenarios`. + serialized_scenarios = sorted( + [dump_test_parameters(scenario) for scenario in scenarios]) + + parameters['scenarios'] = ",\n".join(serialized_scenarios).replace( + "\n", "\n" + " " * 10) + + test_directory = os.path.dirname(test_filename) + + parameters['helper_js'] = "" + for test_helper_filename in test_helper_filenames: + parameters['helper_js'] += ' \n' % ( + os.path.relpath(test_helper_filename, test_directory)) + parameters['sanity_checker_js'] = os.path.relpath( + os.path.join(spec_directory, 'generic', 'sanity-checker.js'), + test_directory) + parameters['spec_json_js'] = os.path.relpath( + os.path.join(spec_directory, 'generic', 'spec_json.js'), + test_directory) + + test_headers_filename = test_filename + ".headers" + + test_html_template = util.get_template(test_html_template_basename) + disclaimer_template = util.get_template('disclaimer.template') + + html_template_filename = os.path.join(util.template_directory, + test_html_template_basename) + generated_disclaimer = disclaimer_template \ + % {'generating_script_filename': os.path.relpath(sys.argv[0], + util.test_root_directory), + 'spec_directory': os.path.relpath(spec_directory, + util.test_root_directory)} + + # Adjust the template for the test invoking JS. Indent it to look nice. + parameters['generated_disclaimer'] = generated_disclaimer.rstrip() + + # Directory for the test files. + try: + os.makedirs(test_directory) + except: + pass + + delivery = handle_deliveries(top_source_context.policy_deliveries) + + if len(delivery['headers']) > 0: + with open(test_headers_filename, "w") as f: + for header in delivery['headers']: + f.write('%s: %s\n' % (header, delivery['headers'][header])) + + parameters['meta_delivery_method'] = delivery['meta'] + # Obey the lint and pretty format. + if len(parameters['meta_delivery_method']) > 0: + parameters['meta_delivery_method'] = "\n " + \ + parameters['meta_delivery_method'] + + # Write out the generated HTML file. 
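
For the delivery handling above: an `http-rp` policy delivery ends up as response headers that `generate_test_file` writes into a `.headers` side-car next to the generated HTML, while a `meta` delivery is injected through `meta_delivery_method`. A minimal sketch of the header branch only, with an invented file name and an example referrer-policy delivery:

```python
def write_headers_sidecar(test_filename, headers):
    """Write 'Name: value' lines into <test_filename>.headers,
    mirroring the side-car handling in generate_test_file."""
    if not headers:
        return
    with open(test_filename + ".headers", "w") as f:
        for name, value in headers.items():
            f.write("%s: %s\n" % (name, value))

# Example: what handle_deliveries() produces for an http-rp
# referrerPolicy delivery; the file name is purely illustrative.
write_headers_sidecar("example.https.html", {
    "Referrer-Policy": "no-referrer",
    "Access-Control-Allow-Origin": "*",
})
```
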
+ util.write_file(test_filename, test_html_template % parameters) + + +def generate_test_source_files(spec_directory, test_helper_filenames, + spec_json, target): + test_expansion_schema = spec_json['test_expansion_schema'] + specification = spec_json['specification'] + + if target == "debug": + spec_json_js_template = util.get_template('spec_json.js.template') + util.write_file( + os.path.join(spec_directory, "generic", "spec_json.js"), + spec_json_js_template % {'spec_json': json.dumps(spec_json)}) + util.write_file( + os.path.join(spec_directory, "generic", + "debug-output.spec.src.json"), + json.dumps(spec_json, indent=2, separators=(',', ': '))) + + # Choose a debug/release template depending on the target. + html_template = "test.%s.html.template" % target + + artifact_order = test_expansion_schema.keys() + artifact_order.remove('expansion') + + excluded_selection_pattern = '' + for key in artifact_order: + excluded_selection_pattern += '%(' + key + ')s/' + + # Create list of excluded tests. + exclusion_dict = set() + for excluded_pattern in spec_json['excluded_tests']: + excluded_expansion = \ + expand_pattern(excluded_pattern, test_expansion_schema) + for excluded_selection in permute_expansion(excluded_expansion, + artifact_order): + excluded_selection['delivery_key'] = spec_json['delivery_key'] + exclusion_dict.add(excluded_selection_pattern % excluded_selection) + + # `scenarios[filename]` represents the list of scenario objects to be + # generated into `filename`. + scenarios = {} + + for spec in specification: + # Used to make entries with expansion="override" override preceding + # entries with the same |selection_path|. + output_dict = {} + + for expansion_pattern in spec['test_expansion']: + expansion = expand_pattern(expansion_pattern, + test_expansion_schema) + for selection in permute_expansion(expansion, artifact_order): + selection['delivery_key'] = spec_json['delivery_key'] + selection_path = spec_json['selection_pattern'] % selection + if selection_path in output_dict: + if expansion_pattern['expansion'] != 'override': + print("Error: expansion is default in:") + print(dump_test_parameters(selection)) + print("but overrides:") + print(dump_test_parameters( + output_dict[selection_path])) + sys.exit(1) + output_dict[selection_path] = copy.deepcopy(selection) + + for selection_path in output_dict: + selection = output_dict[selection_path] + if (excluded_selection_pattern % selection) in exclusion_dict: + print('Excluding selection:', selection_path) + continue + try: + test_filename = get_test_filename(spec_directory, spec_json, + selection) + scenario = generate_selection(spec_json, selection) + scenarios[test_filename] = scenarios.get(test_filename, + []) + [scenario] + except util.ShouldSkip: + continue + + for filename in scenarios: + generate_test_file(spec_directory, test_helper_filenames, + html_template, filename, scenarios[filename]) + + +def merge_json(base, child): + for key in child: + if key not in base: + base[key] = child[key] + continue + # `base[key]` and `child[key]` both exists. 
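
Both the exclusion check and the override bookkeeping in `generate_test_source_files` work by rendering a selection dict through a `%(key)s`-style pattern and comparing the resulting strings. A compressed illustration with made-up artifact keys:

```python
artifact_order = ["source_scheme", "subresource"]

# Build the '%(key)s/' pattern over the artifact keys, as the generator does.
excluded_selection_pattern = "".join("%%(%s)s/" % key for key in artifact_order)
# -> '%(source_scheme)s/%(subresource)s/'

# Excluded selections are stored as rendered strings in a set.
exclusions = {
    excluded_selection_pattern % {"source_scheme": "http", "subresource": "websocket"}
}

selection = {"source_scheme": "http", "subresource": "websocket"}
if excluded_selection_pattern % selection in exclusions:
    print("Excluding selection:", selection)
```

`output_dict`, keyed by `selection_pattern % selection`, relies on the same rendering; a later entry may replace an earlier one for the same path only when its pattern is marked `"expansion": "override"`.
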
+ if isinstance(base[key], list) and isinstance(child[key], list): + base[key].extend(child[key]) + elif isinstance(base[key], dict) and isinstance(child[key], dict): + merge_json(base[key], child[key]) + else: + base[key] = child[key] + + +def main(): + parser = argparse.ArgumentParser( + description='Test suite generator utility') + parser.add_argument( + '-t', + '--target', + type=str, + choices=("release", "debug"), + default="release", + help='Sets the appropriate template for generating tests') + parser.add_argument( + '-s', + '--spec', + type=str, + default=os.getcwd(), + help='Specify a file used for describing and generating the tests') + # TODO(kristijanburnik): Add option for the spec_json file. + args = parser.parse_args() + + spec_directory = os.path.abspath(args.spec) + + # Read `spec.src.json` files, starting from `spec_directory`, and + # continuing to parent directories as long as `spec.src.json` exists. + spec_filenames = [] + test_helper_filenames = [] + spec_src_directory = spec_directory + while len(spec_src_directory) >= len(util.test_root_directory): + spec_filename = os.path.join(spec_src_directory, "spec.src.json") + if not os.path.exists(spec_filename): + break + spec_filenames.append(spec_filename) + test_filename = os.path.join(spec_src_directory, 'generic', + 'test-case.sub.js') + assert (os.path.exists(test_filename)) + test_helper_filenames.append(test_filename) + spec_src_directory = os.path.abspath( + os.path.join(spec_src_directory, "..")) + + spec_filenames = list(reversed(spec_filenames)) + test_helper_filenames = list(reversed(test_helper_filenames)) + + if len(spec_filenames) == 0: + print('Error: No spec.src.json is found at %s.' % spec_directory) + return + + # Load the default spec JSON file, ... + default_spec_filename = os.path.join(util.script_directory, + 'spec.src.json') + spec_json = collections.OrderedDict() + if os.path.exists(default_spec_filename): + spec_json = util.load_spec_json(default_spec_filename) + + # ... and then make spec JSON files in subdirectories override the default. 
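
Concretely, `merge_json` above extends lists, merges dicts recursively, and lets the child overwrite scalars; the loop that follows applies it to every `spec.src.json` found while walking up from the spec directory, so directory-local files override the shared defaults. A self-contained demonstration with invented keys and values:

```python
def merge_json(base, child):
    # Same semantics as the generator's merge_json above.
    for key in child:
        if key not in base:
            base[key] = child[key]
        elif isinstance(base[key], list) and isinstance(child[key], list):
            base[key].extend(child[key])
        elif isinstance(base[key], dict) and isinstance(child[key], dict):
            merge_json(base[key], child[key])
        else:
            base[key] = child[key]

base = {
    "delivery_key": "referrerPolicy",                      # scalar: overridden
    "excluded_tests": [{"subresource": "websocket"}],      # list: extended
    "test_expansion_schema": {"source_scheme": ["http"]},  # dict: merged
}
child = {
    "delivery_key": "mixedContent",
    "excluded_tests": [{"subresource": "worker-classic"}],
    "test_expansion_schema": {"origin": ["same-https"]},
}
merge_json(base, child)
print(base["delivery_key"])                   # mixedContent
print(len(base["excluded_tests"]))            # 2
print(sorted(base["test_expansion_schema"]))  # ['origin', 'source_scheme']
```
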
+ for spec_filename in spec_filenames: + child_spec_json = util.load_spec_json(spec_filename) + merge_json(spec_json, child_spec_json) + + spec_validator.assert_valid_spec_json(spec_json) + generate_test_source_files(spec_directory, test_helper_filenames, + spec_json, args.target) + + +if __name__ == '__main__': + main() diff --git a/test/wpt/tests/common/security-features/tools/spec.src.json b/test/wpt/tests/common/security-features/tools/spec.src.json new file mode 100644 index 0000000..4a84493 --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/spec.src.json @@ -0,0 +1,533 @@ +{ + "selection_pattern": "%(source_context_list)s.%(delivery_type)s/%(delivery_value)s/%(subresource)s/%(origin)s.%(redirection)s.%(source_scheme)s", + "test_file_path_pattern": "gen/%(source_context_list)s.%(delivery_type)s/%(delivery_value)s/%(subresource)s.%(source_scheme)s.html", + "excluded_tests": [ + { + // Workers are same-origin only + "expansion": "*", + "source_scheme": "*", + "source_context_list": "*", + "delivery_type": "*", + "delivery_value": "*", + "redirection": "*", + "subresource": [ + "worker-classic", + "worker-module", + "sharedworker-classic", + "sharedworker-module" + ], + "origin": [ + "cross-https", + "cross-http", + "cross-http-downgrade", + "cross-wss", + "cross-ws", + "cross-ws-downgrade" + ], + "expectation": "*" + }, + { + // Workers are same-origin only (redirects) + "expansion": "*", + "source_scheme": "*", + "source_context_list": "*", + "delivery_type": "*", + "delivery_value": "*", + "redirection": [ + "swap-origin", + "swap-scheme" + ], + "subresource": [ + "worker-classic", + "worker-module", + "sharedworker-classic", + "sharedworker-module" + ], + "origin": "*", + "expectation": "*" + }, + { + // Websockets are ws/wss-only + "expansion": "*", + "source_scheme": "*", + "source_context_list": "*", + "delivery_type": "*", + "delivery_value": "*", + "redirection": "*", + "subresource": "websocket", + "origin": [ + "same-https", + "same-http", + "same-http-downgrade", + "cross-https", + "cross-http", + "cross-http-downgrade" + ], + "expectation": "*" + }, + { + // Redirects are intentionally forbidden in browsers: + // https://fetch.spec.whatwg.org/#concept-websocket-establish + // Websockets are no-redirect only + "expansion": "*", + "source_scheme": "*", + "source_context_list": "*", + "delivery_type": "*", + "delivery_value": "*", + "redirection": [ + "keep-origin", + "swap-origin", + "keep-scheme", + "swap-scheme", + "downgrade" + ], + "subresource": "websocket", + "origin": "*", + "expectation": "*" + }, + { + // ws/wss are websocket-only + "expansion": "*", + "source_scheme": "*", + "source_context_list": "*", + "delivery_type": "*", + "delivery_value": "*", + "redirection": "*", + "subresource": [ + "a-tag", + "area-tag", + "audio-tag", + "beacon", + "fetch", + "iframe-tag", + "img-tag", + "link-css-tag", + "link-prefetch-tag", + "object-tag", + "picture-tag", + "script-tag", + "script-tag-dynamic-import", + "sharedworker-classic", + "sharedworker-import", + "sharedworker-import-data", + "sharedworker-module", + "video-tag", + "worker-classic", + "worker-import", + "worker-import-data", + "worker-module", + "worklet-animation", + "worklet-animation-import-data", + "worklet-audio", + "worklet-audio-import-data", + "worklet-layout", + "worklet-layout-import-data", + "worklet-paint", + "worklet-paint-import-data", + "xhr" + ], + "origin": [ + "same-wss", + "same-ws", + "same-ws-downgrade", + "cross-wss", + "cross-ws", + "cross-ws-downgrade" + ], + "expectation": 
"*" + }, + { + // Worklets are HTTPS contexts only + "expansion": "*", + "source_scheme": "http", + "source_context_list": "*", + "delivery_type": "*", + "delivery_value": "*", + "redirection": "*", + "subresource": [ + "worklet-animation", + "worklet-animation-import-data", + "worklet-audio", + "worklet-audio-import-data", + "worklet-layout", + "worklet-layout-import-data", + "worklet-paint", + "worklet-paint-import-data" + ], + "origin": "*", + "expectation": "*" + } + ], + "source_context_schema": { + "supported_subresource": { + "top": "*", + "iframe": "*", + "iframe-blank": "*", + "srcdoc": "*", + "worker-classic": [ + "xhr", + "fetch", + "websocket", + "worker-classic", + "worker-module" + ], + "worker-module": [ + "xhr", + "fetch", + "websocket", + "worker-classic", + "worker-module" + ], + "worker-classic-data": [ + "xhr", + "fetch", + "websocket" + ], + "worker-module-data": [ + "xhr", + "fetch", + "websocket" + ], + "sharedworker-classic": [ + "xhr", + "fetch", + "websocket" + ], + "sharedworker-module": [ + "xhr", + "fetch", + "websocket" + ], + "sharedworker-classic-data": [ + "xhr", + "fetch", + "websocket" + ], + "sharedworker-module-data": [ + "xhr", + "fetch", + "websocket" + ] + } + }, + "source_context_list_schema": { + // Warning: Currently, some nested patterns of contexts have different + // inheritance rules for different kinds of policies. + // The generated tests will be used to test/investigate the policy + // inheritance rules, and eventually the policy inheritance rules will + // be unified (https://github.com/w3ctag/design-principles/issues/111). + "top": { + "description": "Policy set by the top-level Document", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "policy" + ] + } + ], + "subresourcePolicyDeliveries": [] + }, + "req": { + "description": "Subresource request's policy should override Document's policy", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "anotherPolicy" + ] + } + ], + "subresourcePolicyDeliveries": [ + "nonNullPolicy" + ] + }, + "srcdoc-inherit": { + "description": "srcdoc iframe without its own policy should inherit parent Document's policy", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "policy" + ] + }, + { + "sourceContextType": "srcdoc" + } + ], + "subresourcePolicyDeliveries": [] + }, + "srcdoc": { + "description": "srcdoc iframe's policy should override parent Document's policy", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "anotherPolicy" + ] + }, + { + "sourceContextType": "srcdoc", + "policyDeliveries": [ + "nonNullPolicy" + ] + } + ], + "subresourcePolicyDeliveries": [] + }, + "iframe": { + "description": "external iframe's policy should override parent Document's policy", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "anotherPolicy" + ] + }, + { + "sourceContextType": "iframe", + "policyDeliveries": [ + "policy" + ] + } + ], + "subresourcePolicyDeliveries": [] + }, + "iframe-blank-inherit": { + "description": "blank iframe should inherit parent Document's policy", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "policy" + ] + }, + { + "sourceContextType": "iframe-blank" + } + ], + "subresourcePolicyDeliveries": [] + }, + "worker-classic": { + // This is applicable to referrer-policy tests. + // Use "worker-classic-inherit" for CSP (mixed-content, etc.). 
+ "description": "dedicated workers shouldn't inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "anotherPolicy" + ] + }, + { + "sourceContextType": "worker-classic", + "policyDeliveries": [ + "policy" + ] + } + ], + "subresourcePolicyDeliveries": [] + }, + "worker-classic-data": { + "description": "data: dedicated workers should inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "policy" + ] + }, + { + "sourceContextType": "worker-classic-data", + "policyDeliveries": [] + } + ], + "subresourcePolicyDeliveries": [] + }, + "worker-module": { + // This is applicable to referrer-policy tests. + "description": "dedicated workers shouldn't inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "anotherPolicy" + ] + }, + { + "sourceContextType": "worker-module", + "policyDeliveries": [ + "policy" + ] + } + ], + "subresourcePolicyDeliveries": [] + }, + "worker-module-data": { + "description": "data: dedicated workers should inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "policy" + ] + }, + { + "sourceContextType": "worker-module-data", + "policyDeliveries": [] + } + ], + "subresourcePolicyDeliveries": [] + }, + "sharedworker-classic": { + "description": "shared workers shouldn't inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "anotherPolicy" + ] + }, + { + "sourceContextType": "sharedworker-classic", + "policyDeliveries": [ + "policy" + ] + } + ], + "subresourcePolicyDeliveries": [] + }, + "sharedworker-classic-data": { + "description": "data: shared workers should inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "policy" + ] + }, + { + "sourceContextType": "sharedworker-classic-data", + "policyDeliveries": [] + } + ], + "subresourcePolicyDeliveries": [] + }, + "sharedworker-module": { + "description": "shared workers shouldn't inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "anotherPolicy" + ] + }, + { + "sourceContextType": "sharedworker-module", + "policyDeliveries": [ + "policy" + ] + } + ], + "subresourcePolicyDeliveries": [] + }, + "sharedworker-module-data": { + "description": "data: shared workers should inherit its parent's policy.", + "sourceContextList": [ + { + "sourceContextType": "top", + "policyDeliveries": [ + "policy" + ] + }, + { + "sourceContextType": "sharedworker-module-data", + "policyDeliveries": [] + } + ], + "subresourcePolicyDeliveries": [] + } + }, + "test_expansion_schema": { + "expansion": [ + "default", + "override" + ], + "source_scheme": [ + "http", + "https" + ], + "source_context_list": [ + "top", + "req", + "srcdoc-inherit", + "srcdoc", + "iframe", + "iframe-blank-inherit", + "worker-classic", + "worker-classic-data", + "worker-module", + "worker-module-data", + "sharedworker-classic", + "sharedworker-classic-data", + "sharedworker-module", + "sharedworker-module-data" + ], + "redirection": [ + "no-redirect", + "keep-origin", + "swap-origin", + "keep-scheme", + "swap-scheme", + "downgrade" + ], + "origin": [ + "same-https", + "same-http", + "same-http-downgrade", + "cross-https", + "cross-http", + "cross-http-downgrade", + "same-wss", + "same-ws", + "same-ws-downgrade", + "cross-wss", + "cross-ws", + 
"cross-ws-downgrade" + ], + "subresource": [ + "a-tag", + "area-tag", + "audio-tag", + "beacon", + "fetch", + "iframe-tag", + "img-tag", + "link-css-tag", + "link-prefetch-tag", + "object-tag", + "picture-tag", + "script-tag", + "script-tag-dynamic-import", + "sharedworker-classic", + "sharedworker-import", + "sharedworker-import-data", + "sharedworker-module", + "video-tag", + "websocket", + "worker-classic", + "worker-import", + "worker-import-data", + "worker-module", + "worklet-animation", + "worklet-animation-import-data", + "worklet-audio", + "worklet-audio-import-data", + "worklet-layout", + "worklet-layout-import-data", + "worklet-paint", + "worklet-paint-import-data", + "xhr" + ] + } +} diff --git a/test/wpt/tests/common/security-features/tools/spec_validator.py b/test/wpt/tests/common/security-features/tools/spec_validator.py new file mode 100644 index 0000000..f8a1390 --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/spec_validator.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python3 + +import json, sys + + +def assert_non_empty_string(obj, field): + assert field in obj, 'Missing field "%s"' % field + assert isinstance(obj[field], basestring), \ + 'Field "%s" must be a string' % field + assert len(obj[field]) > 0, 'Field "%s" must not be empty' % field + + +def assert_non_empty_list(obj, field): + assert isinstance(obj[field], list), \ + '%s must be a list' % field + assert len(obj[field]) > 0, \ + '%s list must not be empty' % field + + +def assert_non_empty_dict(obj, field): + assert isinstance(obj[field], dict), \ + '%s must be a dict' % field + assert len(obj[field]) > 0, \ + '%s dict must not be empty' % field + + +def assert_contains(obj, field): + assert field in obj, 'Must contain field "%s"' % field + + +def assert_value_from(obj, field, items): + assert obj[field] in items, \ + 'Field "%s" must be from: %s' % (field, str(items)) + + +def assert_atom_or_list_items_from(obj, field, items): + if isinstance(obj[field], basestring) or isinstance( + obj[field], int) or obj[field] is None: + assert_value_from(obj, field, items) + return + + assert isinstance(obj[field], list), '%s must be a list' % field + for allowed_value in obj[field]: + assert allowed_value != '*', "Wildcard is not supported for lists!" + assert allowed_value in items, \ + 'Field "%s" must be from: %s' % (field, str(items)) + + +def assert_contains_only_fields(obj, expected_fields): + for expected_field in expected_fields: + assert_contains(obj, expected_field) + + for actual_field in obj: + assert actual_field in expected_fields, \ + 'Unexpected field "%s".' % actual_field + + +def leaf_values(schema): + if isinstance(schema, list): + return schema + ret = [] + for _, sub_schema in schema.iteritems(): + ret += leaf_values(sub_schema) + return ret + + +def assert_value_unique_in(value, used_values): + assert value not in used_values, 'Duplicate value "%s"!' % str(value) + used_values[value] = True + + +def assert_valid_artifact(exp_pattern, artifact_key, schema): + if isinstance(schema, list): + assert_atom_or_list_items_from(exp_pattern, artifact_key, + ["*"] + schema) + return + + for sub_artifact_key, sub_schema in schema.iteritems(): + assert_valid_artifact(exp_pattern[artifact_key], sub_artifact_key, + sub_schema) + + +def validate(spec_json, details): + """ Validates the json specification for generating tests. 
""" + + details['object'] = spec_json + assert_contains_only_fields(spec_json, [ + "selection_pattern", "test_file_path_pattern", + "test_description_template", "test_page_title_template", + "specification", "delivery_key", "subresource_schema", + "source_context_schema", "source_context_list_schema", + "test_expansion_schema", "excluded_tests" + ]) + assert_non_empty_list(spec_json, "specification") + assert_non_empty_dict(spec_json, "test_expansion_schema") + assert_non_empty_list(spec_json, "excluded_tests") + + specification = spec_json['specification'] + test_expansion_schema = spec_json['test_expansion_schema'] + excluded_tests = spec_json['excluded_tests'] + + valid_test_expansion_fields = test_expansion_schema.keys() + + # Should be consistent with `sourceContextMap` in + # `/common/security-features/resources/common.sub.js`. + valid_source_context_names = [ + "top", "iframe", "iframe-blank", "srcdoc", "worker-classic", + "worker-module", "worker-classic-data", "worker-module-data", + "sharedworker-classic", "sharedworker-module", + "sharedworker-classic-data", "sharedworker-module-data" + ] + + valid_subresource_names = [ + "a-tag", "area-tag", "audio-tag", "form-tag", "iframe-tag", "img-tag", + "link-css-tag", "link-prefetch-tag", "object-tag", "picture-tag", + "script-tag", "script-tag-dynamic-import", "video-tag" + ] + ["beacon", "fetch", "xhr", "websocket"] + [ + "worker-classic", "worker-module", "worker-import", + "worker-import-data", "sharedworker-classic", "sharedworker-module", + "sharedworker-import", "sharedworker-import-data", + "serviceworker-classic", "serviceworker-module", + "serviceworker-import", "serviceworker-import-data" + ] + [ + "worklet-animation", "worklet-audio", "worklet-layout", + "worklet-paint", "worklet-animation-import", "worklet-audio-import", + "worklet-layout-import", "worklet-paint-import", + "worklet-animation-import-data", "worklet-audio-import-data", + "worklet-layout-import-data", "worklet-paint-import-data" + ] + + # Validate each single spec. + for spec in specification: + details['object'] = spec + + # Validate required fields for a single spec. + assert_contains_only_fields(spec, [ + 'title', 'description', 'specification_url', 'test_expansion' + ]) + assert_non_empty_string(spec, 'title') + assert_non_empty_string(spec, 'description') + assert_non_empty_string(spec, 'specification_url') + assert_non_empty_list(spec, 'test_expansion') + + for spec_exp in spec['test_expansion']: + details['object'] = spec_exp + assert_contains_only_fields(spec_exp, valid_test_expansion_fields) + + for artifact in test_expansion_schema: + details['test_expansion_field'] = artifact + assert_valid_artifact(spec_exp, artifact, + test_expansion_schema[artifact]) + del details['test_expansion_field'] + + # Validate source_context_schema. 
+ details['object'] = spec_json['source_context_schema'] + assert_contains_only_fields( + spec_json['source_context_schema'], + ['supported_delivery_type', 'supported_subresource']) + assert_contains_only_fields( + spec_json['source_context_schema']['supported_delivery_type'], + valid_source_context_names) + for source_context in spec_json['source_context_schema'][ + 'supported_delivery_type']: + assert_valid_artifact( + spec_json['source_context_schema']['supported_delivery_type'], + source_context, test_expansion_schema['delivery_type']) + assert_contains_only_fields( + spec_json['source_context_schema']['supported_subresource'], + valid_source_context_names) + for source_context in spec_json['source_context_schema'][ + 'supported_subresource']: + assert_valid_artifact( + spec_json['source_context_schema']['supported_subresource'], + source_context, leaf_values(test_expansion_schema['subresource'])) + + # Validate subresource_schema. + details['object'] = spec_json['subresource_schema'] + assert_contains_only_fields(spec_json['subresource_schema'], + ['supported_delivery_type']) + assert_contains_only_fields( + spec_json['subresource_schema']['supported_delivery_type'], + leaf_values(test_expansion_schema['subresource'])) + for subresource in spec_json['subresource_schema'][ + 'supported_delivery_type']: + assert_valid_artifact( + spec_json['subresource_schema']['supported_delivery_type'], + subresource, test_expansion_schema['delivery_type']) + + # Validate the test_expansion schema members. + details['object'] = test_expansion_schema + assert_contains_only_fields(test_expansion_schema, [ + 'expansion', 'source_scheme', 'source_context_list', 'delivery_type', + 'delivery_value', 'redirection', 'subresource', 'origin', 'expectation' + ]) + assert_atom_or_list_items_from(test_expansion_schema, 'expansion', + ['default', 'override']) + assert_atom_or_list_items_from(test_expansion_schema, 'source_scheme', + ['http', 'https']) + assert_atom_or_list_items_from( + test_expansion_schema, 'source_context_list', + spec_json['source_context_list_schema'].keys()) + + # Should be consistent with `preprocess_redirection` in + # `/common/security-features/subresource/subresource.py`. + assert_atom_or_list_items_from(test_expansion_schema, 'redirection', [ + 'no-redirect', 'keep-origin', 'swap-origin', 'keep-scheme', + 'swap-scheme', 'downgrade' + ]) + for subresource in leaf_values(test_expansion_schema['subresource']): + assert subresource in valid_subresource_names, "Invalid subresource %s" % subresource + # Should be consistent with getSubresourceOrigin() in + # `/common/security-features/resources/common.sub.js`. + assert_atom_or_list_items_from(test_expansion_schema, 'origin', [ + 'same-http', 'same-https', 'same-ws', 'same-wss', 'cross-http', + 'cross-https', 'cross-ws', 'cross-wss', 'same-http-downgrade', + 'cross-http-downgrade', 'same-ws-downgrade', 'cross-ws-downgrade' + ]) + + # Validate excluded tests. 
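
`leaf_values` exists because a schema entry may be either a flat list or a dict grouping values into named categories, while the validator always wants the flat list of leaves. A small example in Python 3 spelling (the helper above still uses `iteritems()`); the grouping shown is invented, since the `subresource` schema in this spec is already flat:

```python
def leaf_values(schema):
    # Flatten a possibly-nested schema into its leaf lists.
    if isinstance(schema, list):
        return schema
    ret = []
    for sub_schema in schema.values():
        ret += leaf_values(sub_schema)
    return ret

grouped = {
    "markup": ["img-tag", "script-tag"],
    "api": {"fetch-like": ["fetch", "xhr"], "sockets": ["websocket"]},
}
print(leaf_values(grouped))
# ['img-tag', 'script-tag', 'fetch', 'xhr', 'websocket']
```
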
+ details['object'] = excluded_tests + for excluded_test_expansion in excluded_tests: + assert_contains_only_fields(excluded_test_expansion, + valid_test_expansion_fields) + details['object'] = excluded_test_expansion + for artifact in test_expansion_schema: + details['test_expansion_field'] = artifact + assert_valid_artifact(excluded_test_expansion, artifact, + test_expansion_schema[artifact]) + del details['test_expansion_field'] + + del details['object'] + + +def assert_valid_spec_json(spec_json): + error_details = {} + try: + validate(spec_json, error_details) + except AssertionError as err: + print('ERROR:', err) + print(json.dumps(error_details, indent=4)) + sys.exit(1) + + +def main(): + spec_json = load_spec_json() + assert_valid_spec_json(spec_json) + print("Spec JSON is valid.") + + +if __name__ == '__main__': + main() diff --git a/test/wpt/tests/common/security-features/tools/template/disclaimer.template b/test/wpt/tests/common/security-features/tools/template/disclaimer.template new file mode 100644 index 0000000..ba9458c --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/template/disclaimer.template @@ -0,0 +1 @@ + diff --git a/test/wpt/tests/common/security-features/tools/template/spec_json.js.template b/test/wpt/tests/common/security-features/tools/template/spec_json.js.template new file mode 100644 index 0000000..e4cbd03 --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/template/spec_json.js.template @@ -0,0 +1 @@ +var SPEC_JSON = %(spec_json)s; diff --git a/test/wpt/tests/common/security-features/tools/template/test.debug.html.template b/test/wpt/tests/common/security-features/tools/template/test.debug.html.template new file mode 100644 index 0000000..b6be088 --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/template/test.debug.html.template @@ -0,0 +1,26 @@ + +%(generated_disclaimer)s + + + + %(meta_delivery_method)s + + + + + + + +%(helper_js)s + + +
+ + diff --git a/test/wpt/tests/common/security-features/tools/template/test.release.html.template b/test/wpt/tests/common/security-features/tools/template/test.release.html.template new file mode 100644 index 0000000..bac2d5b --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/template/test.release.html.template @@ -0,0 +1,22 @@ + +%(generated_disclaimer)s + + + + %(meta_delivery_method)s + + + +%(helper_js)s + + +
+ + diff --git a/test/wpt/tests/common/security-features/tools/util.py b/test/wpt/tests/common/security-features/tools/util.py new file mode 100644 index 0000000..5da06f9 --- /dev/null +++ b/test/wpt/tests/common/security-features/tools/util.py @@ -0,0 +1,228 @@ +import os, sys, json, json5, re +import collections + +script_directory = os.path.dirname(os.path.abspath(__file__)) +template_directory = os.path.abspath( + os.path.join(script_directory, 'template')) +test_root_directory = os.path.abspath( + os.path.join(script_directory, '..', '..', '..')) + + +def get_template(basename): + with open(os.path.join(template_directory, basename), "r") as f: + return f.read() + + +def write_file(filename, contents): + with open(filename, "w") as f: + f.write(contents) + + +def read_nth_line(fp, line_number): + fp.seek(0) + for i, line in enumerate(fp): + if (i + 1) == line_number: + return line + + +def load_spec_json(path_to_spec): + re_error_location = re.compile('line ([0-9]+) column ([0-9]+)') + with open(path_to_spec, "r") as f: + try: + return json5.load(f, object_pairs_hook=collections.OrderedDict) + except ValueError as ex: + print(ex.message) + match = re_error_location.search(ex.message) + if match: + line_number, column = int(match.group(1)), int(match.group(2)) + print(read_nth_line(f, line_number).rstrip()) + print(" " * (column - 1) + "^") + sys.exit(1) + + +class ShouldSkip(Exception): + ''' + Raised when the given combination of subresource type, source context type, + delivery type etc. are not supported and we should skip that configuration. + ShouldSkip is expected in normal generator execution (and thus subsequent + generation continues), as we first enumerate a broad range of configurations + first, and later raise ShouldSkip to filter out unsupported combinations. + + ShouldSkip is distinguished from other general errors that cause immediate + termination of the generator and require fix. + ''' + def __init__(self): + pass + + +class PolicyDelivery(object): + ''' + See `@typedef PolicyDelivery` comments in + `common/security-features/resources/common.sub.js`. + ''' + + def __init__(self, delivery_type, key, value): + self.delivery_type = delivery_type + self.key = key + self.value = value + + def __eq__(self, other): + return type(self) is type(other) and self.__dict__ == other.__dict__ + + @classmethod + def list_from_json(cls, list, target_policy_delivery, + supported_delivery_types): + # type: (dict, PolicyDelivery, typing.List[str]) -> typing.List[PolicyDelivery] + ''' + Parses a JSON object `list` that represents a list of `PolicyDelivery` + and returns a list of `PolicyDelivery`, plus supporting placeholders + (see `from_json()` comments below or + `common/security-features/README.md`). + + Can raise `ShouldSkip`. + ''' + if list is None: + return [] + + out = [] + for obj in list: + policy_delivery = PolicyDelivery.from_json( + obj, target_policy_delivery, supported_delivery_types) + # Drop entries with null values. + if policy_delivery.value is None: + continue + out.append(policy_delivery) + return out + + @classmethod + def from_json(cls, obj, target_policy_delivery, supported_delivery_types): + # type: (dict, PolicyDelivery, typing.List[str]) -> PolicyDelivery + ''' + Parses a JSON object `obj` and returns a `PolicyDelivery` object. + In addition to dicts (in the same format as to_json() outputs), + this method accepts the following placeholders: + "policy": + `target_policy_delivery` + "policyIfNonNull": + `target_policy_delivery` if its value is not None. 
+ "anotherPolicy": + A PolicyDelivery that has the same key as + `target_policy_delivery` but a different value. + The delivery type is selected from `supported_delivery_types`. + + Can raise `ShouldSkip`. + ''' + + if obj == "policy": + policy_delivery = target_policy_delivery + elif obj == "nonNullPolicy": + if target_policy_delivery.value is None: + raise ShouldSkip() + policy_delivery = target_policy_delivery + elif obj == "anotherPolicy": + if len(supported_delivery_types) == 0: + raise ShouldSkip() + policy_delivery = target_policy_delivery.get_another_policy( + supported_delivery_types[0]) + elif isinstance(obj, dict): + policy_delivery = PolicyDelivery(obj['deliveryType'], obj['key'], + obj['value']) + else: + raise Exception('policy delivery is invalid: ' + obj) + + # Omit unsupported combinations of source contexts and delivery type. + if policy_delivery.delivery_type not in supported_delivery_types: + raise ShouldSkip() + + return policy_delivery + + def to_json(self): + # type: () -> dict + return { + "deliveryType": self.delivery_type, + "key": self.key, + "value": self.value + } + + def get_another_policy(self, delivery_type): + # type: (str) -> PolicyDelivery + if self.key == 'referrerPolicy': + # Return 'unsafe-url' (i.e. more unsafe policy than `self.value`) + # as long as possible, to make sure the tests to fail if the + # returned policy is used unexpectedly instead of `self.value`. + # Using safer policy wouldn't be distinguishable from acceptable + # arbitrary policy enforcement by user agents, as specified at + # Step 7 of + # https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer: + # "The user agent MAY alter referrerURL or referrerOrigin at this + # point to enforce arbitrary policy considerations in the + # interests of minimizing data leakage." + # See also the comments at `referrerUrlResolver` in + # `wpt/referrer-policy/generic/test-case.sub.js`. + if self.value != 'unsafe-url': + return PolicyDelivery(delivery_type, self.key, 'unsafe-url') + else: + return PolicyDelivery(delivery_type, self.key, 'no-referrer') + elif self.key == 'mixedContent': + if self.value == 'opt-in': + return PolicyDelivery(delivery_type, self.key, None) + else: + return PolicyDelivery(delivery_type, self.key, 'opt-in') + elif self.key == 'contentSecurityPolicy': + if self.value is not None: + return PolicyDelivery(delivery_type, self.key, None) + else: + return PolicyDelivery(delivery_type, self.key, 'worker-src-none') + elif self.key == 'upgradeInsecureRequests': + if self.value == 'upgrade': + return PolicyDelivery(delivery_type, self.key, None) + else: + return PolicyDelivery(delivery_type, self.key, 'upgrade') + else: + raise Exception('delivery key is invalid: ' + self.key) + + +class SourceContext(object): + def __init__(self, source_context_type, policy_deliveries): + # type: (unicode, typing.List[PolicyDelivery]) -> None + self.source_context_type = source_context_type + self.policy_deliveries = policy_deliveries + + def __eq__(self, other): + return type(self) is type(other) and self.__dict__ == other.__dict__ + + @classmethod + def from_json(cls, obj, target_policy_delivery, source_context_schema): + ''' + Parses a JSON object `obj` and returns a `SourceContext` object. + + `target_policy_delivery` and `source_context_schema` are used for + policy delivery placeholders and filtering out unsupported + delivery types. + + Can raise `ShouldSkip`. 
+ ''' + source_context_type = obj.get('sourceContextType') + policy_deliveries = PolicyDelivery.list_from_json( + obj.get('policyDeliveries'), target_policy_delivery, + source_context_schema['supported_delivery_type'] + [source_context_type]) + return SourceContext(source_context_type, policy_deliveries) + + def to_json(self): + return { + "sourceContextType": self.source_context_type, + "policyDeliveries": [x.to_json() for x in self.policy_deliveries] + } + + +class CustomEncoder(json.JSONEncoder): + ''' + Used to dump dicts containing `SourceContext`/`PolicyDelivery` into JSON. + ''' + def default(self, obj): + if isinstance(obj, SourceContext): + return obj.to_json() + if isinstance(obj, PolicyDelivery): + return obj.to_json() + return json.JSONEncoder.default(self, obj) diff --git a/test/wpt/tests/common/security-features/types.md b/test/wpt/tests/common/security-features/types.md new file mode 100644 index 0000000..1707991 --- /dev/null +++ b/test/wpt/tests/common/security-features/types.md @@ -0,0 +1,62 @@ +# Types around the generator and generated tests + +This document describes types and concepts used across JavaScript and Python parts of this test framework. +Please refer to the JSDoc in `common.sub.js` or docstrings in Python scripts (if any). + +## Scenario + +### Properties + +- All keys of `test_expansion_schema` in `spec.src.json`, except for `expansion`, `delivery_type`, `delivery_value`, and `source_context_list`. Their values are **string**s specified in `test_expansion_schema`. +- `source_context_list` +- `subresource_policy_deliveries` + +### Types + +- Generator (`spec.src.json`): JSON object +- Generator (Python): `dict` +- Runtime (JS): JSON object +- Runtime (Python): N/A + +## `PolicyDelivery` + +### Types + +- Generator (`spec.src.json`): JSON object +- Generator (Python): `util.PolicyDelivery` +- Runtime (JS): JSON object (`@typedef PolicyDelivery` in `common.sub.js`) +- Runtime (Python): N/A + +## `SourceContext` + +Subresource requests can be possibly sent from various kinds of fetch client's environment settings objects. For example: + +- top-level windows, +- ` diff --git a/test/wpt/tests/fetch/api/abort/general.any.js b/test/wpt/tests/fetch/api/abort/general.any.js new file mode 100644 index 0000000..3727bb4 --- /dev/null +++ b/test/wpt/tests/fetch/api/abort/general.any.js @@ -0,0 +1,572 @@ +// META: timeout=long +// META: global=window,worker +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=../request/request-error.js + +const BODY_METHODS = ['arrayBuffer', 'blob', 'formData', 'json', 'text']; + +const error1 = new Error('error1'); +error1.name = 'error1'; + +// This is used to close connections that weren't correctly closed during the tests, +// otherwise you can end up running out of HTTP connections. 
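
To make the `PolicyDelivery` placeholders documented in `util.py` and `types.md` above concrete: `"anotherPolicy"` resolves to a delivery with the same key as the target delivery but a deliberately different value. A tiny standalone model of the `referrerPolicy` branch (a simplified restatement, not the vendored class):

```python
class PolicyDelivery:
    """Minimal stand-in for util.PolicyDelivery (delivery_type, key, value)."""

    def __init__(self, delivery_type, key, value):
        self.delivery_type = delivery_type
        self.key = key
        self.value = value

    def to_json(self):
        return {"deliveryType": self.delivery_type, "key": self.key, "value": self.value}

    def get_another_policy(self, delivery_type):
        # Mirrors the referrerPolicy branch above: prefer the most
        # leak-prone policy that still differs from self.value.
        if self.value != "unsafe-url":
            return PolicyDelivery(delivery_type, self.key, "unsafe-url")
        return PolicyDelivery(delivery_type, self.key, "no-referrer")

target = PolicyDelivery("http-rp", "referrerPolicy", "no-referrer")
print(target.to_json())
# "anotherPolicy" becomes a same-key delivery with a different value:
print(target.get_another_policy("meta").to_json())
```
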
+let requestAbortKeys = []; + +function abortRequests() { + const keys = requestAbortKeys; + requestAbortKeys = []; + return Promise.all( + keys.map(key => fetch(`../resources/stash-put.py?key=${key}&value=close`)) + ); +} + +const hostInfo = get_host_info(); +const urlHostname = hostInfo.REMOTE_HOST; + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const fetchPromise = fetch('../resources/data.json', { signal }); + + await promise_rejects_dom(t, "AbortError", fetchPromise); +}, "Aborting rejects with AbortError"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(error1); + + const fetchPromise = fetch('../resources/data.json', { signal }); + + await promise_rejects_exactly(t, error1, fetchPromise, 'fetch() should reject with abort reason'); +}, "Aborting rejects with abort reason"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const url = new URL('../resources/data.json', location); + url.hostname = urlHostname; + + const fetchPromise = fetch(url, { + signal, + mode: 'no-cors' + }); + + await promise_rejects_dom(t, "AbortError", fetchPromise); +}, "Aborting rejects with AbortError - no-cors"); + +// Test that errors thrown from the request constructor take priority over abort errors. +// badRequestArgTests is from response-error.js +for (const { args, testName } of badRequestArgTests) { + promise_test(async t => { + try { + // If this doesn't throw, we'll effectively skip the test. + // It'll fail properly in ../request/request-error.html + new Request(...args); + } + catch (err) { + const controller = new AbortController(); + controller.abort(); + + // Add signal to 2nd arg + args[1] = args[1] || {}; + args[1].signal = controller.signal; + await promise_rejects_js(t, TypeError, fetch(...args)); + } + }, `TypeError from request constructor takes priority - ${testName}`); +} + +test(() => { + const request = new Request(''); + assert_true(Boolean(request.signal), "Signal member is present & truthy"); + assert_equals(request.signal.constructor, AbortSignal); +}, "Request objects have a signal property"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('../resources/data.json', { signal }); + + assert_true(Boolean(request.signal), "Signal member is present & truthy"); + assert_equals(request.signal.constructor, AbortSignal); + assert_not_equals(request.signal, signal, 'Request has a new signal, not a reference'); + assert_true(request.signal.aborted, `Request's signal has aborted`); + + const fetchPromise = fetch(request); + + await promise_rejects_dom(t, "AbortError", fetchPromise); +}, "Signal on request object"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(error1); + + const request = new Request('../resources/data.json', { signal }); + + assert_not_equals(request.signal, signal, 'Request has a new signal, not a reference'); + assert_true(request.signal.aborted, `Request's signal has aborted`); + assert_equals(request.signal.reason, error1, `Request's signal's abort reason is error1`); + + const fetchPromise = fetch(request); + + await promise_rejects_exactly(t, error1, fetchPromise, "fetch() should reject with abort reason"); +}, "Signal on 
request object should also have abort reason"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('../resources/data.json', { signal }); + const requestFromRequest = new Request(request); + + const fetchPromise = fetch(requestFromRequest); + + await promise_rejects_dom(t, "AbortError", fetchPromise); +}, "Signal on request object created from request object"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('../resources/data.json'); + const requestFromRequest = new Request(request, { signal }); + + const fetchPromise = fetch(requestFromRequest); + + await promise_rejects_dom(t, "AbortError", fetchPromise); +}, "Signal on request object created from request object, with signal on second request"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('../resources/data.json', { signal: new AbortController().signal }); + const requestFromRequest = new Request(request, { signal }); + + const fetchPromise = fetch(requestFromRequest); + + await promise_rejects_dom(t, "AbortError", fetchPromise); +}, "Signal on request object created from request object, with signal on second request overriding another"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('../resources/data.json', { signal }); + + const fetchPromise = fetch(request, {method: 'POST'}); + + await promise_rejects_dom(t, "AbortError", fetchPromise); +}, "Signal retained after unrelated properties are overridden by fetch"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('../resources/data.json', { signal }); + + const data = await fetch(request, { signal: null }).then(r => r.json()); + assert_equals(data.key, 'value', 'Fetch fully completes'); +}, "Signal removed by setting to null"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const log = []; + + await Promise.all([ + fetch('../resources/data.json', { signal }).then( + () => assert_unreached("Fetch must not resolve"), + () => log.push('fetch-reject') + ), + Promise.resolve().then(() => log.push('next-microtask')) + ]); + + assert_array_equals(log, ['fetch-reject', 'next-microtask']); +}, "Already aborted signal rejects immediately"); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('../resources/data.json', { + signal, + method: 'POST', + body: 'foo', + headers: { 'Content-Type': 'text/plain' } + }); + + await fetch(request).catch(() => {}); + + assert_true(request.bodyUsed, "Body has been used"); +}, "Request is still 'used' if signal is aborted before fetching"); + +for (const bodyMethod of BODY_METHODS) { + promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + + const log = []; + const response = await fetch('../resources/data.json', { signal }); + + controller.abort(); + + const bodyPromise = response[bodyMethod](); + + await Promise.all([ + bodyPromise.catch(() => 
log.push(`${bodyMethod}-reject`)), + Promise.resolve().then(() => log.push('next-microtask')) + ]); + + await promise_rejects_dom(t, "AbortError", bodyPromise); + + assert_array_equals(log, [`${bodyMethod}-reject`, 'next-microtask']); + }, `response.${bodyMethod}() rejects if already aborted`); +} + +promise_test(async (t) => { + const controller = new AbortController(); + const signal = controller.signal; + + const res = await fetch('../resources/data.json', { signal }); + controller.abort(); + + await promise_rejects_dom(t, 'AbortError', res.text()); + await promise_rejects_dom(t, 'AbortError', res.text()); +}, 'Call text() twice on aborted response'); + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + const stateKey = token(); + const abortKey = token(); + requestAbortKeys.push(abortKey); + controller.abort(); + + await fetch(`../resources/infinite-slow-response.py?stateKey=${stateKey}&abortKey=${abortKey}`, { signal }).catch(() => {}); + + // I'm hoping this will give the browser enough time to (incorrectly) make the request + // above, if it intends to. + await fetch('../resources/data.json').then(r => r.json()); + + const response = await fetch(`../resources/stash-take.py?key=${stateKey}`); + const data = await response.json(); + + assert_equals(data, null, "Request hasn't been made to the server"); +}, "Already aborted signal does not make request"); + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const fetches = []; + + for (let i = 0; i < 3; i++) { + const abortKey = token(); + requestAbortKeys.push(abortKey); + + fetches.push( + fetch(`../resources/infinite-slow-response.py?${i}&abortKey=${abortKey}`, { signal }) + ); + } + + for (const fetchPromise of fetches) { + await promise_rejects_dom(t, "AbortError", fetchPromise); + } +}, "Already aborted signal can be used for many fetches"); + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + + await fetch('../resources/data.json', { signal }).then(r => r.json()); + + controller.abort(); + + const fetches = []; + + for (let i = 0; i < 3; i++) { + const abortKey = token(); + requestAbortKeys.push(abortKey); + + fetches.push( + fetch(`../resources/infinite-slow-response.py?${i}&abortKey=${abortKey}`, { signal }) + ); + } + + for (const fetchPromise of fetches) { + await promise_rejects_dom(t, "AbortError", fetchPromise); + } +}, "Signal can be used to abort other fetches, even if another fetch succeeded before aborting"); + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + const stateKey = token(); + const abortKey = token(); + requestAbortKeys.push(abortKey); + + await fetch(`../resources/infinite-slow-response.py?stateKey=${stateKey}&abortKey=${abortKey}`, { signal }); + + const beforeAbortResult = await fetch(`../resources/stash-take.py?key=${stateKey}`).then(r => r.json()); + assert_equals(beforeAbortResult, "open", "Connection is open"); + + controller.abort(); + + // The connection won't close immediately, but it should close at some point: + const start = Date.now(); + + while (true) { + // Stop spinning if 10 seconds have passed + if (Date.now() - start > 10000) throw Error('Timed out'); + + const afterAbortResult = await 
fetch(`../resources/stash-take.py?key=${stateKey}`).then(r => r.json()); + if (afterAbortResult == 'closed') break; + } +}, "Underlying connection is closed when aborting after receiving response"); + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + const stateKey = token(); + const abortKey = token(); + requestAbortKeys.push(abortKey); + + const url = new URL(`../resources/infinite-slow-response.py?stateKey=${stateKey}&abortKey=${abortKey}`, location); + url.hostname = urlHostname; + + await fetch(url, { + signal, + mode: 'no-cors' + }); + + const stashTakeURL = new URL(`../resources/stash-take.py?key=${stateKey}`, location); + stashTakeURL.hostname = urlHostname; + + const beforeAbortResult = await fetch(stashTakeURL).then(r => r.json()); + assert_equals(beforeAbortResult, "open", "Connection is open"); + + controller.abort(); + + // The connection won't close immediately, but it should close at some point: + const start = Date.now(); + + while (true) { + // Stop spinning if 10 seconds have passed + if (Date.now() - start > 10000) throw Error('Timed out'); + + const afterAbortResult = await fetch(stashTakeURL).then(r => r.json()); + if (afterAbortResult == 'closed') break; + } +}, "Underlying connection is closed when aborting after receiving response - no-cors"); + +for (const bodyMethod of BODY_METHODS) { + promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + const stateKey = token(); + const abortKey = token(); + requestAbortKeys.push(abortKey); + + const response = await fetch(`../resources/infinite-slow-response.py?stateKey=${stateKey}&abortKey=${abortKey}`, { signal }); + + const beforeAbortResult = await fetch(`../resources/stash-take.py?key=${stateKey}`).then(r => r.json()); + assert_equals(beforeAbortResult, "open", "Connection is open"); + + const bodyPromise = response[bodyMethod](); + + controller.abort(); + + await promise_rejects_dom(t, "AbortError", bodyPromise); + + const start = Date.now(); + + while (true) { + // Stop spinning if 10 seconds have passed + if (Date.now() - start > 10000) throw Error('Timed out'); + + const afterAbortResult = await fetch(`../resources/stash-take.py?key=${stateKey}`).then(r => r.json()); + if (afterAbortResult == 'closed') break; + } + }, `Fetch aborted & connection closed when aborted after calling response.${bodyMethod}()`); +} + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + const stateKey = token(); + const abortKey = token(); + requestAbortKeys.push(abortKey); + + const response = await fetch(`../resources/infinite-slow-response.py?stateKey=${stateKey}&abortKey=${abortKey}`, { signal }); + const reader = response.body.getReader(); + + controller.abort(); + + await promise_rejects_dom(t, "AbortError", reader.read()); + await promise_rejects_dom(t, "AbortError", reader.closed); + + // The connection won't close immediately, but it should close at some point: + const start = Date.now(); + + while (true) { + // Stop spinning if 10 seconds have passed + if (Date.now() - start > 10000) throw Error('Timed out'); + + const afterAbortResult = await fetch(`../resources/stash-take.py?key=${stateKey}`).then(r => r.json()); + if (afterAbortResult == 'closed') break; + } +}, "Stream errors once aborted. 
Underlying connection closed."); + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + const stateKey = token(); + const abortKey = token(); + requestAbortKeys.push(abortKey); + + const response = await fetch(`../resources/infinite-slow-response.py?stateKey=${stateKey}&abortKey=${abortKey}`, { signal }); + const reader = response.body.getReader(); + + await reader.read(); + + controller.abort(); + + await promise_rejects_dom(t, "AbortError", reader.read()); + await promise_rejects_dom(t, "AbortError", reader.closed); + + // The connection won't close immediately, but it should close at some point: + const start = Date.now(); + + while (true) { + // Stop spinning if 10 seconds have passed + if (Date.now() - start > 10000) throw Error('Timed out'); + + const afterAbortResult = await fetch(`../resources/stash-take.py?key=${stateKey}`).then(r => r.json()); + if (afterAbortResult == 'closed') break; + } +}, "Stream errors once aborted, after reading. Underlying connection closed."); + +promise_test(async t => { + await abortRequests(); + + const controller = new AbortController(); + const signal = controller.signal; + + const response = await fetch(`../resources/empty.txt`, { signal }); + + // Read whole response to ensure close signal has sent. + await response.clone().text(); + + const reader = response.body.getReader(); + + controller.abort(); + + const item = await reader.read(); + + assert_true(item.done, "Stream is done"); +}, "Stream will not error if body is empty. It's closed with an empty queue before it errors."); + +promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + let cancelReason; + + const body = new ReadableStream({ + pull(controller) { + controller.enqueue(new Uint8Array([42])); + }, + cancel(reason) { + cancelReason = reason; + } + }); + + const fetchPromise = fetch('../resources/empty.txt', { + body, signal, + method: 'POST', + duplex: 'half', + headers: { + 'Content-Type': 'text/plain' + } + }); + + assert_true(!!cancelReason, 'Cancel called sync'); + assert_equals(cancelReason.constructor, DOMException); + assert_equals(cancelReason.name, 'AbortError'); + + await promise_rejects_dom(t, "AbortError", fetchPromise); + + const fetchErr = await fetchPromise.catch(e => e); + + assert_equals(cancelReason, fetchErr, "Fetch rejects with same error instance"); +}, "Readable stream synchronously cancels with AbortError if aborted before reading"); + +test(() => { + const controller = new AbortController(); + const signal = controller.signal; + controller.abort(); + + const request = new Request('.', { signal }); + const requestSignal = request.signal; + + const clonedRequest = request.clone(); + + assert_equals(requestSignal, request.signal, "Original request signal the same after cloning"); + assert_true(request.signal.aborted, "Original request signal aborted"); + assert_not_equals(clonedRequest.signal, request.signal, "Cloned request has different signal"); + assert_true(clonedRequest.signal.aborted, "Cloned request signal aborted"); +}, "Signal state is cloned"); + +test(() => { + const controller = new AbortController(); + const signal = controller.signal; + + const request = new Request('.', { signal }); + const clonedRequest = request.clone(); + + const log = []; + + request.signal.addEventListener('abort', () => log.push('original-aborted')); + clonedRequest.signal.addEventListener('abort', () => 
log.push('clone-aborted')); + + controller.abort(); + + assert_array_equals(log, ['original-aborted', 'clone-aborted'], "Abort events fired in correct order"); + assert_true(request.signal.aborted, 'Signal aborted'); + assert_true(clonedRequest.signal.aborted, 'Signal aborted'); +}, "Clone aborts with original controller"); diff --git a/test/wpt/tests/fetch/api/abort/keepalive.html b/test/wpt/tests/fetch/api/abort/keepalive.html new file mode 100644 index 0000000..db12df0 --- /dev/null +++ b/test/wpt/tests/fetch/api/abort/keepalive.html @@ -0,0 +1,85 @@ + + + + + + + + diff --git a/test/wpt/tests/fetch/api/abort/request.any.js b/test/wpt/tests/fetch/api/abort/request.any.js new file mode 100644 index 0000000..dcc7803 --- /dev/null +++ b/test/wpt/tests/fetch/api/abort/request.any.js @@ -0,0 +1,85 @@ +// META: timeout=long +// META: global=window,worker + +const BODY_FUNCTION_AND_DATA = { + arrayBuffer: null, + blob: null, + formData: new FormData(), + json: new Blob(["{}"]), + text: null, +}; + +for (const [bodyFunction, body] of Object.entries(BODY_FUNCTION_AND_DATA)) { + promise_test(async () => { + const controller = new AbortController(); + const signal = controller.signal; + const request = new Request("../resources/data.json", { + method: "post", + signal, + body, + }); + + controller.abort(); + await request[bodyFunction](); + assert_true( + true, + `An aborted request should still be able to run ${bodyFunction}()` + ); + }, `Calling ${bodyFunction}() on an aborted request`); + + promise_test(async () => { + const controller = new AbortController(); + const signal = controller.signal; + const request = new Request("../resources/data.json", { + method: "post", + signal, + body, + }); + + const p = request[bodyFunction](); + controller.abort(); + await p; + assert_true( + true, + `An aborted request should still be able to run ${bodyFunction}()` + ); + }, `Aborting a request after calling ${bodyFunction}()`); + + if (!body) { + promise_test(async () => { + const controller = new AbortController(); + const signal = controller.signal; + const request = new Request("../resources/data.json", { + method: "post", + signal, + body, + }); + + // consuming happens synchronously, so don't wait + fetch(request).catch(() => {}); + + controller.abort(); + await request[bodyFunction](); + assert_true( + true, + `An aborted consumed request should still be able to run ${bodyFunction}() when empty` + ); + }, `Calling ${bodyFunction}() on an aborted consumed empty request`); + } + + promise_test(async t => { + const controller = new AbortController(); + const signal = controller.signal; + const request = new Request("../resources/data.json", { + method: "post", + signal, + body: body || new Blob(["foo"]), + }); + + // consuming happens synchronously, so don't wait + fetch(request).catch(() => {}); + + controller.abort(); + await promise_rejects_js(t, TypeError, request[bodyFunction]()); + }, `Calling ${bodyFunction}() on an aborted consumed nonempty request`); +} diff --git a/test/wpt/tests/fetch/api/abort/serviceworker-intercepted.https.html b/test/wpt/tests/fetch/api/abort/serviceworker-intercepted.https.html new file mode 100644 index 0000000..ed9bc97 --- /dev/null +++ b/test/wpt/tests/fetch/api/abort/serviceworker-intercepted.https.html @@ -0,0 +1,212 @@ + + + + + Aborting fetch when intercepted by a service worker + + + + + + + + diff --git a/test/wpt/tests/fetch/api/basic/accept-header.any.js b/test/wpt/tests/fetch/api/basic/accept-header.any.js new file mode 100644 index 0000000..cd54cf2 --- 
/dev/null +++ b/test/wpt/tests/fetch/api/basic/accept-header.any.js @@ -0,0 +1,34 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +promise_test(function() { + return fetch(RESOURCES_DIR + "inspect-headers.py?headers=Accept").then(function(response) { + assert_equals(response.status, 200, "HTTP status is 200"); + assert_equals(response.type , "basic", "Response's type is basic"); + assert_equals(response.headers.get("x-request-accept"), "*/*", "Request has accept header with value '*/*'"); + }); +}, "Request through fetch should have 'accept' header with value '*/*'"); + +promise_test(function() { + return fetch(RESOURCES_DIR + "inspect-headers.py?headers=Accept", {"headers": [["Accept", "custom/*"]]}).then(function(response) { + assert_equals(response.status, 200, "HTTP status is 200"); + assert_equals(response.type , "basic", "Response's type is basic"); + assert_equals(response.headers.get("x-request-accept"), "custom/*", "Request has accept header with value 'custom/*'"); + }); +}, "Request through fetch should have 'accept' header with value 'custom/*'"); + +promise_test(function() { + return fetch(RESOURCES_DIR + "inspect-headers.py?headers=Accept-Language").then(function(response) { + assert_equals(response.status, 200, "HTTP status is 200"); + assert_equals(response.type , "basic", "Response's type is basic"); + assert_true(response.headers.has("x-request-accept-language")); + }); +}, "Request through fetch should have a 'accept-language' header"); + +promise_test(function() { + return fetch(RESOURCES_DIR + "inspect-headers.py?headers=Accept-Language", {"headers": [["Accept-Language", "bzh"]]}).then(function(response) { + assert_equals(response.status, 200, "HTTP status is 200"); + assert_equals(response.type , "basic", "Response's type is basic"); + assert_equals(response.headers.get("x-request-accept-language"), "bzh", "Request has accept header with value 'bzh'"); + }); +}, "Request through fetch should have 'accept-language' header with value 'bzh'"); diff --git a/test/wpt/tests/fetch/api/basic/block-mime-as-script.html b/test/wpt/tests/fetch/api/basic/block-mime-as-script.html new file mode 100644 index 0000000..afc2bbb --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/block-mime-as-script.html @@ -0,0 +1,43 @@ + + +Block mime type as script + + +
+ diff --git a/test/wpt/tests/fetch/api/basic/conditional-get.any.js b/test/wpt/tests/fetch/api/basic/conditional-get.any.js new file mode 100644 index 0000000..2f9fa81 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/conditional-get.any.js @@ -0,0 +1,38 @@ +// META: title=Request ETag +// META: global=window,worker +// META: script=/common/utils.js + +promise_test(function() { + var cacheBuster = token(); // ensures first request is uncached + var url = "../resources/cache.py?v=" + cacheBuster; + var etag; + + // make the first request + return fetch(url).then(function(response) { + // ensure we're getting the regular, uncached response + assert_equals(response.status, 200); + assert_equals(response.headers.get("X-HTTP-STATUS"), null) + + return response.text(); // consuming the body, just to be safe + }).then(function(body) { + // make a second request + return fetch(url); + }).then(function(response) { + // while the server responds with 304 if our browser sent the correct + // If-None-Match request header, at the JavaScript level this surfaces + // as 200 + assert_equals(response.status, 200); + assert_equals(response.headers.get("X-HTTP-STATUS"), "304") + + etag = response.headers.get("ETag") + + return response.text(); // consuming the body, just to be safe + }).then(function(body) { + // make a third request, explicitly setting If-None-Match request header + var headers = { "If-None-Match": etag } + return fetch(url, { headers: headers }) + }).then(function(response) { + // 304 now surfaces thanks to the explicit If-None-Match request header + assert_equals(response.status, 304); + }); +}, "Testing conditional GET with ETags"); diff --git a/test/wpt/tests/fetch/api/basic/error-after-response.any.js b/test/wpt/tests/fetch/api/basic/error-after-response.any.js new file mode 100644 index 0000000..f711442 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/error-after-response.any.js @@ -0,0 +1,24 @@ +// META: title=Fetch: network timeout after receiving the HTTP response headers +// META: global=window,worker +// META: timeout=long +// META: script=../resources/utils.js + +function checkReader(test, reader, promiseToTest) +{ + return reader.read().then((value) => { + validateBufferFromString(value.value, "TEST_CHUNK", "Should receive first chunk"); + return promise_rejects_js(test, TypeError, promiseToTest(reader)); + }); +} + +promise_test((test) => { + return fetch("../resources/bad-chunk-encoding.py?count=1").then((response) => { + return checkReader(test, response.body.getReader(), reader => reader.read()); + }); +}, "Response reader read() promise should reject after a network error happening after resolving fetch promise"); + +promise_test((test) => { + return fetch("../resources/bad-chunk-encoding.py?count=1").then((response) => { + return checkReader(test, response.body.getReader(), reader => reader.closed); + }); +}, "Response reader closed promise should reject after a network error happening after resolving fetch promise"); diff --git a/test/wpt/tests/fetch/api/basic/header-value-combining.any.js b/test/wpt/tests/fetch/api/basic/header-value-combining.any.js new file mode 100644 index 0000000..bb70d87 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/header-value-combining.any.js @@ -0,0 +1,15 @@ +// META: global=window,worker + +[ + ["content-length", "0", "header-content-length"], + ["content-length", "0, 0", "header-content-length-twice"], + ["double-trouble", ", ", "headers-double-empty"], + ["foo-test", "1, 2, 3", "headers-basic"], + ["heya", ", \u000B\u000C, 1, , , 
2", "headers-some-are-empty"], + ["www-authenticate", "1, 2, 3, 4", "headers-www-authenticate"], +].forEach(testValues => { + promise_test(async t => { + const response = await fetch("../../../xhr/resources/" + testValues[2] + ".asis"); + assert_equals(response.headers.get(testValues[0]), testValues[1]); + }, "response.headers.get('" + testValues[0] + "') expects " + testValues[1]); +}); diff --git a/test/wpt/tests/fetch/api/basic/header-value-null-byte.any.js b/test/wpt/tests/fetch/api/basic/header-value-null-byte.any.js new file mode 100644 index 0000000..741d83b --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/header-value-null-byte.any.js @@ -0,0 +1,5 @@ +// META: global=window,worker + +promise_test(t => { + return promise_rejects_js(t, TypeError, fetch("../../../xhr/resources/parse-headers.py?my-custom-header="+encodeURIComponent("x\0x"))); +}, "Ensure fetch() rejects null bytes in headers"); diff --git a/test/wpt/tests/fetch/api/basic/historical.any.js b/test/wpt/tests/fetch/api/basic/historical.any.js new file mode 100644 index 0000000..c808126 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/historical.any.js @@ -0,0 +1,17 @@ +// META: global=window,worker + +test(() => { + assert_false("getAll" in new Headers()); + assert_false("getAll" in Headers.prototype); +}, "Headers object no longer has a getAll() method"); + +test(() => { + assert_false("type" in new Request("about:blank")); + assert_false("type" in Request.prototype); +}, "'type' getter should not exist on Request objects"); + +// See https://github.com/whatwg/fetch/pull/979 for the removal +test(() => { + assert_false("trailer" in new Response()); + assert_false("trailer" in Response.prototype); +}, "Response object no longer has a trailer getter"); diff --git a/test/wpt/tests/fetch/api/basic/http-response-code.any.js b/test/wpt/tests/fetch/api/basic/http-response-code.any.js new file mode 100644 index 0000000..1fd312a --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/http-response-code.any.js @@ -0,0 +1,14 @@ +// META: global=window,worker +// META: script=../resources/utils.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js + +promise_test(async (test) => { + const resp = await fetch( + "/fetch/connection-pool/resources/network-partition-key.py?" + + `status=425&uuid=${token()}&partition_id=${get_host_info().ORIGIN}` + + `&dispatch=check_partition&addcounter=true`); + assert_equals(resp.status, 425); + const text = await resp.text(); + assert_equals(text, "ok. Request was sent 1 times. 
1 connections were created."); +}, "Fetch on 425 response should not be retried for non TLS early data."); diff --git a/test/wpt/tests/fetch/api/basic/integrity.sub.any.js b/test/wpt/tests/fetch/api/basic/integrity.sub.any.js new file mode 100644 index 0000000..e3cfd1b --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/integrity.sub.any.js @@ -0,0 +1,87 @@ +// META: global=window,dedicatedworker,sharedworker +// META: script=../resources/utils.js + +function integrity(desc, url, integrity, initRequestMode, shouldPass) { + var fetchRequestInit = {'integrity': integrity} + if (!!initRequestMode && initRequestMode !== "") { + fetchRequestInit.mode = initRequestMode; + } + + if (shouldPass) { + promise_test(function(test) { + return fetch(url, fetchRequestInit).then(function(resp) { + if (initRequestMode !== "no-cors") { + assert_equals(resp.status, 200, "Response's status is 200"); + } else { + assert_equals(resp.status, 0, "Opaque response's status is 0"); + assert_equals(resp.type, "opaque"); + } + }); + }, desc); + } else { + promise_test(function(test) { + return promise_rejects_js(test, TypeError, fetch(url, fetchRequestInit)); + }, desc); + } +} + +const topSha256 = "sha256-KHIDZcXnR2oBHk9DrAA+5fFiR6JjudYjqoXtMR1zvzk="; +const topSha384 = "sha384-MgZYnnAzPM/MjhqfOIMfQK5qcFvGZsGLzx4Phd7/A8fHTqqLqXqKo8cNzY3xEPTL"; +const topSha512 = "sha512-D6yns0qxG0E7+TwkevZ4Jt5t7Iy3ugmAajG/dlf6Pado1JqTyneKXICDiqFIkLMRExgtvg8PlxbKTkYfRejSOg=="; +const topSha512wrongpadding = "sha512-D6yns0qxG0E7+TwkevZ4Jt5t7Iy3ugmAajG/dlf6Pado1JqTyneKXICDiqFIkLMRExgtvg8PlxbKTkYfRejSOg"; +const topSha512base64url = "sha512-D6yns0qxG0E7-TwkevZ4Jt5t7Iy3ugmAajG_dlf6Pado1JqTyneKXICDiqFIkLMRExgtvg8PlxbKTkYfRejSOg=="; +const topSha512base64url_nopadding = "sha512-D6yns0qxG0E7-TwkevZ4Jt5t7Iy3ugmAajG_dlf6Pado1JqTyneKXICDiqFIkLMRExgtvg8PlxbKTkYfRejSOg"; +const invalidSha256 = "sha256-dKUcPOn/AlUjWIwcHeHNqYXPlvyGiq+2dWOdFcE+24I="; +const invalidSha512 = "sha512-oUceBRNxPxnY60g/VtPCj2syT4wo4EZh2CgYdWy9veW8+OsReTXoh7dizMGZafvx9+QhMS39L/gIkxnPIn41Zg=="; + +const path = dirname(location.pathname) + RESOURCES_DIR + "top.txt"; +const url = path; +const corsUrl = + `http://{{host}}:{{ports[http][1]}}${path}?pipe=header(Access-Control-Allow-Origin,*)`; +const corsUrl2 = `https://{{host}}:{{ports[https][0]}}${path}` + +integrity("Empty string integrity", url, "", /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("SHA-256 integrity", url, topSha256, /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("SHA-384 integrity", url, topSha384, /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("SHA-512 integrity", url, topSha512, /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("SHA-512 integrity with missing padding", url, topSha512wrongpadding, + /* initRequestMode */ undefined, /* shouldPass */ true); +integrity("SHA-512 integrity base64url encoded", url, topSha512base64url, + /* initRequestMode */ undefined, /* shouldPass */ true); +integrity("SHA-512 integrity base64url encoded with missing padding", url, + topSha512base64url_nopadding, /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("Invalid integrity", url, invalidSha256, + /* initRequestMode */ undefined, /* shouldPass */ false); +integrity("Multiple integrities: valid stronger than invalid", url, + invalidSha256 + " " + topSha384, /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("Multiple integrities: invalid stronger than valid", + url, invalidSha512 + " " + topSha384, 
/* initRequestMode */ undefined, + /* shouldPass */ false); +integrity("Multiple integrities: invalid as strong as valid", url, + invalidSha512 + " " + topSha512, /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("Multiple integrities: both are valid", url, + topSha384 + " " + topSha512, /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("Multiple integrities: both are invalid", url, + invalidSha256 + " " + invalidSha512, /* initRequestMode */ undefined, + /* shouldPass */ false); +integrity("CORS empty integrity", corsUrl, "", /* initRequestMode */ undefined, + /* shouldPass */ true); +integrity("CORS SHA-512 integrity", corsUrl, topSha512, + /* initRequestMode */ undefined, /* shouldPass */ true); +integrity("CORS invalid integrity", corsUrl, invalidSha512, + /* initRequestMode */ undefined, /* shouldPass */ false); + +integrity("Empty string integrity for opaque response", corsUrl2, "", + /* initRequestMode */ "no-cors", /* shouldPass */ true); +integrity("SHA-* integrity for opaque response", corsUrl2, topSha512, + /* initRequestMode */ "no-cors", /* shouldPass */ false); + +done(); diff --git a/test/wpt/tests/fetch/api/basic/keepalive.any.js b/test/wpt/tests/fetch/api/basic/keepalive.any.js new file mode 100644 index 0000000..899d41d --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/keepalive.any.js @@ -0,0 +1,43 @@ +// META: global=window +// META: title=Fetch API: keepalive handling +// META: script=/resources/testharness.js +// META: script=/resources/testharnessreport.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=../resources/keepalive-helper.js + +'use strict'; + +const { + HTTP_NOTSAMESITE_ORIGIN, + HTTP_REMOTE_ORIGIN, + HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT +} = get_host_info(); + +/** + * In a different-site iframe, test to fetch a keepalive URL on the specified + * document event. 
+ */ +function keepaliveSimpleRequestTest(method) { + for (const evt of ['load', 'pagehide', 'unload']) { + const desc = + `[keepalive] simple ${method} request on '${evt}' [no payload]`; + promise_test(async (test) => { + const token1 = token(); + const iframe = document.createElement('iframe'); + iframe.src = getKeepAliveIframeUrl(token1, method, {sendOn: evt}); + document.body.appendChild(iframe); + await iframeLoaded(iframe); + if (evt != 'load') { + iframe.remove(); + } + assert_equals(await getTokenFromMessage(), token1); + + assertStashedTokenAsync(desc, token1); + }, `${desc}; setting up`); + } +} + +for (const method of ['GET', 'POST']) { + keepaliveSimpleRequestTest(method); +} diff --git a/test/wpt/tests/fetch/api/basic/mediasource.window.js b/test/wpt/tests/fetch/api/basic/mediasource.window.js new file mode 100644 index 0000000..1f89595 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/mediasource.window.js @@ -0,0 +1,5 @@ +promise_test(t => { + const mediaSource = new MediaSource(), + mediaSourceURL = URL.createObjectURL(mediaSource); + return promise_rejects_js(t, TypeError, fetch(mediaSourceURL)); +}, "Cannot fetch blob: URL from a MediaSource"); diff --git a/test/wpt/tests/fetch/api/basic/mode-no-cors.sub.any.js b/test/wpt/tests/fetch/api/basic/mode-no-cors.sub.any.js new file mode 100644 index 0000000..a4abcac --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/mode-no-cors.sub.any.js @@ -0,0 +1,29 @@ +// META: script=../resources/utils.js + +function fetchNoCors(url, isOpaqueFiltered) { + var urlQuery = "?pipe=header(x-is-filtered,value)" + promise_test(function(test) { + if (isOpaqueFiltered) + return fetch(url + urlQuery, {"mode": "no-cors"}).then(function(resp) { + assert_equals(resp.status, 0, "Opaque filter: status is 0"); + assert_equals(resp.statusText, "", "Opaque filter: statusText is \"\""); + assert_equals(resp.url, "", "Opaque filter: url is \"\""); + assert_equals(resp.type , "opaque", "Opaque filter: response's type is opaque"); + assert_equals(resp.headers.get("x-is-filtered"), null, "Header x-is-filtered is filtered"); + }); + else + return fetch(url + urlQuery, {"mode": "no-cors"}).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + assert_equals(resp.headers.get("x-is-filtered"), "value", "Header x-is-filtered is not filtered"); + }); + }, "Fetch "+ url + " with no-cors mode"); +} + +fetchNoCors(RESOURCES_DIR + "top.txt", false); +fetchNoCors("http://{{host}}:{{ports[http][0]}}/fetch/api/resources/top.txt", false); +fetchNoCors("https://{{host}}:{{ports[https][0]}}/fetch/api/resources/top.txt", true); +fetchNoCors("http://{{host}}:{{ports[http][1]}}/fetch/api/resources/top.txt", true); + +done(); + diff --git a/test/wpt/tests/fetch/api/basic/mode-same-origin.any.js b/test/wpt/tests/fetch/api/basic/mode-same-origin.any.js new file mode 100644 index 0000000..1457702 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/mode-same-origin.any.js @@ -0,0 +1,28 @@ +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function fetchSameOrigin(url, shouldPass) { + promise_test(function(test) { + if (shouldPass) + return fetch(url , {"mode": "same-origin"}).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + }); + else + return promise_rejects_js(test, TypeError, fetch(url, {mode: "same-origin"})); + }, "Fetch "+ url + " with same-origin mode"); +} 
+ +var host_info = get_host_info(); + +fetchSameOrigin(RESOURCES_DIR + "top.txt", true); +fetchSameOrigin(host_info.HTTP_ORIGIN + "/fetch/api/resources/top.txt", true); +fetchSameOrigin(host_info.HTTPS_ORIGIN + "/fetch/api/resources/top.txt", false); +fetchSameOrigin(host_info.HTTP_REMOTE_ORIGIN + "/fetch/api/resources/top.txt", false); + +var redirPath = dirname(location.pathname) + RESOURCES_DIR + "redirect.py?location="; + +fetchSameOrigin(redirPath + RESOURCES_DIR + "top.txt", true); +fetchSameOrigin(redirPath + host_info.HTTP_ORIGIN + "/fetch/api/resources/top.txt", true); +fetchSameOrigin(redirPath + host_info.HTTPS_ORIGIN + "/fetch/api/resources/top.txt", false); +fetchSameOrigin(redirPath + host_info.HTTP_REMOTE_ORIGIN + "/fetch/api/resources/top.txt", false); diff --git a/test/wpt/tests/fetch/api/basic/referrer.any.js b/test/wpt/tests/fetch/api/basic/referrer.any.js new file mode 100644 index 0000000..85745e6 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/referrer.any.js @@ -0,0 +1,29 @@ +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function runTest(url, init, expectedReferrer, title) { + promise_test(function(test) { + url += (url.indexOf('?') !== -1 ? '&' : '?') + "headers=referer&cors"; + + return fetch(url , init).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.headers.get("x-request-referer"), expectedReferrer, "Request's referrer is correct"); + }); + }, title); +} + +var fetchedUrl = RESOURCES_DIR + "inspect-headers.py"; +var corsFetchedUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py"; +var redirectUrl = RESOURCES_DIR + "redirect.py?location=" ; +var corsRedirectUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "redirect.py?location="; + +runTest(fetchedUrl, { referrerPolicy: "origin-when-cross-origin"}, location.toString(), "origin-when-cross-origin policy on a same-origin URL"); +runTest(corsFetchedUrl, { referrerPolicy: "origin-when-cross-origin"}, get_host_info().HTTP_ORIGIN + "/", "origin-when-cross-origin policy on a cross-origin URL"); +runTest(redirectUrl + corsFetchedUrl, { referrerPolicy: "origin-when-cross-origin"}, get_host_info().HTTP_ORIGIN + "/", "origin-when-cross-origin policy on a cross-origin URL after same-origin redirection"); +runTest(corsRedirectUrl + fetchedUrl, { referrerPolicy: "origin-when-cross-origin"}, get_host_info().HTTP_ORIGIN + "/", "origin-when-cross-origin policy on a same-origin URL after cross-origin redirection"); + + +var referrerUrlWithCredentials = get_host_info().HTTP_ORIGIN.replace("http://", "http://username:password@"); +runTest(fetchedUrl, {referrer: referrerUrlWithCredentials}, get_host_info().HTTP_ORIGIN + "/", "Referrer with credentials should be stripped"); +var referrerUrlWithFragmentIdentifier = get_host_info().HTTP_ORIGIN + "#fragmentIdentifier"; +runTest(fetchedUrl, {referrer: referrerUrlWithFragmentIdentifier}, get_host_info().HTTP_ORIGIN + "/", "Referrer with fragment ID should be stripped"); diff --git a/test/wpt/tests/fetch/api/basic/request-forbidden-headers.any.js b/test/wpt/tests/fetch/api/basic/request-forbidden-headers.any.js new file mode 100644 index 0000000..511ce60 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-forbidden-headers.any.js @@ -0,0 +1,100 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function requestForbiddenHeaders(desc, forbiddenHeaders) { + var url = 
RESOURCES_DIR + "inspect-headers.py"; + var requestInit = {"headers": forbiddenHeaders} + var urlParameters = "?headers=" + Object.keys(forbiddenHeaders).join("|"); + + promise_test(function(test){ + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + for (var header in forbiddenHeaders) + assert_not_equals(resp.headers.get("x-request-" + header), forbiddenHeaders[header], header + " does not have the value we defined"); + }); + }, desc); +} + +function requestValidOverrideHeaders(desc, validHeaders) { + var url = RESOURCES_DIR + "inspect-headers.py"; + var requestInit = {"headers": validHeaders} + var urlParameters = "?headers=" + Object.keys(validHeaders).join("|"); + + promise_test(function(test){ + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + for (var header in validHeaders) + assert_equals(resp.headers.get("x-request-" + header), validHeaders[header], header + "is not skipped for non-forbidden methods"); + }); + }, desc); +} + +requestForbiddenHeaders("Accept-Charset is a forbidden request header", {"Accept-Charset": "utf-8"}); +requestForbiddenHeaders("Accept-Encoding is a forbidden request header", {"Accept-Encoding": ""}); + +requestForbiddenHeaders("Access-Control-Request-Headers is a forbidden request header", {"Access-Control-Request-Headers": ""}); +requestForbiddenHeaders("Access-Control-Request-Method is a forbidden request header", {"Access-Control-Request-Method": ""}); +requestForbiddenHeaders( + 'Access-Control-Request-Private-Network is a forbidden request header', + {'Access-Control-Request-Private-Network': ''}); +requestForbiddenHeaders("Connection is a forbidden request header", {"Connection": "close"}); +requestForbiddenHeaders("Content-Length is a forbidden request header", {"Content-Length": "42"}); +requestForbiddenHeaders("Cookie is a forbidden request header", {"Cookie": "cookie=none"}); +requestForbiddenHeaders("Cookie2 is a forbidden request header", {"Cookie2": "cookie2=none"}); +requestForbiddenHeaders("Date is a forbidden request header", {"Date": "Wed, 04 May 1988 22:22:22 GMT"}); +requestForbiddenHeaders("DNT is a forbidden request header", {"DNT": "4"}); +requestForbiddenHeaders("Expect is a forbidden request header", {"Expect": "100-continue"}); +requestForbiddenHeaders("Host is a forbidden request header", {"Host": "http://wrong-host.com"}); +requestForbiddenHeaders("Keep-Alive is a forbidden request header", {"Keep-Alive": "timeout=15"}); +requestForbiddenHeaders("Origin is a forbidden request header", {"Origin": "http://wrong-origin.com"}); +requestForbiddenHeaders("Referer is a forbidden request header", {"Referer": "http://wrong-referer.com"}); +requestForbiddenHeaders("TE is a forbidden request header", {"TE": "trailers"}); +requestForbiddenHeaders("Trailer is a forbidden request header", {"Trailer": "Accept"}); +requestForbiddenHeaders("Transfer-Encoding is a forbidden request header", {"Transfer-Encoding": "chunked"}); +requestForbiddenHeaders("Upgrade is a forbidden request header", {"Upgrade": "HTTP/2.0"}); +requestForbiddenHeaders("Via is a forbidden request header", {"Via": "1.1 nowhere.com"}); +requestForbiddenHeaders("Proxy- is a forbidden request header", {"Proxy-": "value"}); +requestForbiddenHeaders("Proxy-Test is a forbidden request header", 
{"Proxy-Test": "value"}); +requestForbiddenHeaders("Sec- is a forbidden request header", {"Sec-": "value"}); +requestForbiddenHeaders("Sec-Test is a forbidden request header", {"Sec-Test": "value"}); + +let forbiddenMethods = [ + "TRACE", + "TRACK", + "CONNECT", + "trace", + "track", + "connect", + "trace,", + "GET,track ", + " connect", +]; + +let overrideHeaders = [ + "x-http-method-override", + "x-http-method", + "x-method-override", + "X-HTTP-METHOD-OVERRIDE", + "X-HTTP-METHOD", + "X-METHOD-OVERRIDE", +]; + +for (forbiddenMethod of forbiddenMethods) { + for (overrideHeader of overrideHeaders) { + requestForbiddenHeaders(`header ${overrideHeader} is forbidden to use value ${forbiddenMethod}`, {[overrideHeader]: forbiddenMethod}); + } +} + +let permittedValues = [ + "GETTRACE", + "GET", + "\",TRACE\",", +]; + +for (permittedValue of permittedValues) { + for (overrideHeader of overrideHeaders) { + requestValidOverrideHeaders(`header ${overrideHeader} is allowed to use value ${permittedValue}`, {[overrideHeader]: permittedValue}); + } +} diff --git a/test/wpt/tests/fetch/api/basic/request-head.any.js b/test/wpt/tests/fetch/api/basic/request-head.any.js new file mode 100644 index 0000000..e0b6afa --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-head.any.js @@ -0,0 +1,6 @@ +// META: global=window,worker + +promise_test(function(test) { + var requestInit = {"method": "HEAD", "body": "test"}; + return promise_rejects_js(test, TypeError, fetch(".", requestInit)); +}, "Fetch with HEAD with body"); diff --git a/test/wpt/tests/fetch/api/basic/request-headers-case.any.js b/test/wpt/tests/fetch/api/basic/request-headers-case.any.js new file mode 100644 index 0000000..4c10e71 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-headers-case.any.js @@ -0,0 +1,13 @@ +// META: global=window,worker + +promise_test(() => { + return fetch("/xhr/resources/echo-headers.py", {headers: [["THIS-is-A-test", 1], ["THIS-IS-A-TEST", 2]] }).then(res => res.text()).then(body => { + assert_regexp_match(body, /THIS-is-A-test: 1, 2/) + }) +}, "Multiple headers with the same name, different case (THIS-is-A-test first)") + +promise_test(() => { + return fetch("/xhr/resources/echo-headers.py", {headers: [["THIS-IS-A-TEST", 1], ["THIS-is-A-test", 2]] }).then(res => res.text()).then(body => { + assert_regexp_match(body, /THIS-IS-A-TEST: 1, 2/) + }) +}, "Multiple headers with the same name, different case (THIS-IS-A-TEST first)") diff --git a/test/wpt/tests/fetch/api/basic/request-headers-nonascii.any.js b/test/wpt/tests/fetch/api/basic/request-headers-nonascii.any.js new file mode 100644 index 0000000..4a9a801 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-headers-nonascii.any.js @@ -0,0 +1,29 @@ +// META: global=window,worker + +// This tests characters that are not +// https://infra.spec.whatwg.org/#ascii-code-point +// but are still +// https://infra.spec.whatwg.org/#byte-value +// in request header values. +// Such request header values are valid and thus sent to servers. +// Characters outside the #byte-value range are tested e.g. in +// fetch/api/headers/headers-errors.html. 
+ +promise_test(() => { + return fetch( + "../resources/inspect-headers.py?headers=accept|x-test", + {headers: { + "Accept": "before-æøå-after", + "X-Test": "before-ß-after" + }}) + .then(res => { + assert_equals( + res.headers.get("x-request-accept"), + "before-æøå-after", + "Accept Header"); + assert_equals( + res.headers.get("x-request-x-test"), + "before-ß-after", + "X-Test Header"); + }); +}, "Non-ascii bytes in request headers"); diff --git a/test/wpt/tests/fetch/api/basic/request-headers.any.js b/test/wpt/tests/fetch/api/basic/request-headers.any.js new file mode 100644 index 0000000..ac54256 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-headers.any.js @@ -0,0 +1,82 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function checkContentType(contentType, body) +{ + if (self.FormData && body instanceof self.FormData) { + assert_true(contentType.startsWith("multipart/form-data; boundary="), "Request should have header content-type starting with multipart/form-data; boundary=, but got " + contentType); + return; + } + + var expectedContentType = "text/plain;charset=UTF-8"; + if(body === null || body instanceof ArrayBuffer || body.buffer instanceof ArrayBuffer) + expectedContentType = null; + else if (body instanceof Blob) + expectedContentType = body.type ? body.type : null; + else if (body instanceof URLSearchParams) + expectedContentType = "application/x-www-form-urlencoded;charset=UTF-8"; + + assert_equals(contentType , expectedContentType, "Request should have header content-type: " + expectedContentType); +} + +function requestHeaders(desc, url, method, body, expectedOrigin, expectedContentLength) { + var urlParameters = "?headers=origin|user-agent|accept-charset|content-length|content-type"; + var requestInit = {"method": method} + promise_test(function(test){ + if (typeof body === "function") + body = body(); + if (body) + requestInit["body"] = body; + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + assert_true(resp.headers.has("x-request-user-agent"), "Request has header user-agent"); + assert_false(resp.headers.has("accept-charset"), "Request has header accept-charset"); + assert_equals(resp.headers.get("x-request-origin") , expectedOrigin, "Request should have header origin: " + expectedOrigin); + if (expectedContentLength !== undefined) + assert_equals(resp.headers.get("x-request-content-length") , expectedContentLength, "Request should have header content-length: " + expectedContentLength); + checkContentType(resp.headers.get("x-request-content-type"), body); + }); + }, desc); +} + +var url = RESOURCES_DIR + "inspect-headers.py" + +requestHeaders("Fetch with GET", url, "GET", null, null, null); +requestHeaders("Fetch with HEAD", url, "HEAD", null, null, null); +requestHeaders("Fetch with PUT without body", url, "POST", null, location.origin, "0"); +requestHeaders("Fetch with PUT with body", url, "PUT", "Request's body", location.origin, "14"); +requestHeaders("Fetch with POST without body", url, "POST", null, location.origin, "0"); +requestHeaders("Fetch with POST with text body", url, "POST", "Request's body", location.origin, "14"); +requestHeaders("Fetch with POST with FormData body", url, "POST", function() { return new FormData(); }, location.origin); +requestHeaders("Fetch with POST with URLSearchParams body", url, "POST", function() { return new URLSearchParams("name=value"); }, 
location.origin, "10"); +requestHeaders("Fetch with POST with Blob body", url, "POST", new Blob(["Test"]), location.origin, "4"); +requestHeaders("Fetch with POST with ArrayBuffer body", url, "POST", new ArrayBuffer(4), location.origin, "4"); +requestHeaders("Fetch with POST with Uint8Array body", url, "POST", new Uint8Array(4), location.origin, "4"); +requestHeaders("Fetch with POST with Int8Array body", url, "POST", new Int8Array(4), location.origin, "4"); +requestHeaders("Fetch with POST with Float32Array body", url, "POST", new Float32Array(1), location.origin, "4"); +requestHeaders("Fetch with POST with Float64Array body", url, "POST", new Float64Array(1), location.origin, "8"); +requestHeaders("Fetch with POST with DataView body", url, "POST", new DataView(new ArrayBuffer(8), 0, 4), location.origin, "4"); +requestHeaders("Fetch with POST with Blob body with mime type", url, "POST", new Blob(["Test"], { type: "text/maybe" }), location.origin, "4"); +requestHeaders("Fetch with Chicken", url, "Chicken", null, location.origin, null); +requestHeaders("Fetch with Chicken with body", url, "Chicken", "Request's body", location.origin, "14"); + +function requestOriginHeader(method, mode, needsOrigin) { + promise_test(function(test){ + return fetch(url + "?headers=origin", {method:method, mode:mode}).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + if(needsOrigin) + assert_equals(resp.headers.get("x-request-origin") , location.origin, "Request should have an Origin header with origin: " + location.origin); + else + assert_equals(resp.headers.get("x-request-origin"), null, "Request should not have an Origin header") + }); + }, "Fetch with " + method + " and mode \"" + mode + "\" " + (needsOrigin ? 
"needs" : "does not need") + " an Origin header"); +} + +requestOriginHeader("GET", "cors", false); +requestOriginHeader("POST", "same-origin", true); +requestOriginHeader("POST", "no-cors", true); +requestOriginHeader("PUT", "same-origin", true); +requestOriginHeader("TacO", "same-origin", true); +requestOriginHeader("TacO", "cors", true); diff --git a/test/wpt/tests/fetch/api/basic/request-referrer-redirected-worker.html b/test/wpt/tests/fetch/api/basic/request-referrer-redirected-worker.html new file mode 100644 index 0000000..bdea1e1 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-referrer-redirected-worker.html @@ -0,0 +1,17 @@ + + + + + Fetch in worker: referrer header + + + + + + + diff --git a/test/wpt/tests/fetch/api/basic/request-referrer.any.js b/test/wpt/tests/fetch/api/basic/request-referrer.any.js new file mode 100644 index 0000000..0c33576 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-referrer.any.js @@ -0,0 +1,24 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function testReferrer(referrer, expected, desc) { + promise_test(function(test) { + var url = RESOURCES_DIR + "inspect-headers.py?headers=referer" + var req = new Request(url, { referrer: referrer }); + return fetch(req).then(function(resp) { + var actual = resp.headers.get("x-request-referer"); + if (expected) { + assert_equals(actual, expected, "request's referer should be: " + expected); + return; + } + if (actual) { + assert_equals(actual, "", "request's referer should be empty"); + } + }); + }, desc); +} + +testReferrer("about:client", self.location.href, 'about:client referrer'); + +var fooURL = new URL("./foo", self.location).href; +testReferrer(fooURL, fooURL, 'url referrer'); diff --git a/test/wpt/tests/fetch/api/basic/request-upload.any.js b/test/wpt/tests/fetch/api/basic/request-upload.any.js new file mode 100644 index 0000000..9168aa1 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-upload.any.js @@ -0,0 +1,135 @@ +// META: global=window,worker +// META: script=../resources/utils.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js + +function testUpload(desc, url, method, createBody, expectedBody) { + const requestInit = {method}; + promise_test(function(test){ + const body = createBody(); + if (body) { + requestInit["body"] = body; + requestInit.duplex = "half"; + } + return fetch(url, requestInit).then(function(resp) { + return resp.text().then((text)=> { + assert_equals(text, expectedBody); + }); + }); + }, desc); +} + +function testUploadFailure(desc, url, method, createBody) { + const requestInit = {method}; + promise_test(t => { + const body = createBody(); + if (body) { + requestInit["body"] = body; + } + return promise_rejects_js(t, TypeError, fetch(url, requestInit)); + }, desc); +} + +const url = RESOURCES_DIR + "echo-content.py" + +testUpload("Fetch with PUT with body", url, + "PUT", + () => "Request's body", + "Request's body"); +testUpload("Fetch with POST with text body", url, + "POST", + () => "Request's body", + "Request's body"); +testUpload("Fetch with POST with URLSearchParams body", url, + "POST", + () => new URLSearchParams("name=value"), + "name=value"); +testUpload("Fetch with POST with Blob body", url, + "POST", + () => new Blob(["Test"]), + "Test"); +testUpload("Fetch with POST with ArrayBuffer body", url, + "POST", + () => new ArrayBuffer(4), + "\0\0\0\0"); +testUpload("Fetch with POST with Uint8Array body", url, + "POST", + () => new Uint8Array(4), + "\0\0\0\0"); +testUpload("Fetch with 
POST with Int8Array body", url, + "POST", + () => new Int8Array(4), + "\0\0\0\0"); +testUpload("Fetch with POST with Float32Array body", url, + "POST", + () => new Float32Array(1), + "\0\0\0\0"); +testUpload("Fetch with POST with Float64Array body", url, + "POST", + () => new Float64Array(1), + "\0\0\0\0\0\0\0\0"); +testUpload("Fetch with POST with DataView body", url, + "POST", + () => new DataView(new ArrayBuffer(8), 0, 4), + "\0\0\0\0"); +testUpload("Fetch with POST with Blob body with mime type", url, + "POST", + () => new Blob(["Test"], { type: "text/maybe" }), + "Test"); + +testUploadFailure("Fetch with POST with ReadableStream containing String", url, + "POST", + () => { + return new ReadableStream({start: controller => { + controller.enqueue("Test"); + controller.close(); + }}) + }); +testUploadFailure("Fetch with POST with ReadableStream containing null", url, + "POST", + () => { + return new ReadableStream({start: controller => { + controller.enqueue(null); + controller.close(); + }}) + }); +testUploadFailure("Fetch with POST with ReadableStream containing number", url, + "POST", + () => { + return new ReadableStream({start: controller => { + controller.enqueue(99); + controller.close(); + }}) + }); +testUploadFailure("Fetch with POST with ReadableStream containing ArrayBuffer", url, + "POST", + () => { + return new ReadableStream({start: controller => { + controller.enqueue(new ArrayBuffer()); + controller.close(); + }}) + }); +testUploadFailure("Fetch with POST with ReadableStream containing Blob", url, + "POST", + () => { + return new ReadableStream({start: controller => { + controller.enqueue(new Blob()); + controller.close(); + }}) + }); + +promise_test(async (test) => { + const resp = await fetch( + "/fetch/connection-pool/resources/network-partition-key.py?" + + `status=421&uuid=${token()}&partition_id=${get_host_info().ORIGIN}` + + `&dispatch=check_partition&addcounter=true`, + {method: "POST", body: "foobar"}); + assert_equals(resp.status, 421); + const text = await resp.text(); + assert_equals(text, "ok. Request was sent 2 times. 
2 connections were created."); +}, "Fetch with POST with text body on 421 response should be retried once on new connection."); + +promise_test(async (test) => { + const body = new ReadableStream({start: c => c.close()}); + await promise_rejects_js(test, TypeError, fetch('/', {method: 'POST', body})); +}, "Streaming upload shouldn't work on Http/1.1."); diff --git a/test/wpt/tests/fetch/api/basic/request-upload.h2.any.js b/test/wpt/tests/fetch/api/basic/request-upload.h2.any.js new file mode 100644 index 0000000..eedc2bf --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/request-upload.h2.any.js @@ -0,0 +1,186 @@ +// META: global=window,worker +// META: script=../resources/utils.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js + +const duplex = "half"; + +async function assertUpload(url, method, createBody, expectedBody) { + const requestInit = {method}; + const body = createBody(); + if (body) { + requestInit["body"] = body; + requestInit.duplex = "half"; + } + const resp = await fetch(url, requestInit); + const text = await resp.text(); + assert_equals(text, expectedBody); +} + +function testUpload(desc, url, method, createBody, expectedBody) { + promise_test(async () => { + await assertUpload(url, method, createBody, expectedBody); + }, desc); +} + +function createStream(chunks) { + return new ReadableStream({ + start: (controller) => { + for (const chunk of chunks) { + controller.enqueue(chunk); + } + controller.close(); + } + }); +} + +const url = RESOURCES_DIR + "echo-content.h2.py" + +testUpload("Fetch with POST with empty ReadableStream", url, + "POST", + () => { + return new ReadableStream({start: controller => { + controller.close(); + }}) + }, + ""); + +testUpload("Fetch with POST with ReadableStream", url, + "POST", + () => { + return new ReadableStream({start: controller => { + const encoder = new TextEncoder(); + controller.enqueue(encoder.encode("Test")); + controller.close(); + }}) + }, + "Test"); + +promise_test(async (test) => { + const body = new ReadableStream({start: controller => { + const encoder = new TextEncoder(); + controller.enqueue(encoder.encode("Test")); + controller.close(); + }}); + const resp = await fetch( + "/fetch/connection-pool/resources/network-partition-key.py?" + + `status=421&uuid=${token()}&partition_id=${self.origin}` + + `&dispatch=check_partition&addcounter=true`, + {method: "POST", body: body, duplex}); + assert_equals(resp.status, 421); + const text = await resp.text(); + assert_equals(text, "ok. Request was sent 1 times. 
1 connections were created."); +}, "Fetch with POST with ReadableStream on 421 response should return the response and not retry."); + +promise_test(async (test) => { + const request = new Request('', { + body: new ReadableStream(), + method: 'POST', + duplex, + }); + + assert_equals(request.headers.get('Content-Type'), null, `Request should not have a content-type set`); + + const response = await fetch('data:a/a;charset=utf-8,test', { + method: 'POST', + body: new ReadableStream(), + duplex, + }); + + assert_equals(await response.text(), 'test', `Response has correct body`); +}, "Feature detect for POST with ReadableStream"); + +promise_test(async (test) => { + const request = new Request('data:a/a;charset=utf-8,test', { + body: new ReadableStream(), + method: 'POST', + duplex, + }); + + assert_equals(request.headers.get('Content-Type'), null, `Request should not have a content-type set`); + const response = await fetch(request); + assert_equals(await response.text(), 'test', `Response has correct body`); +}, "Feature detect for POST with ReadableStream, using request object"); + +test(() => { + let duplexAccessed = false; + + const request = new Request("", { + body: new ReadableStream(), + method: "POST", + get duplex() { + duplexAccessed = true; + return "half"; + }, + }); + + assert_equals( + request.headers.get("Content-Type"), + null, + `Request should not have a content-type set` + ); + assert_true(duplexAccessed, `duplex dictionary property should be accessed`); +}, "Synchronous feature detect"); + +// This asserts that the synchronous feature detect isn't broken by a partial implementation. +// An earlier feature detect was broken by Safari implementing streaming bodies as part of Request, +// but it failed when passed to fetch(). +// This test ensures that UAs must not implement RequestInit.duplex and streaming request bodies without also implementing the fetch() parts. 
+promise_test(async () => { + let duplexAccessed = false; + + const request = new Request("", { + body: new ReadableStream(), + method: "POST", + get duplex() { + duplexAccessed = true; + return "half"; + }, + }); + + const supported = + request.headers.get("Content-Type") === null && duplexAccessed; + + // If the feature detect fails, assume the browser is being truthful (other tests pick up broken cases here) + if (!supported) return false; + + await assertUpload( + url, + "POST", + () => + new ReadableStream({ + start: (controller) => { + const encoder = new TextEncoder(); + controller.enqueue(encoder.encode("Test")); + controller.close(); + }, + }), + "Test" + ); +}, "Synchronous feature detect fails if feature unsupported"); + +promise_test(async (t) => { + const body = createStream(["hello"]); + const method = "POST"; + await promise_rejects_js(t, TypeError, fetch(url, { method, body, duplex })); +}, "Streaming upload with body containing a String"); + +promise_test(async (t) => { + const body = createStream([null]); + const method = "POST"; + await promise_rejects_js(t, TypeError, fetch(url, { method, body, duplex })); +}, "Streaming upload with body containing null"); + +promise_test(async (t) => { + const body = createStream([33]); + const method = "POST"; + await promise_rejects_js(t, TypeError, fetch(url, { method, body, duplex })); +}, "Streaming upload with body containing a number"); + +promise_test(async (t) => { + const url = "/fetch/api/resources/authentication.py?realm=test"; + const body = createStream([]); + const method = "POST"; + await promise_rejects_js(t, TypeError, fetch(url, { method, body, duplex })); +}, "Streaming upload should fail on a 401 response"); + diff --git a/test/wpt/tests/fetch/api/basic/response-null-body.any.js b/test/wpt/tests/fetch/api/basic/response-null-body.any.js new file mode 100644 index 0000000..bb05892 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/response-null-body.any.js @@ -0,0 +1,38 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +const nullBodyStatus = [204, 205, 304]; +const methods = ["GET", "POST", "OPTIONS"]; + +for (const status of nullBodyStatus) { + for (const method of methods) { + promise_test( + async () => { + const url = + `${RESOURCES_DIR}status.py?code=${status}&content=hello-world`; + const resp = await fetch(url, { method }); + assert_equals(resp.status, status); + assert_equals(resp.body, null, "the body should be null"); + const text = await resp.text(); + assert_equals(text, "", "null bodies result in empty text"); + }, + `Response.body is null for responses with status=${status} (method=${method})`, + ); + } +} + +promise_test(async () => { + const url = `${RESOURCES_DIR}status.py?code=200&content=hello-world`; + const resp = await fetch(url, { method: "HEAD" }); + assert_equals(resp.status, 200); + assert_equals(resp.body, null, "the body should be null"); + const text = await resp.text(); + assert_equals(text, "", "null bodies result in empty text"); +}, `Response.body is null for responses with method=HEAD`); + +promise_test(async (t) => { + const integrity = "sha384-UT6f7WCFp32YJnp1is4l/ZYnOeQKpE8xjmdkLOwZ3nIP+tmT2aMRFQGJomjVf5cE"; + const url = `${RESOURCES_DIR}status.py?code=204&content=hello-world`; + const promise = fetch(url, { method: "GET", integrity }); + promise_rejects_js(t, TypeError, promise); +}, "Null body status with subresource integrity should abort"); diff --git a/test/wpt/tests/fetch/api/basic/response-url.sub.any.js 
b/test/wpt/tests/fetch/api/basic/response-url.sub.any.js new file mode 100644 index 0000000..0d123c4 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/response-url.sub.any.js @@ -0,0 +1,16 @@ +function checkResponseURL(fetchedURL, expectedURL) +{ + promise_test(function() { + return fetch(fetchedURL).then(function(response) { + assert_equals(response.url, expectedURL); + }); + }, "Testing response url getter with " +fetchedURL); +} + +var baseURL = "http://{{host}}:{{ports[http][0]}}"; +checkResponseURL(baseURL + "/ada", baseURL + "/ada"); +checkResponseURL(baseURL + "/#", baseURL + "/"); +checkResponseURL(baseURL + "/#ada", baseURL + "/"); +checkResponseURL(baseURL + "#ada", baseURL + "/"); + +done(); diff --git a/test/wpt/tests/fetch/api/basic/scheme-about.any.js b/test/wpt/tests/fetch/api/basic/scheme-about.any.js new file mode 100644 index 0000000..9ef4418 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/scheme-about.any.js @@ -0,0 +1,26 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function checkNetworkError(url, method) { + method = method || "GET"; + const desc = "Fetching " + url.substring(0, 45) + " with method " + method + " is KO" + promise_test(function(test) { + var promise = fetch(url, { method: method }); + return promise_rejects_js(test, TypeError, promise); + }, desc); +} + +checkNetworkError("about:blank", "GET"); +checkNetworkError("about:blank", "PUT"); +checkNetworkError("about:blank", "POST"); +checkNetworkError("about:invalid.com"); +checkNetworkError("about:config"); +checkNetworkError("about:unicorn"); + +promise_test(function(test) { + var promise = fetch("about:blank", { + "method": "GET", + "Range": "bytes=1-10" + }); + return promise_rejects_js(test, TypeError, promise); +}, "Fetching about:blank with range header does not affect behavior"); diff --git a/test/wpt/tests/fetch/api/basic/scheme-blob.sub.any.js b/test/wpt/tests/fetch/api/basic/scheme-blob.sub.any.js new file mode 100644 index 0000000..8afdc03 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/scheme-blob.sub.any.js @@ -0,0 +1,125 @@ +// META: script=../resources/utils.js + +function checkFetchResponse(url, data, mime, size, desc) { + promise_test(function(test) { + size = size.toString(); + return fetch(url).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), mime, "Content-Type is " + resp.headers.get("Content-Type")); + assert_equals(resp.headers.get("Content-Length"), size, "Content-Length is " + resp.headers.get("Content-Length")); + return resp.text(); + }).then(function(bodyAsText) { + assert_equals(bodyAsText, data, "Response's body is " + data); + }); + }, desc); +} + +var blob = new Blob(["Blob's data"], { "type" : "text/plain" }); +checkFetchResponse(URL.createObjectURL(blob), "Blob's data", "text/plain", blob.size, + "Fetching [GET] URL.createObjectURL(blob) is OK"); + +function checkKoUrl(url, method, desc) { + promise_test(function(test) { + var promise = fetch(url, {"method": method}); + return promise_rejects_js(test, TypeError, promise); + }, desc); +} + +var blob2 = new Blob(["Blob's data"], { "type" : "text/plain" }); +checkKoUrl("blob:http://{{domains[www]}}:{{ports[http][0]}}/", "GET", + "Fetching [GET] blob:http://{{domains[www]}}:{{ports[http][0]}}/ is KO"); + +var invalidRequestMethods = [ + "POST", + "OPTIONS", + "HEAD", + "PUT", + "DELETE", + "INVALID", +]; +invalidRequestMethods.forEach(function(method) 
{ + checkKoUrl(URL.createObjectURL(blob2), method, "Fetching [" + method + "] URL.createObjectURL(blob) is KO"); +}); + +checkKoUrl("blob:not-backed-by-a-blob/", "GET", + "Fetching [GET] blob:not-backed-by-a-blob/ is KO"); + +let empty_blob = new Blob([]); +checkFetchResponse(URL.createObjectURL(empty_blob), "", "", 0, + "Fetching URL.createObjectURL(empty_blob) is OK"); + +let empty_type_blob = new Blob([], {type: ""}); +checkFetchResponse(URL.createObjectURL(empty_type_blob), "", "", 0, + "Fetching URL.createObjectURL(empty_type_blob) is OK"); + +let empty_data_blob = new Blob([], {type: "text/plain"}); +checkFetchResponse(URL.createObjectURL(empty_data_blob), "", "text/plain", 0, + "Fetching URL.createObjectURL(empty_data_blob) is OK"); + +let invalid_type_blob = new Blob([], {type: "invalid"}); +checkFetchResponse(URL.createObjectURL(invalid_type_blob), "", "", 0, + "Fetching URL.createObjectURL(invalid_type_blob) is OK"); + +promise_test(function(test) { + return fetch("/images/blue.png").then(function(resp) { + return resp.arrayBuffer(); + }).then(function(image_buffer) { + let blob = new Blob([image_buffer]); + return fetch(URL.createObjectURL(blob)).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), "", "Content-Type is " + resp.headers.get("Content-Type")); + }) + }); +}, "Blob content is not sniffed for a content type [image/png]"); + +let simple_xml_string = ''; +let xml_blob_no_type = new Blob([simple_xml_string]); +checkFetchResponse(URL.createObjectURL(xml_blob_no_type), simple_xml_string, "", 45, + "Blob content is not sniffed for a content type [text/xml]"); + +let simple_text_string = 'Hello, World!'; +promise_test(function(test) { + let blob = new Blob([simple_text_string], {"type": "text/plain"}); + let slice = blob.slice(7, simple_text_string.length, "\0"); + return fetch(URL.createObjectURL(slice)).then(function (resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), ""); + assert_equals(resp.headers.get("Content-Length"), "6"); + return resp.text(); + }).then(function(bodyAsText) { + assert_equals(bodyAsText, "World!"); + }); +}, "Set content type to the empty string for slice with invalid content type"); + +promise_test(function(test) { + let blob = new Blob([simple_text_string], {"type": "text/plain"}); + let slice = blob.slice(7, simple_text_string.length, "\0"); + return fetch(URL.createObjectURL(slice)).then(function (resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), ""); + assert_equals(resp.headers.get("Content-Length"), "6"); + return resp.text(); + }).then(function(bodyAsText) { + assert_equals(bodyAsText, "World!"); + }); +}, "Set content type to the empty string for slice with no content type "); + +promise_test(function(test) { + let blob = new Blob([simple_xml_string]); + let slice = blob.slice(0, 38); + return fetch(URL.createObjectURL(slice)).then(function (resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), ""); + assert_equals(resp.headers.get("Content-Length"), "38"); + return resp.text(); + }).then(function(bodyAsText) { + 
assert_equals(bodyAsText, ''); + }); +}, "Blob.slice should not sniff the content for a content type"); + +done(); diff --git a/test/wpt/tests/fetch/api/basic/scheme-data.any.js b/test/wpt/tests/fetch/api/basic/scheme-data.any.js new file mode 100644 index 0000000..55df43b --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/scheme-data.any.js @@ -0,0 +1,43 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function checkFetchResponse(url, data, mime, fetchMode, method) { + var cut = (url.length >= 40) ? "[...]" : ""; + var desc = "Fetching " + (method ? "[" + method + "] " : "") + url.substring(0, 40) + cut + " is OK"; + var init = {"method": method || "GET"}; + if (fetchMode) { + init.mode = fetchMode; + desc += " (" + fetchMode + ")"; + } + promise_test(function(test) { + return fetch(url, init).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.statusText, "OK", "HTTP statusText is OK"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), mime, "Content-Type is " + resp.headers.get("Content-Type")); + return resp.text(); + }).then(function(body) { + assert_equals(body, data, "Response's body is correct"); + }); + }, desc); +} + +checkFetchResponse("data:,response%27s%20body", "response's body", "text/plain;charset=US-ASCII"); +checkFetchResponse("data:,response%27s%20body", "response's body", "text/plain;charset=US-ASCII", "same-origin"); +checkFetchResponse("data:,response%27s%20body", "response's body", "text/plain;charset=US-ASCII", "cors"); +checkFetchResponse("data:text/plain;base64,cmVzcG9uc2UncyBib2R5", "response's body", "text/plain"); +checkFetchResponse("data:image/png;base64,cmVzcG9uc2UncyBib2R5", + "response's body", + "image/png"); +checkFetchResponse("data:,response%27s%20body", "response's body", "text/plain;charset=US-ASCII", null, "POST"); +checkFetchResponse("data:,response%27s%20body", "", "text/plain;charset=US-ASCII", null, "HEAD"); + +function checkKoUrl(url, method, desc) { + var cut = (url.length >= 40) ? 
"[...]" : ""; + desc = "Fetching [" + method + "] " + url.substring(0, 45) + cut + " is KO" + promise_test(function(test) { + return promise_rejects_js(test, TypeError, fetch(url, {"method": method})); + }, desc); +} + +checkKoUrl("data:notAdataUrl.com", "GET"); diff --git a/test/wpt/tests/fetch/api/basic/scheme-others.sub.any.js b/test/wpt/tests/fetch/api/basic/scheme-others.sub.any.js new file mode 100644 index 0000000..550f69c --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/scheme-others.sub.any.js @@ -0,0 +1,31 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function checkKoUrl(url, desc) { + if (!desc) + desc = "Fetching " + url.substring(0, 45) + " is KO" + promise_test(function(test) { + var promise = fetch(url); + return promise_rejects_js(test, TypeError, promise); + }, desc); +} + +var urlWithoutScheme = "://{{host}}:{{ports[http][0]}}/"; +checkKoUrl("aaa" + urlWithoutScheme); +checkKoUrl("cap" + urlWithoutScheme); +checkKoUrl("cid" + urlWithoutScheme); +checkKoUrl("dav" + urlWithoutScheme); +checkKoUrl("dict" + urlWithoutScheme); +checkKoUrl("dns" + urlWithoutScheme); +checkKoUrl("geo" + urlWithoutScheme); +checkKoUrl("im" + urlWithoutScheme); +checkKoUrl("imap" + urlWithoutScheme); +checkKoUrl("ipp" + urlWithoutScheme); +checkKoUrl("ldap" + urlWithoutScheme); +checkKoUrl("mailto" + urlWithoutScheme); +checkKoUrl("nfs" + urlWithoutScheme); +checkKoUrl("pop" + urlWithoutScheme); +checkKoUrl("rtsp" + urlWithoutScheme); +checkKoUrl("snmp" + urlWithoutScheme); + +done(); diff --git a/test/wpt/tests/fetch/api/basic/status.h2.any.js b/test/wpt/tests/fetch/api/basic/status.h2.any.js new file mode 100644 index 0000000..99fec88 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/status.h2.any.js @@ -0,0 +1,17 @@ +// See also /xhr/status.h2.window.js + +[ + 200, + 210, + 400, + 404, + 410, + 500, + 502 +].forEach(status => { + promise_test(async t => { + const response = await fetch("/xhr/resources/status.py?code=" + status); + assert_equals(response.status, status, "status should be " + status); + assert_equals(response.statusText, "", "statusText should be the empty string"); + }, "statusText over H2 for status " + status + " should be the empty string"); +}); diff --git a/test/wpt/tests/fetch/api/basic/stream-response.any.js b/test/wpt/tests/fetch/api/basic/stream-response.any.js new file mode 100644 index 0000000..d964dda --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/stream-response.any.js @@ -0,0 +1,40 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function streamBody(reader, test, count = 0) { + return reader.read().then(function(data) { + if (!data.done && count < 2) { + count += 1; + return streamBody(reader, test, count); + } else { + test.step(function() { + assert_true(count >= 2, "Retrieve body progressively"); + }); + } + }); +} + +//simulate streaming: +//count is large enough to let the UA deliver the body before it is completely retrieved +promise_test(function(test) { + return fetch(RESOURCES_DIR + "trickle.py?ms=30&count=100").then(function(resp) { + if (resp.body) + return streamBody(resp.body.getReader(), test); + else + test.step(function() { + assert_unreached( "Body does not exist in response"); + }); + }); +}, "Stream response's body when content-type is present"); + +// This test makes sure that the response body is not buffered if no content type is provided. 
+promise_test(function(test) { + return fetch(RESOURCES_DIR + "trickle.py?ms=300&count=10&notype=true").then(function(resp) { + if (resp.body) + return streamBody(resp.body.getReader(), test); + else + test.step(function() { + assert_unreached( "Body does not exist in response"); + }); + }); +}, "Stream response's body when content-type is not present"); diff --git a/test/wpt/tests/fetch/api/basic/stream-safe-creation.any.js b/test/wpt/tests/fetch/api/basic/stream-safe-creation.any.js new file mode 100644 index 0000000..382efc1 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/stream-safe-creation.any.js @@ -0,0 +1,54 @@ +// META: global=window,worker + +// These tests verify that stream creation is not affected by changes to +// Object.prototype. + +const creationCases = { + fetch: async () => fetch(location.href), + request: () => new Request(location.href, {method: 'POST', body: 'hi'}), + response: () => new Response('bye'), + consumeEmptyResponse: () => new Response().text(), + consumeNonEmptyResponse: () => new Response(new Uint8Array([64])).text(), + consumeEmptyRequest: () => new Request(location.href).text(), + consumeNonEmptyRequest: () => new Request(location.href, + {method: 'POST', body: 'yes'}).arrayBuffer(), +}; + +for (const creationCase of Object.keys(creationCases)) { + for (const accessorName of ['start', 'type', 'size', 'highWaterMark']) { + promise_test(async t => { + Object.defineProperty(Object.prototype, accessorName, { + get() { throw Error(`Object.prototype.${accessorName} was accessed`); }, + configurable: true + }); + t.add_cleanup(() => { + delete Object.prototype[accessorName]; + return Promise.resolve(); + }); + await creationCases[creationCase](); + }, `throwing Object.prototype.${accessorName} accessor should not affect ` + + `stream creation by '${creationCase}'`); + + promise_test(async t => { + // -1 is a convenient value which is invalid, and should cause the + // constructor to throw, for all four fields. 
+ Object.prototype[accessorName] = -1; + t.add_cleanup(() => { + delete Object.prototype[accessorName]; + return Promise.resolve(); + }); + await creationCases[creationCase](); + }, `Object.prototype.${accessorName} accessor returning invalid value ` + + `should not affect stream creation by '${creationCase}'`); + } + + promise_test(async t => { + Object.prototype.start = controller => controller.error(new Error('start')); + t.add_cleanup(() => { + delete Object.prototype.start; + return Promise.resolve(); + }); + await creationCases[creationCase](); + }, `Object.prototype.start function which errors the stream should not ` + + `affect stream creation by '${creationCase}'`); +} diff --git a/test/wpt/tests/fetch/api/basic/text-utf8.any.js b/test/wpt/tests/fetch/api/basic/text-utf8.any.js new file mode 100644 index 0000000..05c8c88 --- /dev/null +++ b/test/wpt/tests/fetch/api/basic/text-utf8.any.js @@ -0,0 +1,74 @@ +// META: title=Fetch: Request and Response text() should decode as UTF-8 +// META: global=window,worker +// META: script=../resources/utils.js + +function testTextDecoding(body, expectedText, urlParameter, title) +{ + var arrayBuffer = stringToArray(body); + + promise_test(function(test) { + var request = new Request("", {method: "POST", body: arrayBuffer}); + return request.text().then(function(value) { + assert_equals(value, expectedText, "Request.text() should decode data as UTF-8"); + }); + }, title + " with Request.text()"); + + promise_test(function(test) { + var response = new Response(arrayBuffer); + return response.text().then(function(value) { + assert_equals(value, expectedText, "Response.text() should decode data as UTF-8"); + }); + }, title + " with Response.text()"); + + promise_test(function(test) { + return fetch("../resources/status.py?code=200&type=text%2Fplain%3Bcharset%3DUTF-8&content=" + urlParameter).then(function(response) { + return response.text().then(function(value) { + assert_equals(value, expectedText, "Fetched Response.text() should decode data as UTF-8"); + }); + }); + }, title + " with fetched data (UTF-8 charset)"); + + promise_test(function(test) { + return fetch("../resources/status.py?code=200&type=text%2Fplain%3Bcharset%3DUTF-16&content=" + urlParameter).then(function(response) { + return response.text().then(function(value) { + assert_equals(value, expectedText, "Fetched Response.text() should decode data as UTF-8"); + }); + }); + }, title + " with fetched data (UTF-16 charset)"); + + promise_test(function(test) { + return new Response(body).arrayBuffer().then(function(buffer) { + assert_array_equals(new Uint8Array(buffer), encode_utf8(body), "Response.arrayBuffer() should contain data encoded as UTF-8"); + }); + }, title + " (Response object)"); + + promise_test(function(test) { + return new Request("", {method: "POST", body: body}).arrayBuffer().then(function(buffer) { + assert_array_equals(new Uint8Array(buffer), encode_utf8(body), "Request.arrayBuffer() should contain data encoded as UTF-8"); + }); + }, title + " (Request object)"); + +} + +var utf8WithBOM = "\xef\xbb\xbf\xe4\xb8\x89\xe6\x9d\x91\xe3\x81\x8b\xe3\x81\xaa\xe5\xad\x90"; +var utf8WithBOMAsURLParameter = "%EF%BB%BF%E4%B8%89%E6%9D%91%E3%81%8B%E3%81%AA%E5%AD%90"; +var utf8WithoutBOM = "\xe4\xb8\x89\xe6\x9d\x91\xe3\x81\x8b\xe3\x81\xaa\xe5\xad\x90"; +var utf8WithoutBOMAsURLParameter = "%E4%B8%89%E6%9D%91%E3%81%8B%E3%81%AA%E5%AD%90"; +var utf8Decoded = "三村かな子"; +testTextDecoding(utf8WithBOM, utf8Decoded, utf8WithBOMAsURLParameter, "UTF-8 with BOM"); 
+testTextDecoding(utf8WithoutBOM, utf8Decoded, utf8WithoutBOMAsURLParameter, "UTF-8 without BOM"); + +var utf16BEWithBOM = "\xfe\xff\x4e\x09\x67\x51\x30\x4b\x30\x6a\x5b\x50"; +var utf16BEWithBOMAsURLParameter = "%fe%ff%4e%09%67%51%30%4b%30%6a%5b%50"; +var utf16BEWithBOMDecodedAsUTF8 = "��N\tgQ0K0j[P"; +testTextDecoding(utf16BEWithBOM, utf16BEWithBOMDecodedAsUTF8, utf16BEWithBOMAsURLParameter, "UTF-16BE with BOM decoded as UTF-8"); + +var utf16LEWithBOM = "\xff\xfe\x09\x4e\x51\x67\x4b\x30\x6a\x30\x50\x5b"; +var utf16LEWithBOMAsURLParameter = "%ff%fe%09%4e%51%67%4b%30%6a%30%50%5b"; +var utf16LEWithBOMDecodedAsUTF8 = "��\tNQgK0j0P["; +testTextDecoding(utf16LEWithBOM, utf16LEWithBOMDecodedAsUTF8, utf16LEWithBOMAsURLParameter, "UTF-16LE with BOM decoded as UTF-8"); + +var utf16WithoutBOM = "\xe6\x00\xf8\x00\xe5\x00\x0a\x00\xc6\x30\xb9\x30\xc8\x30\x0a\x00"; +var utf16WithoutBOMAsURLParameter = "%E6%00%F8%00%E5%00%0A%00%C6%30%B9%30%C8%30%0A%00"; +var utf16WithoutBOMDecoded = "\ufffd\u0000\ufffd\u0000\ufffd\u0000\u000a\u0000\ufffd\u0030\ufffd\u0030\ufffd\u0030\u000a\u0000"; +testTextDecoding(utf16WithoutBOM, utf16WithoutBOMDecoded, utf16WithoutBOMAsURLParameter, "UTF-16 without BOM decoded as UTF-8"); diff --git a/test/wpt/tests/fetch/api/body/cloned-any.js b/test/wpt/tests/fetch/api/body/cloned-any.js new file mode 100644 index 0000000..2bca96c --- /dev/null +++ b/test/wpt/tests/fetch/api/body/cloned-any.js @@ -0,0 +1,50 @@ +// Changing the body after it have been passed to Response/Request +// should not change the outcome of the consumed body + +const url = 'http://a'; +const method = 'post'; + +promise_test(async t => { + const body = new FormData(); + body.set('a', '1'); + const res = new Response(body); + const req = new Request(url, { method, body }); + body.set('a', '2'); + assert_true((await res.formData()).get('a') === '1'); + assert_true((await req.formData()).get('a') === '1'); +}, 'FormData is cloned'); + +promise_test(async t => { + const body = new URLSearchParams({a: '1'}); + const res = new Response(body); + const req = new Request(url, { method, body }); + body.set('a', '2'); + assert_true((await res.formData()).get('a') === '1'); + assert_true((await req.formData()).get('a') === '1'); +}, 'URLSearchParams is cloned'); + +promise_test(async t => { + const body = new Uint8Array([97]); // a + const res = new Response(body); + const req = new Request(url, { method, body }); + body[0] = 98; // b + assert_true(await res.text() === 'a'); + assert_true(await req.text() === 'a'); +}, 'TypedArray is cloned'); + +promise_test(async t => { + const body = new Uint8Array([97]); // a + const res = new Response(body.buffer); + const req = new Request(url, { method, body: body.buffer }); + body[0] = 98; // b + assert_true(await res.text() === 'a'); + assert_true(await req.text() === 'a'); +}, 'ArrayBuffer is cloned'); + +promise_test(async t => { + const body = new Blob(['a']); + const res = new Response(body); + const req = new Request(url, { method, body }); + assert_true(await res.blob() !== body); + assert_true(await req.blob() !== body); +}, 'Blob is cloned'); diff --git a/test/wpt/tests/fetch/api/body/formdata.any.js b/test/wpt/tests/fetch/api/body/formdata.any.js new file mode 100644 index 0000000..e250359 --- /dev/null +++ b/test/wpt/tests/fetch/api/body/formdata.any.js @@ -0,0 +1,14 @@ +promise_test(async t => { + const res = new Response(new FormData()); + const fd = await res.formData(); + assert_true(fd instanceof FormData); +}, 'Consume empty response.formData() as FormData'); + 
+promise_test(async t => { + const req = new Request('about:blank', { + method: 'POST', + body: new FormData() + }); + const fd = await req.formData(); + assert_true(fd instanceof FormData); +}, 'Consume empty request.formData() as FormData'); diff --git a/test/wpt/tests/fetch/api/body/mime-type.any.js b/test/wpt/tests/fetch/api/body/mime-type.any.js new file mode 100644 index 0000000..67c9af7 --- /dev/null +++ b/test/wpt/tests/fetch/api/body/mime-type.any.js @@ -0,0 +1,127 @@ +[ + () => new Request("about:blank", { headers: { "Content-Type": "text/plain" } }), + () => new Response("", { headers: { "Content-Type": "text/plain" } }) +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + assert_equals(bodyContainer.headers.get("Content-Type"), "text/plain"); + const newMIMEType = "test/test"; + bodyContainer.headers.set("Content-Type", newMIMEType); + const blob = await bodyContainer.blob(); + assert_equals(blob.type, newMIMEType); + }, `${bodyContainer.constructor.name}: overriding explicit Content-Type`); +}); + +[ + () => new Request("about:blank", { body: new URLSearchParams(), method: "POST" }), + () => new Response(new URLSearchParams()), +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + assert_equals(bodyContainer.headers.get("Content-Type"), "application/x-www-form-urlencoded;charset=UTF-8"); + bodyContainer.headers.delete("Content-Type"); + const blob = await bodyContainer.blob(); + assert_equals(blob.type, ""); + }, `${bodyContainer.constructor.name}: removing implicit Content-Type`); +}); + +[ + () => new Request("about:blank", { body: new ArrayBuffer(), method: "POST" }), + () => new Response(new ArrayBuffer()), +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + assert_equals(bodyContainer.headers.get("Content-Type"), null); + const newMIMEType = "test/test"; + bodyContainer.headers.set("Content-Type", newMIMEType); + const blob = await bodyContainer.blob(); + assert_equals(blob.type, newMIMEType); + }, `${bodyContainer.constructor.name}: setting missing Content-Type`); +}); + +[ + () => new Request("about:blank", { method: "POST" }), + () => new Response(), +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + const blob = await bodyContainer.blob(); + assert_equals(blob.type, ""); + }, `${bodyContainer.constructor.name}: MIME type for Blob from empty body`); +}); + +[ + () => new Request("about:blank", { method: "POST", headers: [["Content-Type", "Mytext/Plain"]] }), + () => new Response("", { headers: [["Content-Type", "Mytext/Plain"]] }) +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + const blob = await bodyContainer.blob(); + assert_equals(blob.type, 'mytext/plain'); + }, `${bodyContainer.constructor.name}: MIME type for Blob from empty body with Content-Type`); +}); + +[ + () => new Request("about:blank", { body: new Blob([""]), method: "POST" }), + () => new Response(new Blob([""])) +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + const blob = await bodyContainer.blob(); + assert_equals(blob.type, ""); + assert_equals(bodyContainer.headers.get("Content-Type"), null); + }, `${bodyContainer.constructor.name}: MIME type for Blob`); +}); + +[ + () => new Request("about:blank", { body: new Blob([""], { 
type: "Text/Plain" }), method: "POST" }), + () => new Response(new Blob([""], { type: "Text/Plain" })) +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + const blob = await bodyContainer.blob(); + assert_equals(blob.type, "text/plain"); + assert_equals(bodyContainer.headers.get("Content-Type"), "text/plain"); + }, `${bodyContainer.constructor.name}: MIME type for Blob with non-empty type`); +}); + +[ + () => new Request("about:blank", { method: "POST", body: new Blob([""], { type: "Text/Plain" }), headers: [["Content-Type", "Text/Html"]] }), + () => new Response(new Blob([""], { type: "Text/Plain" }, { headers: [["Content-Type", "Text/Html"]] })) +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + const cloned = bodyContainer.clone(); + promise_test(async t => { + const blobs = [await bodyContainer.blob(), await cloned.blob()]; + assert_equals(blobs[0].type, "text/html"); + assert_equals(blobs[1].type, "text/html"); + assert_equals(bodyContainer.headers.get("Content-Type"), "Text/Html"); + assert_equals(cloned.headers.get("Content-Type"), "Text/Html"); + }, `${bodyContainer.constructor.name}: Extract a MIME type with clone`); +}); + +[ + () => new Request("about:blank", { body: new Blob([], { type: "text/plain" }), method: "POST", headers: [["Content-Type", "text/html"]] }), + () => new Response(new Blob([], { type: "text/plain" }), { headers: [["Content-Type", "text/html"]] }), +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + assert_equals(bodyContainer.headers.get("Content-Type"), "text/html"); + const blob = await bodyContainer.blob(); + assert_equals(blob.type, "text/html"); + }, `${bodyContainer.constructor.name}: Content-Type in headers wins Blob"s type`); +}); + +[ + () => new Request("about:blank", { body: new Blob([], { type: "text/plain" }), method: "POST" }), + () => new Response(new Blob([], { type: "text/plain" })), +].forEach(bodyContainerCreator => { + const bodyContainer = bodyContainerCreator(); + promise_test(async t => { + assert_equals(bodyContainer.headers.get("Content-Type"), "text/plain"); + const newMIMEType = "text/html"; + bodyContainer.headers.set("Content-Type", newMIMEType); + const blob = await bodyContainer.blob(); + assert_equals(blob.type, newMIMEType); + }, `${bodyContainer.constructor.name}: setting missing Content-Type in headers and it wins Blob"s type`); +}); diff --git a/test/wpt/tests/fetch/api/cors/cors-basic.any.js b/test/wpt/tests/fetch/api/cors/cors-basic.any.js new file mode 100644 index 0000000..95de0af --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-basic.any.js @@ -0,0 +1,43 @@ +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +const { + HTTPS_ORIGIN, + HTTP_ORIGIN_WITH_DIFFERENT_PORT, + HTTP_REMOTE_ORIGIN, + HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, + HTTPS_REMOTE_ORIGIN, +} = get_host_info(); + +function cors(desc, origin) { + const url = `${origin}${dirname(location.pathname)}${RESOURCES_DIR}top.txt`; + const urlAllowCors = `${url}?pipe=header(Access-Control-Allow-Origin,*)`; + + promise_test((test) => { + return fetch(urlAllowCors, {'mode': 'no-cors'}).then((resp) => { + assert_equals(resp.status, 0, "Opaque filter: status is 0"); + assert_equals(resp.statusText, "", "Opaque filter: statusText is \"\""); + assert_equals(resp.type , "opaque", "Opaque filter: response's type is opaque"); + return resp.text().then((value) => { + 
assert_equals(value, "", "Opaque response should have an empty body"); + }); + }); + }, `${desc} [no-cors mode]`); + + promise_test((test) => { + return promise_rejects_js(test, TypeError, fetch(url, {'mode': 'cors'})); + }, `${desc} [server forbid CORS]`); + + promise_test((test) => { + return fetch(urlAllowCors, {'mode': 'cors'}).then((resp) => { + assert_equals(resp.status, 200, "Fetch's response's status is 200"); + assert_equals(resp.type , "cors", "CORS response's type is cors"); + }); + }, `${desc} [cors mode]`); +} + +cors('Same domain different port', HTTP_ORIGIN_WITH_DIFFERENT_PORT); +cors('Same domain different protocol different port', HTTPS_ORIGIN); +cors('Cross domain basic usage', HTTP_REMOTE_ORIGIN); +cors('Cross domain different port', HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT); +cors('Cross domain different protocol', HTTPS_REMOTE_ORIGIN); diff --git a/test/wpt/tests/fetch/api/cors/cors-cookies-redirect.any.js b/test/wpt/tests/fetch/api/cors/cors-cookies-redirect.any.js new file mode 100644 index 0000000..f5217b4 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-cookies-redirect.any.js @@ -0,0 +1,49 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +var redirectUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "redirect.py"; +var urlSetCookies1 = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "top.txt"; +var urlSetCookies2 = get_host_info().HTTP_ORIGIN_WITH_DIFFERENT_PORT + dirname(location.pathname) + RESOURCES_DIR + "top.txt"; +var urlCheckCookies = get_host_info().HTTP_ORIGIN_WITH_DIFFERENT_PORT + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py?cors&headers=cookie"; + +var urlSetCookiesParameters = "?pipe=header(Access-Control-Allow-Origin," + location.origin + ")"; +urlSetCookiesParameters += "|header(Access-Control-Allow-Credentials,true)"; + +urlSetCookiesParameters1 = urlSetCookiesParameters + "|header(Set-Cookie,a=1)"; +urlSetCookiesParameters2 = urlSetCookiesParameters + "|header(Set-Cookie,a=2)"; + +urlClearCookiesParameters1 = urlSetCookiesParameters + "|header(Set-Cookie,a=1%3B%20max-age=0)"; +urlClearCookiesParameters2 = urlSetCookiesParameters + "|header(Set-Cookie,a=2%3B%20max-age=0)"; + +promise_test(async (test) => { + await fetch(urlSetCookies1 + urlSetCookiesParameters1, {"credentials": "include", "mode": "cors"}); + await fetch(urlSetCookies2 + urlSetCookiesParameters2, {"credentials": "include", "mode": "cors"}); +}, "Set cookies"); + +function doTest(usePreflight) { + promise_test(async (test) => { + var url = redirectUrl; + var uuid_token = token(); + var urlParameters = "?token=" + uuid_token + "&max_age=0"; + urlParameters += "&redirect_status=301"; + urlParameters += "&location=" + encodeURIComponent(urlCheckCookies); + urlParameters += "&allow_headers=a&headers=Cookie"; + headers = []; + if (usePreflight) + headers.push(["a", "b"]); + + var requestInit = {"credentials": "include", "mode": "cors", "headers": headers}; + var response = await fetch(url + urlParameters, requestInit); + + assert_equals(response.headers.get("x-request-cookie") , "a=2", "Request includes cookie(s)"); + }, "Testing credentials after cross-origin redirection with CORS and " + (usePreflight ? 
"" : "no ") + "preflight"); +} + +doTest(false); +doTest(true); + +promise_test(async (test) => { + await fetch(urlSetCookies1 + urlClearCookiesParameters1, {"credentials": "include", "mode": "cors"}); + await fetch(urlSetCookies2 + urlClearCookiesParameters2, {"credentials": "include", "mode": "cors"}); +}, "Clean cookies"); diff --git a/test/wpt/tests/fetch/api/cors/cors-cookies.any.js b/test/wpt/tests/fetch/api/cors/cors-cookies.any.js new file mode 100644 index 0000000..8c666e4 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-cookies.any.js @@ -0,0 +1,56 @@ +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function corsCookies(desc, baseURL1, baseURL2, credentialsMode, cookies) { + var urlSetCookie = baseURL1 + dirname(location.pathname) + RESOURCES_DIR + "top.txt"; + var urlCheckCookies = baseURL2 + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py?cors&headers=cookie"; + //enable cors with credentials + var urlParameters = "?pipe=header(Access-Control-Allow-Origin," + location.origin + ")"; + urlParameters += "|header(Access-Control-Allow-Credentials,true)"; + + var urlCleanParameters = "?pipe=header(Access-Control-Allow-Origin," + location.origin + ")"; + urlCleanParameters += "|header(Access-Control-Allow-Credentials,true)"; + if (cookies) { + urlParameters += "|header(Set-Cookie,"; + urlParameters += cookies.join(",True)|header(Set-Cookie,") + ",True)"; + urlCleanParameters += "|header(Set-Cookie,"; + urlCleanParameters += cookies.join("%3B%20max-age=0,True)|header(Set-Cookie,") + "%3B%20max-age=0,True)"; + } + + var requestInit = {"credentials": credentialsMode, "mode": "cors"}; + + promise_test(function(test){ + return fetch(urlSetCookie + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + //check cookies sent + return fetch(urlCheckCookies, requestInit); + }).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_false(resp.headers.has("Cookie") , "Cookie header is not exposed in response"); + if (credentialsMode === "include" && baseURL1 === baseURL2) { + assert_equals(resp.headers.get("x-request-cookie") , cookies.join("; "), "Request includes cookie(s)"); + } + else { + assert_false(resp.headers.has("x-request-cookie") , "Request should have no cookie"); + } + //clean cookies + return fetch(urlSetCookie + urlCleanParameters, {"credentials": "include"}); + }).catch(function(e) { + return fetch(urlSetCookie + urlCleanParameters, {"credentials": "include"}).then(function(resp) { + throw e; + }) + }); + }, desc); +} + +var local = get_host_info().HTTP_ORIGIN; +var remote = get_host_info().HTTP_REMOTE_ORIGIN; +// FIXME: otherRemote might not be accessible on some test environments. 
+var otherRemote = local.replace("http://", "http://www."); + +corsCookies("Omit mode: no cookie sent", local, local, "omit", ["g=7"]); +corsCookies("Include mode: 1 cookie", remote, remote, "include", ["a=1"]); +corsCookies("Include mode: local cookies are not sent with remote request", local, remote, "include", ["c=3"]); +corsCookies("Include mode: remote cookies are not sent with local request", remote, local, "include", ["d=4"]); +corsCookies("Same-origin mode: cookies are discarded in cors request", remote, remote, "same-origin", ["f=6"]); +corsCookies("Include mode: remote cookies are not sent with other remote request", remote, otherRemote, "include", ["e=5"]); diff --git a/test/wpt/tests/fetch/api/cors/cors-expose-star.sub.any.js b/test/wpt/tests/fetch/api/cors/cors-expose-star.sub.any.js new file mode 100644 index 0000000..340e99a --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-expose-star.sub.any.js @@ -0,0 +1,41 @@ +// META: script=../resources/utils.js + +const url = "http://{{host}}:{{ports[http][1]}}" + dirname(location.pathname) + RESOURCES_DIR + "top.txt", + sharedHeaders = "?pipe=header(Access-Control-Expose-Headers,*)|header(Test,X)|header(Set-Cookie,X)|header(*,whoa)|" + +promise_test(() => { + const headers = "header(Access-Control-Allow-Origin,*)" + return fetch(url + sharedHeaders + headers).then(resp => { + assert_equals(resp.status, 200) + assert_equals(resp.type , "cors") + assert_equals(resp.headers.get("test"), "X") + assert_equals(resp.headers.get("set-cookie"), null) + assert_equals(resp.headers.get("*"), "whoa") + }) +}, "Basic Access-Control-Expose-Headers: * support") + +promise_test(() => { + const origin = location.origin, // assuming an ASCII origin + headers = "header(Access-Control-Allow-Origin," + origin + ")|header(Access-Control-Allow-Credentials,true)" + return fetch(url + sharedHeaders + headers, { credentials:"include" }).then(resp => { + assert_equals(resp.status, 200) + assert_equals(resp.type , "cors") + assert_equals(resp.headers.get("content-type"), "text/plain") // safelisted + assert_equals(resp.headers.get("test"), null) + assert_equals(resp.headers.get("set-cookie"), null) + assert_equals(resp.headers.get("*"), "whoa") + }) +}, "* for credentialed fetches only matches literally") + +promise_test(() => { + const headers = "header(Access-Control-Allow-Origin,*)|header(Access-Control-Expose-Headers,set-cookie\\,*)" + return fetch(url + sharedHeaders + headers).then(resp => { + assert_equals(resp.status, 200) + assert_equals(resp.type , "cors") + assert_equals(resp.headers.get("test"), "X") + assert_equals(resp.headers.get("set-cookie"), null) + assert_equals(resp.headers.get("*"), "whoa") + }) +}, "* can be one of several values") + +done(); diff --git a/test/wpt/tests/fetch/api/cors/cors-filtering.sub.any.js b/test/wpt/tests/fetch/api/cors/cors-filtering.sub.any.js new file mode 100644 index 0000000..a26eacc --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-filtering.sub.any.js @@ -0,0 +1,69 @@ +// META: script=../resources/utils.js + +function corsFilter(corsUrl, headerName, headerValue, isFiltered) { + var url = corsUrl + "?pipe=header(" + headerName + "," + encodeURIComponent(headerValue) +")|header(Access-Control-Allow-Origin,*)"; + promise_test(function(test) { + return fetch(url).then(function(resp) { + assert_equals(resp.status, 200, "Fetch success with code 200"); + assert_equals(resp.type , "cors", "CORS fetch's response has cors type"); + if (!isFiltered) { + assert_equals(resp.headers.get(headerName), headerValue, + 
headerName + " header should be included in response with value: " + headerValue); + } else { + assert_false(resp.headers.has(headerName), "UA should exclude " + headerName + " header from response"); + } + test.done(); + }); + }, "CORS filter on " + headerName + " header"); +} + +function corsExposeFilter(corsUrl, headerName, headerValue, isForbidden, withCredentials) { + var url = corsUrl + "?pipe=header(" + headerName + "," + encodeURIComponent(headerValue) +")|" + + "header(Access-Control-Allow-Origin, http://{{host}}:{{ports[http][0]}})" + + "header(Access-Control-Allow-Credentials, true)" + + "header(Access-Control-Expose-Headers," + headerName + ")"; + + var title = "CORS filter on " + headerName + " header, header is " + (isForbidden ? "forbidden" : "exposed"); + if (withCredentials) + title+= "(credentials = include)"; + promise_test(function(test) { + return fetch(new Request(url, { credentials: withCredentials ? "include" : "omit" })).then(function(resp) { + assert_equals(resp.status, 200, "Fetch success with code 200"); + assert_equals(resp.type , "cors", "CORS fetch's response has cors type"); + if (!isForbidden) { + assert_equals(resp.headers.get(headerName), headerValue, + headerName + " header should be included in response with value: " + headerValue); + } else { + assert_false(resp.headers.has(headerName), "UA should exclude " + headerName + " header from response"); + } + test.done(); + }); + }, title); +} + +var url = "http://{{host}}:{{ports[http][1]}}" + dirname(location.pathname) + RESOURCES_DIR + "top.txt"; + +corsFilter(url, "Cache-Control", "no-cache", false); +corsFilter(url, "Content-Language", "fr", false); +corsFilter(url, "Content-Type", "text/html", false); +corsFilter(url, "Expires","04 May 1988 22:22:22 GMT" , false); +corsFilter(url, "Last-Modified", "04 May 1988 22:22:22 GMT", false); +corsFilter(url, "Pragma", "no-cache", false); +corsFilter(url, "Content-Length", "3" , false); // top.txt contains "top" + +corsFilter(url, "Age", "27", true); +corsFilter(url, "Server", "wptServe" , true); +corsFilter(url, "Warning", "Mind the gap" , true); +corsFilter(url, "Set-Cookie", "name=value" , true); +corsFilter(url, "Set-Cookie2", "name=value" , true); + +corsExposeFilter(url, "Age", "27", false); +corsExposeFilter(url, "Server", "wptServe" , false); +corsExposeFilter(url, "Warning", "Mind the gap" , false); + +corsExposeFilter(url, "Set-Cookie", "name=value" , true); +corsExposeFilter(url, "Set-Cookie2", "name=value" , true); +corsExposeFilter(url, "Set-Cookie", "name=value" , true, true); +corsExposeFilter(url, "Set-Cookie2", "name=value" , true, true); + +done(); diff --git a/test/wpt/tests/fetch/api/cors/cors-keepalive.any.js b/test/wpt/tests/fetch/api/cors/cors-keepalive.any.js new file mode 100644 index 0000000..f68d90e --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-keepalive.any.js @@ -0,0 +1,118 @@ +// META: global=window +// META: timeout=long +// META: title=Fetch API: keepalive handling +// META: script=/resources/testharness.js +// META: script=/resources/testharnessreport.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=../resources/keepalive-helper.js +// META: script=../resources/utils.js + +'use strict'; + +const { + HTTP_NOTSAMESITE_ORIGIN, + HTTPS_ORIGIN, + HTTP_ORIGIN_WITH_DIFFERENT_PORT, + HTTP_REMOTE_ORIGIN, + HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, + HTTPS_REMOTE_ORIGIN, +} = get_host_info(); + +/** + * Tests to cover the basic behaviors of keepalive + cors/no-cors mode requests + * 
to different `origin` when the initiator document is still alive. They should + * behave the same as without setting keepalive. + */ +function keepaliveCorsBasicTest(desc, origin) { + const url = `${origin}${dirname(location.pathname)}${RESOURCES_DIR}top.txt`; + const urlAllowCors = `${url}?pipe=header(Access-Control-Allow-Origin,*)`; + + promise_test((test) => { + return fetch(urlAllowCors, {keepalive: true, 'mode': 'no-cors'}) + .then((resp) => { + assert_equals(resp.status, 0, 'Opaque filter: status is 0'); + assert_equals(resp.statusText, '', 'Opaque filter: statusText is ""'); + assert_equals( + resp.type, 'opaque', 'Opaque filter: response\'s type is opaque'); + return resp.text().then((value) => { + assert_equals( + value, '', 'Opaque response should have an empty body'); + }); + }); + }, `${desc} [no-cors mode]`); + + promise_test((test) => { + return promise_rejects_js( + test, TypeError, fetch(url, {keepalive: true, 'mode': 'cors'})); + }, `${desc} [cors mode, server forbid CORS]`); + + promise_test((test) => { + return fetch(urlAllowCors, {keepalive: true, 'mode': 'cors'}) + .then((resp) => { + assert_equals(resp.status, 200, 'Fetch\'s response\'s status is 200'); + assert_equals(resp.type, 'cors', 'CORS response\'s type is cors'); + }); + }, `${desc} [cors mode]`); +} + +keepaliveCorsBasicTest( + `[keepalive] Same domain different port`, HTTP_ORIGIN_WITH_DIFFERENT_PORT); +keepaliveCorsBasicTest( + `[keepalive] Same domain different protocol different port`, HTTPS_ORIGIN); +keepaliveCorsBasicTest( + `[keepalive] Cross domain basic usage`, HTTP_REMOTE_ORIGIN); +keepaliveCorsBasicTest( + `[keepalive] Cross domain different port`, + HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT); +keepaliveCorsBasicTest( + `[keepalive] Cross domain different protocol`, HTTPS_REMOTE_ORIGIN); + +/** + * In a same-site iframe, and in `unload` event handler, test to fetch + * a keepalive URL that involves in different cors modes. + */ +function keepaliveCorsInUnloadTest(description, origin, method) { + const evt = 'unload'; + for (const mode of ['no-cors', 'cors']) { + for (const disallowOrigin of [false, true]) { + const desc = `${description} ${method} request in ${evt} [${mode} mode` + + (disallowOrigin ? 
`, server forbid CORS]` : `]`); + const shouldPass = !disallowOrigin || mode === 'no-cors'; + promise_test(async (test) => { + const token1 = token(); + const iframe = document.createElement('iframe'); + iframe.src = getKeepAliveIframeUrl(token1, method, { + frameOrigin: '', + requestOrigin: origin, + sendOn: evt, + mode: mode, + disallowOrigin + }); + document.body.appendChild(iframe); + await iframeLoaded(iframe); + iframe.remove(); + assert_equals(await getTokenFromMessage(), token1); + + assertStashedTokenAsync(desc, token1, {shouldPass}); + }, `${desc}; setting up`); + } + } +} + +for (const method of ['GET', 'POST']) { + keepaliveCorsInUnloadTest( + '[keepalive] Same domain different port', HTTP_ORIGIN_WITH_DIFFERENT_PORT, + method); + keepaliveCorsInUnloadTest( + `[keepalive] Same domain different protocol different port`, HTTPS_ORIGIN, + method); + keepaliveCorsInUnloadTest( + `[keepalive] Cross domain basic usage`, HTTP_REMOTE_ORIGIN, method); + keepaliveCorsInUnloadTest( + `[keepalive] Cross domain different port`, + HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, method); + keepaliveCorsInUnloadTest( + `[keepalive] Cross domain different protocol`, HTTPS_REMOTE_ORIGIN, + method); +} diff --git a/test/wpt/tests/fetch/api/cors/cors-multiple-origins.sub.any.js b/test/wpt/tests/fetch/api/cors/cors-multiple-origins.sub.any.js new file mode 100644 index 0000000..b3abb92 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-multiple-origins.sub.any.js @@ -0,0 +1,22 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function corsMultipleOrigins(originList) { + var urlParameters = "?origin=" + encodeURIComponent(originList.join(", ")); + var url = "http://{{host}}:{{ports[http][1]}}" + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + + promise_test(function(test) { + return promise_rejects_js(test, TypeError, fetch(url + urlParameters)); + }, "Listing multiple origins is illegal: " + originList); +} +/* Actual origin */ +var origin = "http://{{host}}:{{ports[http][0]}}"; + +corsMultipleOrigins(["\"\"", "http://example.com", origin]); +corsMultipleOrigins(["\"\"", "http://example.com", "*"]); +corsMultipleOrigins(["\"\"", origin, origin]); +corsMultipleOrigins(["*", "http://example.com", "*"]); +corsMultipleOrigins(["*", "http://example.com", origin]); +corsMultipleOrigins(["", "http://example.com", "https://example2.com"]); + +done(); diff --git a/test/wpt/tests/fetch/api/cors/cors-no-preflight.any.js b/test/wpt/tests/fetch/api/cors/cors-no-preflight.any.js new file mode 100644 index 0000000..7a0269a --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-no-preflight.any.js @@ -0,0 +1,41 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function corsNoPreflight(desc, baseURL, method, headerName, headerValue) { + + var uuid_token = token(); + var url = baseURL + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + var urlParameters = "?token=" + uuid_token + "&max_age=0"; + var requestInit = {"mode": "cors", "method": method, "headers":{}}; + if (headerName) + requestInit["headers"][headerName] = headerValue; + + promise_test(function(test) { + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + assert_equals(resp.status, 200, "Clean stash response's status is 200"); + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + 
assert_equals(resp.headers.get("x-did-preflight"), "0", "No preflight request has been made"); + }); + }); + }, desc); +} + +var host_info = get_host_info(); + +corsNoPreflight("Cross domain basic usage [GET]", host_info.HTTP_REMOTE_ORIGIN, "GET"); +corsNoPreflight("Same domain different port [GET]", host_info.HTTP_ORIGIN_WITH_DIFFERENT_PORT, "GET"); +corsNoPreflight("Cross domain different port [GET]", host_info.HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, "GET"); +corsNoPreflight("Cross domain different protocol [GET]", host_info.HTTPS_REMOTE_ORIGIN, "GET"); +corsNoPreflight("Same domain different protocol different port [GET]", host_info.HTTPS_ORIGIN, "GET"); +corsNoPreflight("Cross domain [POST]", host_info.HTTP_REMOTE_ORIGIN, "POST"); +corsNoPreflight("Cross domain [HEAD]", host_info.HTTP_REMOTE_ORIGIN, "HEAD"); +corsNoPreflight("Cross domain [GET] [Accept: */*]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Accept", "*/*"); +corsNoPreflight("Cross domain [GET] [Accept-Language: fr]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Accept-Language", "fr"); +corsNoPreflight("Cross domain [GET] [Content-Language: fr]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Content-Language", "fr"); +corsNoPreflight("Cross domain [GET] [Content-Type: application/x-www-form-urlencoded]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Content-Type", "application/x-www-form-urlencoded"); +corsNoPreflight("Cross domain [GET] [Content-Type: multipart/form-data]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Content-Type", "multipart/form-data"); +corsNoPreflight("Cross domain [GET] [Content-Type: text/plain]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Content-Type", "text/plain"); +corsNoPreflight("Cross domain [GET] [Content-Type: text/plain;charset=utf-8]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Content-Type", "text/plain;charset=utf-8"); +corsNoPreflight("Cross domain [GET] [Content-Type: Text/Plain;charset=utf-8]", host_info.HTTP_REMOTE_ORIGIN, "GET" , "Content-Type", "Text/Plain;charset=utf-8"); diff --git a/test/wpt/tests/fetch/api/cors/cors-origin.any.js b/test/wpt/tests/fetch/api/cors/cors-origin.any.js new file mode 100644 index 0000000..30a02d9 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-origin.any.js @@ -0,0 +1,51 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +/* If origin is undefined, it is set to fetched url's origin*/ +function corsOrigin(desc, baseURL, method, origin, shouldPass) { + if (!origin) + origin = baseURL; + + var uuid_token = token(); + var urlParameters = "?token=" + uuid_token + "&max_age=0&origin=" + encodeURIComponent(origin) + "&allow_methods=" + method; + var url = baseURL + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + var requestInit = {"mode": "cors", "method": method}; + + promise_test(function(test) { + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + assert_equals(resp.status, 200, "Clean stash response's status is 200"); + if (shouldPass) { + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + }); + } else { + return promise_rejects_js(test, TypeError, fetch(url + urlParameters, requestInit)); + } + }); + }, desc); + +} + +var host_info = get_host_info(); + +/* Actual origin */ +var origin = host_info.HTTP_ORIGIN; + +corsOrigin("Cross domain different subdomain [origin OK]", host_info.HTTP_REMOTE_ORIGIN, "GET", origin, true); +corsOrigin("Cross domain different subdomain [origin 
KO]", host_info.HTTP_REMOTE_ORIGIN, "GET", undefined, false); +corsOrigin("Same domain different port [origin OK]", host_info.HTTP_ORIGIN_WITH_DIFFERENT_PORT, "GET", origin, true); +corsOrigin("Same domain different port [origin KO]", host_info.HTTP_ORIGIN_WITH_DIFFERENT_PORT, "GET", undefined, false); +corsOrigin("Cross domain different port [origin OK]", host_info.HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, "GET", origin, true); +corsOrigin("Cross domain different port [origin KO]", host_info.HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, "GET", undefined, false); +corsOrigin("Cross domain different protocol [origin OK]", host_info.HTTPS_REMOTE_ORIGIN, "GET", origin, true); +corsOrigin("Cross domain different protocol [origin KO]", host_info.HTTPS_REMOTE_ORIGIN, "GET", undefined, false); +corsOrigin("Same domain different protocol different port [origin OK]", host_info.HTTPS_ORIGIN, "GET", origin, true); +corsOrigin("Same domain different protocol different port [origin KO]", host_info.HTTPS_ORIGIN, "GET", undefined, false); +corsOrigin("Cross domain [POST] [origin OK]", host_info.HTTP_REMOTE_ORIGIN, "POST", origin, true); +corsOrigin("Cross domain [POST] [origin KO]", host_info.HTTP_REMOTE_ORIGIN, "POST", undefined, false); +corsOrigin("Cross domain [HEAD] [origin OK]", host_info.HTTP_REMOTE_ORIGIN, "HEAD", origin, true); +corsOrigin("Cross domain [HEAD] [origin KO]", host_info.HTTP_REMOTE_ORIGIN, "HEAD", undefined, false); +corsOrigin("CORS preflight [PUT] [origin OK]", host_info.HTTP_REMOTE_ORIGIN, "PUT", origin, true); +corsOrigin("CORS preflight [PUT] [origin KO]", host_info.HTTP_REMOTE_ORIGIN, "PUT", undefined, false); +corsOrigin("Allowed origin: \"\" [origin KO]", host_info.HTTP_REMOTE_ORIGIN, "GET", "" , false); diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight-cache.any.js b/test/wpt/tests/fetch/api/cors/cors-preflight-cache.any.js new file mode 100644 index 0000000..ce6a169 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight-cache.any.js @@ -0,0 +1,46 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +var cors_url = get_host_info().HTTP_REMOTE_ORIGIN + + dirname(location.pathname) + + RESOURCES_DIR + + "preflight.py"; + +promise_test((test) => { + var uuid_token = token(); + var request_url = + cors_url + "?token=" + uuid_token + "&max_age=12000&allow_methods=POST" + + "&allow_headers=x-test-header"; + return fetch(cors_url + "?token=" + uuid_token + "&clear-stash") + .then(() => { + return fetch( + new Request(request_url, + { + mode: "cors", + method: "POST", + headers: [["x-test-header", "test1"]] + })); + }) + .then((resp) => { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.headers.get("x-did-preflight"), "1", "Preflight request has been made"); + return fetch(cors_url + "?token=" + uuid_token + "&clear-stash"); + }) + .then((res) => res.text()) + .then((txt) => { + assert_equals(txt, "1", "Server stash must be cleared."); + return fetch( + new Request(request_url, + { + mode: "cors", + method: "POST", + headers: [["x-test-header", "test2"]] + })); + }) + .then((resp) => { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.headers.get("x-did-preflight"), "0", "Preflight request has not been made"); + return fetch(cors_url + "?token=" + uuid_token + "&clear-stash"); + }); +}); diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight-not-cors-safelisted.any.js 
b/test/wpt/tests/fetch/api/cors/cors-preflight-not-cors-safelisted.any.js new file mode 100644 index 0000000..b2747cc --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight-not-cors-safelisted.any.js @@ -0,0 +1,19 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=resources/corspreflight.js + +const corsURL = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + +promise_test(() => fetch("resources/not-cors-safelisted.json").then(res => res.json().then(runTests)), "Loading data…"); + +function runTests(testArray) { + testArray.forEach(testItem => { + const [headerName, headerValue] = testItem; + corsPreflight("Need CORS-preflight for " + headerName + "/" + headerValue + " header", + corsURL, + "GET", + true, + [[headerName, headerValue]]); + }); +} diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight-redirect.any.js b/test/wpt/tests/fetch/api/cors/cors-preflight-redirect.any.js new file mode 100644 index 0000000..15f7659 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight-redirect.any.js @@ -0,0 +1,37 @@ +// META: global=window,worker +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function corsPreflightRedirect(desc, redirectUrl, redirectLocation, redirectStatus, redirectPreflight) { + var uuid_token = token(); + var url = redirectUrl; + var urlParameters = "?token=" + uuid_token + "&max_age=0"; + urlParameters += "&redirect_status=" + redirectStatus; + urlParameters += "&location=" + encodeURIComponent(redirectLocation); + + if (redirectPreflight) + urlParameters += "&redirect_preflight"; + var requestInit = {"mode": "cors", "redirect": "follow"}; + + /* Force preflight */ + requestInit["headers"] = {"x-force-preflight": ""}; + urlParameters += "&allow_headers=x-force-preflight"; + + promise_test(function(test) { + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + assert_equals(resp.status, 200, "Clean stash response's status is 200"); + return promise_rejects_js(test, TypeError, fetch(url + urlParameters, requestInit)); + }); + }, desc); +} + +var redirectUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "redirect.py"; +var locationUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + +for (var code of [301, 302, 303, 307, 308]) { + /* preflight should not follow the redirection */ + corsPreflightRedirect("Redirection " + code + " on preflight failed", redirectUrl, locationUrl, code, true); + /* preflight is done before redirection: preflight force redirect to error */ + corsPreflightRedirect("Redirection " + code + " after preflight failed", redirectUrl, locationUrl, code, false); +} diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight-referrer.any.js b/test/wpt/tests/fetch/api/cors/cors-preflight-referrer.any.js new file mode 100644 index 0000000..5df9fcf --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight-referrer.any.js @@ -0,0 +1,51 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function corsPreflightReferrer(desc, corsUrl, referrerPolicy, referrer, expectedReferrer) { + var uuid_token = token(); + var url = corsUrl; + var urlParameters = "?token=" + uuid_token + "&max_age=0"; + var requestInit = {"mode": "cors", 
"referrerPolicy": referrerPolicy}; + + if (referrer) + requestInit.referrer = referrer; + + /* Force preflight */ + requestInit["headers"] = {"x-force-preflight": ""}; + urlParameters += "&allow_headers=x-force-preflight"; + + promise_test(function(test) { + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + assert_equals(resp.status, 200, "Clean stash response's status is 200"); + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.headers.get("x-did-preflight"), "1", "Preflight request has been made"); + assert_equals(resp.headers.get("x-preflight-referrer"), expectedReferrer, "Preflight's referrer is correct"); + assert_equals(resp.headers.get("x-referrer"), expectedReferrer, "Request's referrer is correct"); + assert_equals(resp.headers.get("x-control-request-headers"), "", "Access-Control-Allow-Headers value"); + }); + }); + }, desc + " and referrer: " + (referrer ? "'" + referrer + "'" : "default")); +} + +var corsUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; +var origin = get_host_info().HTTP_ORIGIN + "/"; + +corsPreflightReferrer("Referrer policy: no-referrer", corsUrl, "no-referrer", undefined, ""); +corsPreflightReferrer("Referrer policy: no-referrer", corsUrl, "no-referrer", "myreferrer", ""); + +corsPreflightReferrer("Referrer policy: \"\"", corsUrl, "", undefined, origin); +corsPreflightReferrer("Referrer policy: \"\"", corsUrl, "", "myreferrer", origin); + +corsPreflightReferrer("Referrer policy: no-referrer-when-downgrade", corsUrl, "no-referrer-when-downgrade", undefined, location.toString()) +corsPreflightReferrer("Referrer policy: no-referrer-when-downgrade", corsUrl, "no-referrer-when-downgrade", "myreferrer", new URL("myreferrer", location).toString()); + +corsPreflightReferrer("Referrer policy: origin", corsUrl, "origin", undefined, origin); +corsPreflightReferrer("Referrer policy: origin", corsUrl, "origin", "myreferrer", origin); + +corsPreflightReferrer("Referrer policy: origin-when-cross-origin", corsUrl, "origin-when-cross-origin", undefined, origin); +corsPreflightReferrer("Referrer policy: origin-when-cross-origin", corsUrl, "origin-when-cross-origin", "myreferrer", origin); + +corsPreflightReferrer("Referrer policy: unsafe-url", corsUrl, "unsafe-url", undefined, location.toString()); +corsPreflightReferrer("Referrer policy: unsafe-url", corsUrl, "unsafe-url", "myreferrer", new URL("myreferrer", location).toString()); diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight-response-validation.any.js b/test/wpt/tests/fetch/api/cors/cors-preflight-response-validation.any.js new file mode 100644 index 0000000..718e351 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight-response-validation.any.js @@ -0,0 +1,33 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function corsPreflightResponseValidation(desc, corsUrl, allowHeaders, allowMethods) { + var uuid_token = token(); + var url = corsUrl; + var requestInit = {"mode": "cors"}; + /* Force preflight */ + requestInit["headers"] = {"x-force-preflight": ""}; + + var urlParameters = "?token=" + uuid_token + "&max_age=0"; + urlParameters += "&allow_headers=x-force-preflight"; + if (allowHeaders) + urlParameters += "," + allowHeaders; + if (allowMethods) + urlParameters += "&allow_methods="+ allowMethods; + + promise_test(function(test) { 
+ return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(async function(resp) { + assert_equals(resp.status, 200, "Clean stash response's status is 200"); + await promise_rejects_js(test, TypeError, fetch(url + urlParameters, requestInit)); + + return fetch(url + urlParameters).then(function(resp) { + assert_equals(resp.headers.get("x-did-preflight"), "1", "Preflight request has been made"); + }); + }); + }, desc); +} + +var corsUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; +corsPreflightResponseValidation("Preflight response with a bad Access-Control-Allow-Headers", corsUrl, "Bad value", null); +corsPreflightResponseValidation("Preflight response with a bad Access-Control-Allow-Methods", corsUrl, null, "Bad value"); diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight-star.any.js b/test/wpt/tests/fetch/api/cors/cors-preflight-star.any.js new file mode 100644 index 0000000..f9fb204 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight-star.any.js @@ -0,0 +1,86 @@ +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +const url = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "preflight.py", + origin = location.origin // assuming an ASCII origin + +function preflightTest(succeeds, withCredentials, allowMethod, allowHeader, useMethod, useHeader) { + return promise_test(t => { + let testURL = url + "?", + requestInit = {} + if (withCredentials) { + testURL += "origin=" + origin + "&" + testURL += "credentials&" + requestInit.credentials = "include" + } + if (useMethod) { + requestInit.method = useMethod + } + if (useHeader.length > 0) { + requestInit.headers = [useHeader] + } + testURL += "allow_methods=" + allowMethod + "&" + testURL += "allow_headers=" + allowHeader + "&" + + if (succeeds) { + return fetch(testURL, requestInit).then(resp => { + assert_equals(resp.headers.get("x-origin"), origin) + }) + } else { + return promise_rejects_js(t, TypeError, fetch(testURL, requestInit)) + } + }, "CORS that " + (succeeds ? "succeeds" : "fails") + " with credentials: " + withCredentials + "; method: " + useMethod + " (allowed: " + allowMethod + "); header: " + useHeader + " (allowed: " + allowHeader + ")") +} + +// "GET" does not pass the case-sensitive method check, but in the safe list. +preflightTest(true, false, "get", "x-test", "GET", ["X-Test", "1"]) +// Headers check is case-insensitive, and "*" works as any for method. +preflightTest(true, false, "*", "x-test", "SUPER", ["X-Test", "1"]) +// "*" works as any only without credentials. +preflightTest(true, false, "*", "*", "OK", ["X-Test", "1"]) +preflightTest(false, true, "*", "*", "OK", ["X-Test", "1"]) +preflightTest(false, true, "*", "", "PUT", []) +preflightTest(false, true, "get", "*", "GET", ["X-Test", "1"]) +preflightTest(false, true, "*", "*", "GET", ["X-Test", "1"]) +// Exact character match works even for "*" with credentials. +preflightTest(true, true, "*", "*", "*", ["*", "1"]) + +// The following methods are upper-cased for init["method"] by +// https://fetch.spec.whatwg.org/#concept-method-normalize +// but not in Access-Control-Allow-Methods response. +// But they are https://fetch.spec.whatwg.org/#cors-safelisted-method, +// CORS anyway passes regardless of the cases. 
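+// (Hence all four case combinations below, upper/lower case in Access-Control-Allow-Methods and in init["method"], are expected to succeed.)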
+for (const METHOD of ['GET', 'HEAD', 'POST']) { + const method = METHOD.toLowerCase(); + preflightTest(true, true, METHOD, "*", METHOD, []) + preflightTest(true, true, METHOD, "*", method, []) + preflightTest(true, true, method, "*", METHOD, []) + preflightTest(true, true, method, "*", method, []) +} + +// The following methods are upper-cased for init["method"] by +// https://fetch.spec.whatwg.org/#concept-method-normalize +// but not in Access-Control-Allow-Methods response. +// As they are not https://fetch.spec.whatwg.org/#cors-safelisted-method, +// Access-Control-Allow-Methods should contain upper-cased methods, +// while init["method"] can be either in upper or lower case. +for (const METHOD of ['DELETE', 'PUT']) { + const method = METHOD.toLowerCase(); + preflightTest(true, true, METHOD, "*", METHOD, []) + preflightTest(true, true, METHOD, "*", method, []) + preflightTest(false, true, method, "*", METHOD, []) + preflightTest(false, true, method, "*", method, []) +} + +// "PATCH" is NOT upper-cased in both places because it is not listed in +// https://fetch.spec.whatwg.org/#concept-method-normalize. +// So Access-Control-Allow-Methods value and init["method"] should match +// case-sensitively. +preflightTest(true, true, "PATCH", "*", "PATCH", []) +preflightTest(false, true, "PATCH", "*", "patch", []) +preflightTest(false, true, "patch", "*", "PATCH", []) +preflightTest(true, true, "patch", "*", "patch", []) + +// "Authorization" header can't be wildcarded. +preflightTest(false, false, "*", "*", "POST", ["Authorization", "123"]) +preflightTest(true, false, "*", "*, Authorization", "POST", ["Authorization", "123"]) diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight-status.any.js b/test/wpt/tests/fetch/api/cors/cors-preflight-status.any.js new file mode 100644 index 0000000..a4467a6 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight-status.any.js @@ -0,0 +1,37 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +/* Check preflight is ok if status is ok status (200 to 299)*/ +function corsPreflightStatus(desc, corsUrl, preflightStatus) { + var uuid_token = token(); + var url = corsUrl; + var requestInit = {"mode": "cors"}; + /* Force preflight */ + requestInit["headers"] = {"x-force-preflight": ""}; + + var urlParameters = "?token=" + uuid_token + "&max_age=0"; + urlParameters += "&allow_headers=x-force-preflight"; + urlParameters += "&preflight_status=" + preflightStatus; + + promise_test(function(test) { + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + assert_equals(resp.status, 200, "Clean stash response's status is 200"); + if (200 <= preflightStatus && 299 >= preflightStatus) { + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.headers.get("x-did-preflight"), "1", "Preflight request has been made"); + }); + } else { + return promise_rejects_js(test, TypeError, fetch(url + urlParameters, requestInit)); + } + }); + }, desc); +} + +var corsUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; +for (status of [200, 201, 202, 203, 204, 205, 206, + 300, 301, 302, 303, 304, 305, 306, 307, 308, + 400, 401, 402, 403, 404, 405, + 501, 502, 503, 504, 505]) + corsPreflightStatus("Preflight answered with status " + status, corsUrl, status); diff --git a/test/wpt/tests/fetch/api/cors/cors-preflight.any.js 
b/test/wpt/tests/fetch/api/cors/cors-preflight.any.js new file mode 100644 index 0000000..045422f --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-preflight.any.js @@ -0,0 +1,62 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=resources/corspreflight.js + +var corsUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + +corsPreflight("CORS [DELETE], server allows", corsUrl, "DELETE", true); +corsPreflight("CORS [DELETE], server refuses", corsUrl, "DELETE", false); +corsPreflight("CORS [PUT], server allows", corsUrl, "PUT", true); +corsPreflight("CORS [PUT], server allows, check preflight has user agent", corsUrl + "?checkUserAgentHeaderInPreflight", "PUT", true); +corsPreflight("CORS [PUT], server refuses", corsUrl, "PUT", false); +corsPreflight("CORS [PATCH], server allows", corsUrl, "PATCH", true); +corsPreflight("CORS [PATCH], server refuses", corsUrl, "PATCH", false); +corsPreflight("CORS [patcH], server allows", corsUrl, "patcH", true); +corsPreflight("CORS [patcH], server refuses", corsUrl, "patcH", false); +corsPreflight("CORS [NEW], server allows", corsUrl, "NEW", true); +corsPreflight("CORS [NEW], server refuses", corsUrl, "NEW", false); +corsPreflight("CORS [chicken], server allows", corsUrl, "chicken", true); +corsPreflight("CORS [chicken], server refuses", corsUrl, "chicken", false); + +corsPreflight("CORS [GET] [x-test-header: allowed], server allows", corsUrl, "GET", true, [["x-test-header1", "allowed"]]); +corsPreflight("CORS [GET] [x-test-header: refused], server refuses", corsUrl, "GET", false, [["x-test-header1", "refused"]]); + +var headers = [ + ["x-test-header1", "allowedOrRefused"], + ["x-test-header2", "allowedOrRefused"], + ["X-test-header3", "allowedOrRefused"], + ["x-test-header-b", "allowedOrRefused"], + ["x-test-header-D", "allowedOrRefused"], + ["x-test-header-C", "allowedOrRefused"], + ["x-test-header-a", "allowedOrRefused"], + ["Content-Type", "allowedOrRefused"], +]; +var safeHeaders= [ + ["Accept", "*"], + ["Accept-Language", "bzh"], + ["Content-Language", "eu"], +]; + +corsPreflight("CORS [GET] [several headers], server allows", corsUrl, "GET", true, headers, safeHeaders); +corsPreflight("CORS [GET] [several headers], server refuses", corsUrl, "GET", false, headers, safeHeaders); +corsPreflight("CORS [PUT] [several headers], server allows", corsUrl, "PUT", true, headers, safeHeaders); +corsPreflight("CORS [PUT] [several headers], server refuses", corsUrl, "PUT", false, headers, safeHeaders); + +corsPreflight("CORS [PUT] [only safe headers], server allows", corsUrl, "PUT", true, null, safeHeaders); + +promise_test(async t => { + const url = `${corsUrl}?allow_headers=*`; + await promise_rejects_js(t, TypeError, fetch(url, { + headers: { + authorization: 'foobar' + } + })); +}, '"authorization" should not be covered by the wildcard symbol'); + +promise_test(async t => { + const url = `${corsUrl}?allow_headers=authorization`; + await fetch(url, { headers: { + authorization: 'foobar' + }}); +}, '"authorization" should be covered by "authorization"'); \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/cors/cors-redirect-credentials.any.js b/test/wpt/tests/fetch/api/cors/cors-redirect-credentials.any.js new file mode 100644 index 0000000..2aff313 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-redirect-credentials.any.js @@ -0,0 +1,52 @@ +// META: timeout=long +// META: script=../resources/utils.js 
+// META: script=/common/get-host-info.sub.js + +function corsRedirectCredentials(desc, redirectUrl, redirectLocation, redirectStatus, locationCredentials) { + var url = redirectUrl + var urlParameters = "?redirect_status=" + redirectStatus; + urlParameters += "&location=" + redirectLocation.replace("://", "://" + locationCredentials + "@"); + + var requestInit = {"mode": "cors", "redirect": "follow"}; + + promise_test(t => { + const result = fetch(url + urlParameters, requestInit) + if(locationCredentials === "") { + return result; + } else { + return promise_rejects_js(t, TypeError, result); + } + }, desc); +} + +var redirPath = dirname(location.pathname) + RESOURCES_DIR + "redirect.py"; +var preflightPath = dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + +var host_info = get_host_info(); + +var localRedirect = host_info.HTTP_ORIGIN + redirPath; +var remoteRedirect = host_info.HTTP_ORIGIN_WITH_DIFFERENT_PORT + redirPath; + +var localLocation = host_info.HTTP_ORIGIN + preflightPath; +var remoteLocation = host_info.HTTP_ORIGIN_WITH_DIFFERENT_PORT + preflightPath; +var remoteLocation2 = host_info.HTTP_REMOTE_ORIGIN + preflightPath; + +for (var code of [301, 302, 303, 307, 308]) { + corsRedirectCredentials("Redirect " + code + " from same origin to remote without user and password", localRedirect, remoteLocation, code, ""); + + corsRedirectCredentials("Redirect " + code + " from same origin to remote with user and password", localRedirect, remoteLocation, code, "user:password"); + corsRedirectCredentials("Redirect " + code + " from same origin to remote with user", localRedirect, remoteLocation, code, "user:"); + corsRedirectCredentials("Redirect " + code + " from same origin to remote with password", localRedirect, remoteLocation, code, ":password"); + + corsRedirectCredentials("Redirect " + code + " from remote to same origin with user and password", remoteRedirect, localLocation, code, "user:password"); + corsRedirectCredentials("Redirect " + code + " from remote to same origin with user", remoteRedirect, localLocation, code, "user:"); + corsRedirectCredentials("Redirect " + code + " from remote to same origin with password", remoteRedirect, localLocation, code, ":password"); + + corsRedirectCredentials("Redirect " + code + " from remote to same remote with user and password", remoteRedirect, remoteLocation, code, "user:password"); + corsRedirectCredentials("Redirect " + code + " from remote to same remote with user", remoteRedirect, remoteLocation, code, "user:"); + corsRedirectCredentials("Redirect " + code + " from remote to same remote with password", remoteRedirect, remoteLocation, code, ":password"); + + corsRedirectCredentials("Redirect " + code + " from remote to another remote with user and password", remoteRedirect, remoteLocation2, code, "user:password"); + corsRedirectCredentials("Redirect " + code + " from remote to another remote with user", remoteRedirect, remoteLocation2, code, "user:"); + corsRedirectCredentials("Redirect " + code + " from remote to another remote with password", remoteRedirect, remoteLocation2, code, ":password"); +} diff --git a/test/wpt/tests/fetch/api/cors/cors-redirect-preflight.any.js b/test/wpt/tests/fetch/api/cors/cors-redirect-preflight.any.js new file mode 100644 index 0000000..5084817 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-redirect-preflight.any.js @@ -0,0 +1,46 @@ +// META: timeout=long +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + 
+function corsRedirect(desc, redirectUrl, redirectLocation, redirectStatus, expectSuccess) { + var urlBaseParameters = "&redirect_status=" + redirectStatus; + var urlParametersSuccess = urlBaseParameters + "&allow_headers=x-w3c&location=" + encodeURIComponent(redirectLocation + "?allow_headers=x-w3c"); + var urlParametersFailure = urlBaseParameters + "&location=" + encodeURIComponent(redirectLocation); + + var requestInit = {"mode": "cors", "redirect": "follow", "headers" : [["x-w3c", "test"]]}; + + promise_test(function(test) { + var uuid_token = token(); + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + return fetch(redirectUrl + "?token=" + uuid_token + "&max_age=0" + urlParametersSuccess, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.headers.get("x-did-preflight"), "1", "Preflight request has been made"); + }); + }); + }, desc + " (preflight after redirection success case)"); + promise_test(function(test) { + var uuid_token = token(); + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + return promise_rejects_js(test, TypeError, fetch(redirectUrl + "?token=" + uuid_token + "&max_age=0" + urlParametersFailure, requestInit)); + }); + }, desc + " (preflight after redirection failure case)"); +} + +var redirPath = dirname(location.pathname) + RESOURCES_DIR + "redirect.py"; +var preflightPath = dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + +var host_info = get_host_info(); + +var localRedirect = host_info.HTTP_ORIGIN + redirPath; +var remoteRedirect = host_info.HTTP_REMOTE_ORIGIN + redirPath; + +var localLocation = host_info.HTTP_ORIGIN + preflightPath; +var remoteLocation = host_info.HTTP_REMOTE_ORIGIN + preflightPath; +var remoteLocation2 = host_info.HTTP_ORIGIN_WITH_DIFFERENT_PORT + preflightPath; + +for (var code of [301, 302, 303, 307, 308]) { + corsRedirect("Redirect " + code + ": same origin to cors", localRedirect, remoteLocation, code); + corsRedirect("Redirect " + code + ": cors to same origin", remoteRedirect, localLocation, code); + corsRedirect("Redirect " + code + ": cors to another cors", remoteRedirect, remoteLocation2, code); +} diff --git a/test/wpt/tests/fetch/api/cors/cors-redirect.any.js b/test/wpt/tests/fetch/api/cors/cors-redirect.any.js new file mode 100644 index 0000000..cdf4097 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/cors-redirect.any.js @@ -0,0 +1,42 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function corsRedirect(desc, redirectUrl, redirectLocation, redirectStatus, expectedOrigin) { + var uuid_token = token(); + var url = redirectUrl; + var urlParameters = "?token=" + uuid_token + "&max_age=0"; + urlParameters += "&redirect_status=" + redirectStatus; + urlParameters += "&location=" + encodeURIComponent(redirectLocation); + + var requestInit = {"mode": "cors", "redirect": "follow"}; + + return promise_test(function(test) { + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(resp) { + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.headers.get("x-did-preflight"), "0", "No preflight request has been made"); + assert_equals(resp.headers.get("x-origin"), expectedOrigin, "Origin is correctly set after redirect"); + }); + }); + }, desc); +} + +var redirPath = 
dirname(location.pathname) + RESOURCES_DIR + "redirect.py"; +var preflightPath = dirname(location.pathname) + RESOURCES_DIR + "preflight.py"; + +var host_info = get_host_info(); + +var localRedirect = host_info.HTTP_ORIGIN + redirPath; +var remoteRedirect = host_info.HTTP_REMOTE_ORIGIN + redirPath; + +var localLocation = host_info.HTTP_ORIGIN + preflightPath; +var remoteLocation = host_info.HTTP_REMOTE_ORIGIN + preflightPath; +var remoteLocation2 = host_info.HTTP_ORIGIN_WITH_DIFFERENT_PORT + preflightPath; + +for (var code of [301, 302, 303, 307, 308]) { + corsRedirect("Redirect " + code + ": cors to same cors", remoteRedirect, remoteLocation, code, location.origin); + corsRedirect("Redirect " + code + ": cors to another cors", remoteRedirect, remoteLocation2, code, "null"); + corsRedirect("Redirect " + code + ": same origin to cors", localRedirect, remoteLocation, code, location.origin); + corsRedirect("Redirect " + code + ": cors to same origin", remoteRedirect, localLocation, code, "null"); +} diff --git a/test/wpt/tests/fetch/api/cors/data-url-iframe.html b/test/wpt/tests/fetch/api/cors/data-url-iframe.html new file mode 100644 index 0000000..217baa3 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/data-url-iframe.html @@ -0,0 +1,58 @@ + + + + + + diff --git a/test/wpt/tests/fetch/api/cors/data-url-shared-worker.html b/test/wpt/tests/fetch/api/cors/data-url-shared-worker.html new file mode 100644 index 0000000..d69748a --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/data-url-shared-worker.html @@ -0,0 +1,53 @@ + + + + + diff --git a/test/wpt/tests/fetch/api/cors/data-url-worker.html b/test/wpt/tests/fetch/api/cors/data-url-worker.html new file mode 100644 index 0000000..13113e6 --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/data-url-worker.html @@ -0,0 +1,50 @@ + + + + + diff --git a/test/wpt/tests/fetch/api/cors/resources/corspreflight.js b/test/wpt/tests/fetch/api/cors/resources/corspreflight.js new file mode 100644 index 0000000..18b8f6d --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/resources/corspreflight.js @@ -0,0 +1,58 @@ +function headerNames(headers) { + let names = []; + for (let header of headers) { + names.push(header[0].toLowerCase()); + } + return names; +} + +/* + Check preflight is done + Control if server allows method and headers and check accordingly + Check control access headers added by UA (for method and headers) +*/ +function corsPreflight(desc, corsUrl, method, allowed, headers, safeHeaders) { + return promise_test(function(test) { + var uuid_token = token(); + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token).then(function(response) { + var url = corsUrl + (corsUrl.indexOf("?") === -1 ? "?" 
: "&"); + var urlParameters = "token=" + uuid_token + "&max_age=0"; + var requestInit = {"mode": "cors", "method": method}; + var requestHeaders = []; + if (headers) + requestHeaders.push.apply(requestHeaders, headers); + if (safeHeaders) + requestHeaders.push.apply(requestHeaders, safeHeaders); + requestInit["headers"] = requestHeaders; + + if (allowed) { + urlParameters += "&allow_methods=" + method + "&control_request_headers"; + if (headers) { + //Make the server allow the headers + urlParameters += "&allow_headers=" + headerNames(headers).join("%20%2C"); + } + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.headers.get("x-did-preflight"), "1", "Preflight request has been made"); + if (headers) { + var actualHeaders = resp.headers.get("x-control-request-headers").toLowerCase().split(","); + for (var i in actualHeaders) + actualHeaders[i] = actualHeaders[i].trim(); + for (var header of headers) + assert_in_array(header[0].toLowerCase(), actualHeaders, "Preflight asked permission for header: " + header); + + let accessControlAllowHeaders = headerNames(headers).sort().join(","); + assert_equals(resp.headers.get("x-control-request-headers"), accessControlAllowHeaders, "Access-Control-Allow-Headers value"); + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token); + } else { + assert_equals(resp.headers.get("x-control-request-headers"), null, "Access-Control-Request-Headers should be omitted") + } + }); + } else { + return promise_rejects_js(test, TypeError, fetch(url + urlParameters, requestInit)).then(function(){ + return fetch(RESOURCES_DIR + "clean-stash.py?token=" + uuid_token); + }); + } + }); + }, desc); +} diff --git a/test/wpt/tests/fetch/api/cors/resources/not-cors-safelisted.json b/test/wpt/tests/fetch/api/cors/resources/not-cors-safelisted.json new file mode 100644 index 0000000..945dc0f --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/resources/not-cors-safelisted.json @@ -0,0 +1,13 @@ +[ + ["accept", "\""], + ["accept", "012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678"], + ["accept-language", "\u0001"], + ["accept-language", "@"], + ["authorization", "basics"], + ["content-language", "\u0001"], + ["content-language", "@"], + ["content-type", "text/html"], + ["content-type", "text/plain; long=0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901"], + ["range", "bytes 0-"], + ["test", "hi"] +] diff --git a/test/wpt/tests/fetch/api/cors/sandboxed-iframe.html b/test/wpt/tests/fetch/api/cors/sandboxed-iframe.html new file mode 100644 index 0000000..feb9f1f --- /dev/null +++ b/test/wpt/tests/fetch/api/cors/sandboxed-iframe.html @@ -0,0 +1,14 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/api/crashtests/body-window-destroy.html b/test/wpt/tests/fetch/api/crashtests/body-window-destroy.html new file mode 100644 index 0000000..646d3c5 --- /dev/null +++ b/test/wpt/tests/fetch/api/crashtests/body-window-destroy.html @@ -0,0 +1,11 @@ + + + diff --git a/test/wpt/tests/fetch/api/crashtests/request.html b/test/wpt/tests/fetch/api/crashtests/request.html new file mode 100644 index 0000000..2d21930 --- /dev/null +++ b/test/wpt/tests/fetch/api/crashtests/request.html @@ -0,0 +1,8 @@ + + + + diff --git a/test/wpt/tests/fetch/api/credentials/authentication-basic.any.js b/test/wpt/tests/fetch/api/credentials/authentication-basic.any.js new 
file mode 100644 index 0000000..31ccc38 --- /dev/null +++ b/test/wpt/tests/fetch/api/credentials/authentication-basic.any.js @@ -0,0 +1,17 @@ +// META: global=window,worker + +function basicAuth(desc, user, pass, mode, status) { + promise_test(function(test) { + var headers = { "Authorization": "Basic " + btoa(user + ":" + pass)}; + var requestInit = {"credentials": mode, "headers": headers}; + return fetch("../resources/authentication.py?realm=test", requestInit).then(function(resp) { + assert_equals(resp.status, status, "HTTP status is " + status); + assert_equals(resp.type , "basic", "Response's type is basic"); + }); + }, desc); +} + +basicAuth("User-added Authorization header with include mode", "user", "password", "include", 200); +basicAuth("User-added Authorization header with same-origin mode", "user", "password", "same-origin", 200); +basicAuth("User-added Authorization header with omit mode", "user", "password", "omit", 200); +basicAuth("User-added bogus Authorization header with omit mode", "notuser", "notpassword", "omit", 401); diff --git a/test/wpt/tests/fetch/api/credentials/authentication-redirection.any.js b/test/wpt/tests/fetch/api/credentials/authentication-redirection.any.js new file mode 100644 index 0000000..16656b5 --- /dev/null +++ b/test/wpt/tests/fetch/api/credentials/authentication-redirection.any.js @@ -0,0 +1,29 @@ +// META: global=window,worker +// META: script=/common/get-host-info.sub.js + +const authorizationValue = "Basic " + btoa("user:pass"); +async function getAuthorizationHeaderValue(url) +{ + const headers = { "Authorization": authorizationValue}; + const requestInit = {"headers": headers}; + const response = await fetch(url, requestInit); + return response.text(); +} + +promise_test(async test => { + const result = await getAuthorizationHeaderValue("/fetch/api/resources/dump-authorization-header.py"); + assert_equals(result, authorizationValue); +}, "getAuthorizationHeaderValue - no redirection"); + +promise_test(async test => { + result = await getAuthorizationHeaderValue("/fetch/api/resources/redirect.py?location=" + encodeURIComponent("/fetch/api/resources/dump-authorization-header.py")); + assert_equals(result, authorizationValue); + + result = await getAuthorizationHeaderValue(get_host_info().HTTPS_REMOTE_ORIGIN + "/fetch/api/resources/redirect.py?allow_headers=Authorization&location=" + encodeURIComponent(get_host_info().HTTPS_REMOTE_ORIGIN + "/fetch/api/resources/dump-authorization-header.py")); + assert_equals(result, authorizationValue); +}, "getAuthorizationHeaderValue - same origin redirection"); + +promise_test(async (test) => { + const result = await getAuthorizationHeaderValue(get_host_info().HTTPS_REMOTE_ORIGIN + "/fetch/api/resources/redirect.py?allow_headers=Authorization&location=" + encodeURIComponent(get_host_info().HTTPS_ORIGIN + "/fetch/api/resources/dump-authorization-header.py")); + assert_equals(result, "none"); +}, "getAuthorizationHeaderValue - cross origin redirection"); diff --git a/test/wpt/tests/fetch/api/credentials/cookies.any.js b/test/wpt/tests/fetch/api/credentials/cookies.any.js new file mode 100644 index 0000000..de30e47 --- /dev/null +++ b/test/wpt/tests/fetch/api/credentials/cookies.any.js @@ -0,0 +1,49 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +function cookies(desc, credentials1, credentials2 ,cookies) { + var url = RESOURCES_DIR + "top.txt" + var urlParameters = ""; + var urlCleanParameters = ""; + if (cookies) { + urlParameters +="?pipe=header(Set-Cookie,"; + urlParameters 
+= cookies.join(",True)|header(Set-Cookie,") + ",True)"; + urlCleanParameters +="?pipe=header(Set-Cookie,"; + urlCleanParameters += cookies.join("%3B%20max-age=0,True)|header(Set-Cookie,") + "%3B%20max-age=0,True)"; + } + + var requestInit = {"credentials": credentials1} + promise_test(function(test){ + var requestInit = {"credentials": credentials1} + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + //check cookies sent + return fetch(RESOURCES_DIR + "inspect-headers.py?headers=cookie" , {"credentials": credentials2}); + }).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + assert_false(resp.headers.has("Cookie") , "Cookie header is not exposed in response"); + if (credentials1 != "omit" && credentials2 != "omit") { + assert_equals(resp.headers.get("x-request-cookie") , cookies.join("; "), "Request include cookie(s)"); + } + else { + assert_false(resp.headers.has("x-request-cookie") , "Request does not have cookie(s)"); + } + //clean cookies + return fetch(url + urlCleanParameters, {"credentials": "include"}); + }).catch(function(e) { + return fetch(url + urlCleanParameters, {"credentials": "include"}).then(function() { + return Promise.reject(e); + }); + }); + }, desc); +} + +cookies("Include mode: 1 cookie", "include", "include", ["a=1"]); +cookies("Include mode: 2 cookies", "include", "include", ["b=2", "c=3"]); +cookies("Omit mode: discard cookies", "omit", "omit", ["d=4"]); +cookies("Omit mode: no cookie is stored", "omit", "include", ["e=5"]); +cookies("Omit mode: no cookie is sent", "include", "omit", ["f=6"]); +cookies("Same-origin mode: 1 cookie", "same-origin", "same-origin", ["a=1"]); +cookies("Same-origin mode: 2 cookies", "same-origin", "same-origin", ["b=2", "c=3"]); diff --git a/test/wpt/tests/fetch/api/headers/header-setcookie.any.js b/test/wpt/tests/fetch/api/headers/header-setcookie.any.js new file mode 100644 index 0000000..cafb780 --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/header-setcookie.any.js @@ -0,0 +1,266 @@ +// META: title=Headers set-cookie special cases +// META: global=window,worker + +const headerList = [ + ["set-cookie", "foo=bar"], + ["Set-Cookie", "fizz=buzz; domain=example.com"], +]; + +const setCookie2HeaderList = [ + ["set-cookie2", "foo2=bar2"], + ["Set-Cookie2", "fizz2=buzz2; domain=example2.com"], +]; + +function assert_nested_array_equals(actual, expected) { + assert_equals(actual.length, expected.length, "Array length is not equal"); + for (let i = 0; i < expected.length; i++) { + assert_array_equals(actual[i], expected[i]); + } +} + +test(function () { + const headers = new Headers(headerList); + assert_equals( + headers.get("set-cookie"), + "foo=bar, fizz=buzz; domain=example.com", + ); +}, "Headers.prototype.get combines set-cookie headers in order"); + +test(function () { + const headers = new Headers(headerList); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["set-cookie", "foo=bar"], + ["set-cookie", "fizz=buzz; domain=example.com"], + ]); +}, "Headers iterator does not combine set-cookie headers"); + +test(function () { + const headers = new Headers(setCookie2HeaderList); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["set-cookie2", "foo2=bar2, fizz2=buzz2; domain=example2.com"], + ]); +}, "Headers iterator does not special case set-cookie2 headers"); 
+ +test(function () { + const headers = new Headers([...headerList, ...setCookie2HeaderList]); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["set-cookie", "foo=bar"], + ["set-cookie", "fizz=buzz; domain=example.com"], + ["set-cookie2", "foo2=bar2, fizz2=buzz2; domain=example2.com"], + ]); +}, "Headers iterator does not combine set-cookie & set-cookie2 headers"); + +test(function () { + // Values are in non alphabetic order, and the iterator should yield in the + // headers in the exact order of the input. + const headers = new Headers([ + ["set-cookie", "z=z"], + ["set-cookie", "a=a"], + ["set-cookie", "n=n"], + ]); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["set-cookie", "z=z"], + ["set-cookie", "a=a"], + ["set-cookie", "n=n"], + ]); +}, "Headers iterator preserves set-cookie ordering"); + +test( + function () { + const headers = new Headers([ + ["xylophone-header", "1"], + ["best-header", "2"], + ["set-cookie", "3"], + ["a-cool-header", "4"], + ["set-cookie", "5"], + ["a-cool-header", "6"], + ["best-header", "7"], + ]); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["a-cool-header", "4, 6"], + ["best-header", "2, 7"], + ["set-cookie", "3"], + ["set-cookie", "5"], + ["xylophone-header", "1"], + ]); + }, + "Headers iterator preserves per header ordering, but sorts keys alphabetically", +); + +test( + function () { + const headers = new Headers([ + ["xylophone-header", "7"], + ["best-header", "6"], + ["set-cookie", "5"], + ["a-cool-header", "4"], + ["set-cookie", "3"], + ["a-cool-header", "2"], + ["best-header", "1"], + ]); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["a-cool-header", "4, 2"], + ["best-header", "6, 1"], + ["set-cookie", "5"], + ["set-cookie", "3"], + ["xylophone-header", "7"], + ]); + }, + "Headers iterator preserves per header ordering, but sorts keys alphabetically (and ignores value ordering)", +); + +test(function () { + const headers = new Headers([["fizz", "buzz"], ["X-Header", "test"]]); + const iterator = headers[Symbol.iterator](); + assert_array_equals(iterator.next().value, ["fizz", "buzz"]); + headers.append("Set-Cookie", "a=b"); + assert_array_equals(iterator.next().value, ["set-cookie", "a=b"]); + headers.append("Accept", "text/html"); + assert_array_equals(iterator.next().value, ["set-cookie", "a=b"]); + assert_array_equals(iterator.next().value, ["x-header", "test"]); + headers.append("set-cookie", "c=d"); + assert_array_equals(iterator.next().value, ["x-header", "test"]); + assert_true(iterator.next().done); +}, "Headers iterator is correctly updated with set-cookie changes"); + +test(function () { + const headers = new Headers([ + ["set-cookie", "a"], + ["set-cookie", "b"], + ["set-cookie", "c"] + ]); + const iterator = headers[Symbol.iterator](); + assert_array_equals(iterator.next().value, ["set-cookie", "a"]); + headers.delete("set-cookie"); + headers.append("set-cookie", "d"); + headers.append("set-cookie", "e"); + headers.append("set-cookie", "f"); + assert_array_equals(iterator.next().value, ["set-cookie", "e"]); + assert_array_equals(iterator.next().value, ["set-cookie", "f"]); + assert_true(iterator.next().done); +}, "Headers iterator is correctly updated with set-cookie changes #2"); + +test(function () { + const headers = new Headers(headerList); + assert_true(headers.has("sEt-cOoKiE")); +}, "Headers.prototype.has works for set-cookie"); + +test(function () { + const headers = new Headers(setCookie2HeaderList); + headers.append("set-Cookie", "foo=bar"); 
+ headers.append("sEt-cOoKiE", "fizz=buzz"); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["set-cookie", "foo=bar"], + ["set-cookie", "fizz=buzz"], + ["set-cookie2", "foo2=bar2, fizz2=buzz2; domain=example2.com"], + ]); +}, "Headers.prototype.append works for set-cookie"); + +test(function () { + const headers = new Headers(headerList); + headers.set("set-cookie", "foo2=bar2"); + const list = [...headers]; + assert_nested_array_equals(list, [ + ["set-cookie", "foo2=bar2"], + ]); +}, "Headers.prototype.set works for set-cookie"); + +test(function () { + const headers = new Headers(headerList); + headers.delete("set-Cookie"); + const list = [...headers]; + assert_nested_array_equals(list, []); +}, "Headers.prototype.delete works for set-cookie"); + +test(function () { + const headers = new Headers(); + assert_array_equals(headers.getSetCookie(), []); +}, "Headers.prototype.getSetCookie with no headers present"); + +test(function () { + const headers = new Headers([headerList[0]]); + assert_array_equals(headers.getSetCookie(), ["foo=bar"]); +}, "Headers.prototype.getSetCookie with one header"); + +test(function () { + const headers = new Headers({ "Set-Cookie": "foo=bar" }); + assert_array_equals(headers.getSetCookie(), ["foo=bar"]); +}, "Headers.prototype.getSetCookie with one header created from an object"); + +test(function () { + const headers = new Headers(headerList); + assert_array_equals(headers.getSetCookie(), [ + "foo=bar", + "fizz=buzz; domain=example.com", + ]); +}, "Headers.prototype.getSetCookie with multiple headers"); + +test(function () { + const headers = new Headers([["set-cookie", ""]]); + assert_array_equals(headers.getSetCookie(), [""]); +}, "Headers.prototype.getSetCookie with an empty header"); + +test(function () { + const headers = new Headers([["set-cookie", "x"], ["set-cookie", "x"]]); + assert_array_equals(headers.getSetCookie(), ["x", "x"]); +}, "Headers.prototype.getSetCookie with two equal headers"); + +test(function () { + const headers = new Headers([ + ["set-cookie2", "x"], + ["set-cookie", "y"], + ["set-cookie2", "z"], + ]); + assert_array_equals(headers.getSetCookie(), ["y"]); +}, "Headers.prototype.getSetCookie ignores set-cookie2 headers"); + +test(function () { + // Values are in non alphabetic order, and the iterator should yield in the + // headers in the exact order of the input. 
+ const headers = new Headers([ + ["set-cookie", "z=z"], + ["set-cookie", "a=a"], + ["set-cookie", "n=n"], + ]); + assert_array_equals(headers.getSetCookie(), ["z=z", "a=a", "n=n"]); +}, "Headers.prototype.getSetCookie preserves header ordering"); + +test(function () { + const headers = new Headers({"Set-Cookie": " a=b\n"}); + headers.append("set-cookie", "\n\rc=d "); + assert_nested_array_equals([...headers], [ + ["set-cookie", "a=b"], + ["set-cookie", "c=d"] + ]); + headers.set("set-cookie", "\te=f "); + assert_nested_array_equals([...headers], [["set-cookie", "e=f"]]); +}, "Adding Set-Cookie headers normalizes their value"); + +test(function () { + assert_throws_js(TypeError, () => { + new Headers({"set-cookie": "\0"}); + }); + + const headers = new Headers(); + assert_throws_js(TypeError, () => { + headers.append("Set-Cookie", "a\nb"); + }); + assert_throws_js(TypeError, () => { + headers.set("Set-Cookie", "a\rb"); + }); +}, "Adding invalid Set-Cookie headers throws"); + +test(function () { + const response = new Response(); + response.headers.append("Set-Cookie", "foo=bar"); + assert_array_equals(response.headers.getSetCookie(), []); + response.headers.append("sEt-cOokIe", "bar=baz"); + assert_array_equals(response.headers.getSetCookie(), []); +}, "Set-Cookie is a forbidden response header"); diff --git a/test/wpt/tests/fetch/api/headers/header-values-normalize.any.js b/test/wpt/tests/fetch/api/headers/header-values-normalize.any.js new file mode 100644 index 0000000..5710554 --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/header-values-normalize.any.js @@ -0,0 +1,72 @@ +// META: title=Header value normalizing test +// META: global=window,worker +// META: timeout=long + +"use strict"; + +for(let i = 0; i < 0x21; i++) { + let fail = false, + strip = false + + // REMOVE 0x0B/0x0C exception once https://github.com/web-platform-tests/wpt/issues/8372 is fixed + if(i === 0x0B || i === 0x0C) + continue + + if(i === 0) { + fail = true + } + + if(i === 0x09 || i === 0x0A || i === 0x0D || i === 0x20) { + strip = true + } + + let url = "../resources/inspect-headers.py?headers=val1|val2|val3", + val = String.fromCharCode(i), + expectedVal = strip ? 
"" : val, + val1 = val, + expectedVal1 = expectedVal, + val2 = "x" + val, + expectedVal2 = "x" + expectedVal, + val3 = val + "x", + expectedVal3 = expectedVal + "x" + + // XMLHttpRequest is not available in service workers + if (!self.GLOBAL.isWorker()) { + async_test((t) => { + let xhr = new XMLHttpRequest() + xhr.open("POST", url) + if(fail) { + assert_throws_dom("SyntaxError", () => xhr.setRequestHeader("val1", val1)) + assert_throws_dom("SyntaxError", () => xhr.setRequestHeader("val2", val2)) + assert_throws_dom("SyntaxError", () => xhr.setRequestHeader("val3", val3)) + t.done() + } else { + xhr.setRequestHeader("val1", val1) + xhr.setRequestHeader("val2", val2) + xhr.setRequestHeader("val3", val3) + xhr.onload = t.step_func_done(() => { + assert_equals(xhr.getResponseHeader("x-request-val1"), expectedVal1) + assert_equals(xhr.getResponseHeader("x-request-val2"), expectedVal2) + assert_equals(xhr.getResponseHeader("x-request-val3"), expectedVal3) + }) + xhr.send() + } + }, "XMLHttpRequest with value " + encodeURI(val)) + } + + promise_test((t) => { + if(fail) { + return Promise.all([ + promise_rejects_js(t, TypeError, fetch(url, { headers: {"val1": val1} })), + promise_rejects_js(t, TypeError, fetch(url, { headers: {"val2": val2} })), + promise_rejects_js(t, TypeError, fetch(url, { headers: {"val3": val3} })) + ]) + } else { + return fetch(url, { headers: {"val1": val1, "val2": val2, "val3": val3} }).then((res) => { + assert_equals(res.headers.get("x-request-val1"), expectedVal1) + assert_equals(res.headers.get("x-request-val2"), expectedVal2) + assert_equals(res.headers.get("x-request-val3"), expectedVal3) + }) + } + }, "fetch() with value " + encodeURI(val)) +} diff --git a/test/wpt/tests/fetch/api/headers/header-values.any.js b/test/wpt/tests/fetch/api/headers/header-values.any.js new file mode 100644 index 0000000..bb7570c --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/header-values.any.js @@ -0,0 +1,63 @@ +// META: title=Header value test +// META: global=window,worker +// META: timeout=long + +"use strict"; + +// Invalid values +[0, 0x0A, 0x0D].forEach(val => { + val = "x" + String.fromCharCode(val) + "x" + + // XMLHttpRequest is not available in service workers + if (!self.GLOBAL.isWorker()) { + test(() => { + let xhr = new XMLHttpRequest() + xhr.open("POST", "/") + assert_throws_dom("SyntaxError", () => xhr.setRequestHeader("value-test", val)) + }, "XMLHttpRequest with value " + encodeURI(val) + " needs to throw") + } + + promise_test(t => promise_rejects_js(t, TypeError, fetch("/", { headers: {"value-test": val} })), "fetch() with value " + encodeURI(val) + " needs to throw") +}) + +// Valid values +let headerValues =[] +for(let i = 0; i < 0x100; i++) { + if(i === 0 || i === 0x0A || i === 0x0D) { + continue + } + headerValues.push("x" + String.fromCharCode(i) + "x") +} +var url = "../resources/inspect-headers.py?headers=" +headerValues.forEach((_, i) => { + url += "val" + i + "|" +}) + +// XMLHttpRequest is not available in service workers +if (!self.GLOBAL.isWorker()) { + async_test((t) => { + let xhr = new XMLHttpRequest() + xhr.open("POST", url) + headerValues.forEach((val, i) => { + xhr.setRequestHeader("val" + i, val) + }) + xhr.onload = t.step_func_done(() => { + headerValues.forEach((val, i) => { + assert_equals(xhr.getResponseHeader("x-request-val" + i), val) + }) + }) + xhr.send() + }, "XMLHttpRequest with all valid values") +} + +promise_test((t) => { + const headers = new Headers + headerValues.forEach((val, i) => { + headers.append("val" + i, val) + }) + 
return fetch(url, { headers }).then((res) => { + headerValues.forEach((val, i) => { + assert_equals(res.headers.get("x-request-val" + i), val) + }) + }) +}, "fetch() with all valid values") diff --git a/test/wpt/tests/fetch/api/headers/headers-basic.any.js b/test/wpt/tests/fetch/api/headers/headers-basic.any.js new file mode 100644 index 0000000..ead1047 --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-basic.any.js @@ -0,0 +1,275 @@ +// META: title=Headers structure +// META: global=window,worker + +"use strict"; + +test(function() { + new Headers(); +}, "Create headers from no parameter"); + +test(function() { + new Headers(undefined); +}, "Create headers from undefined parameter"); + +test(function() { + new Headers({}); +}, "Create headers from empty object"); + +var parameters = [null, 1]; +parameters.forEach(function(parameter) { + test(function() { + assert_throws_js(TypeError, function() { new Headers(parameter) }); + }, "Create headers with " + parameter + " should throw"); +}); + +var headerDict = {"name1": "value1", + "name2": "value2", + "name3": "value3", + "name4": null, + "name5": undefined, + "name6": 1, + "Content-Type": "value4" +}; + +var headerSeq = []; +for (var name in headerDict) + headerSeq.push([name, headerDict[name]]); + +test(function() { + var headers = new Headers(headerSeq); + for (name in headerDict) { + assert_equals(headers.get(name), String(headerDict[name]), + "name: " + name + " has value: " + headerDict[name]); + } + assert_equals(headers.get("length"), null, "init should be treated as a sequence, not as a dictionary"); +}, "Create headers with sequence"); + +test(function() { + var headers = new Headers(headerDict); + for (name in headerDict) { + assert_equals(headers.get(name), String(headerDict[name]), + "name: " + name + " has value: " + headerDict[name]); + } +}, "Create headers with record"); + +test(function() { + var headers = new Headers(headerDict); + var headers2 = new Headers(headers); + for (name in headerDict) { + assert_equals(headers2.get(name), String(headerDict[name]), + "name: " + name + " has value: " + headerDict[name]); + } +}, "Create headers with existing headers"); + +test(function() { + var headers = new Headers() + headers[Symbol.iterator] = function *() { + yield ["test", "test"] + } + var headers2 = new Headers(headers) + assert_equals(headers2.get("test"), "test") +}, "Create headers with existing headers with custom iterator"); + +test(function() { + var headers = new Headers(); + for (name in headerDict) { + headers.append(name, headerDict[name]); + assert_equals(headers.get(name), String(headerDict[name]), + "name: " + name + " has value: " + headerDict[name]); + } +}, "Check append method"); + +test(function() { + var headers = new Headers(); + for (name in headerDict) { + headers.set(name, headerDict[name]); + assert_equals(headers.get(name), String(headerDict[name]), + "name: " + name + " has value: " + headerDict[name]); + } +}, "Check set method"); + +test(function() { + var headers = new Headers(headerDict); + for (name in headerDict) + assert_true(headers.has(name),"headers has name " + name); + + assert_false(headers.has("nameNotInHeaders"),"headers do not have header: nameNotInHeaders"); +}, "Check has method"); + +test(function() { + var headers = new Headers(headerDict); + for (name in headerDict) { + assert_true(headers.has(name),"headers have a header: " + name); + headers.delete(name) + assert_true(!headers.has(name),"headers do not have anymore a header: " + name); + } +}, "Check delete 
method"); + +test(function() { + var headers = new Headers(headerDict); + for (name in headerDict) + assert_equals(headers.get(name), String(headerDict[name]), + "name: " + name + " has value: " + headerDict[name]); + + assert_equals(headers.get("nameNotInHeaders"), null, "header: nameNotInHeaders has no value"); +}, "Check get method"); + +var headerEntriesDict = {"name1": "value1", + "Name2": "value2", + "name": "value3", + "content-Type": "value4", + "Content-Typ": "value5", + "Content-Types": "value6" +}; +var sortedHeaderDict = {}; +var headerValues = []; +var sortedHeaderKeys = Object.keys(headerEntriesDict).map(function(value) { + sortedHeaderDict[value.toLowerCase()] = headerEntriesDict[value]; + headerValues.push(headerEntriesDict[value]); + return value.toLowerCase(); +}).sort(); + +var iteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())); +function checkIteratorProperties(iterator) { + var prototype = Object.getPrototypeOf(iterator); + assert_equals(Object.getPrototypeOf(prototype), iteratorPrototype); + + var descriptor = Object.getOwnPropertyDescriptor(prototype, "next"); + assert_true(descriptor.configurable, "configurable"); + assert_true(descriptor.enumerable, "enumerable"); + assert_true(descriptor.writable, "writable"); +} + +test(function() { + var headers = new Headers(headerEntriesDict); + var actual = headers.keys(); + checkIteratorProperties(actual); + + sortedHeaderKeys.forEach(function(key) { + const entry = actual.next(); + assert_false(entry.done); + assert_equals(entry.value, key); + }); + assert_true(actual.next().done); + assert_true(actual.next().done); + + for (const key of headers.keys()) + assert_true(sortedHeaderKeys.indexOf(key) != -1); +}, "Check keys method"); + +test(function() { + var headers = new Headers(headerEntriesDict); + var actual = headers.values(); + checkIteratorProperties(actual); + + sortedHeaderKeys.forEach(function(key) { + const entry = actual.next(); + assert_false(entry.done); + assert_equals(entry.value, sortedHeaderDict[key]); + }); + assert_true(actual.next().done); + assert_true(actual.next().done); + + for (const value of headers.values()) + assert_true(headerValues.indexOf(value) != -1); +}, "Check values method"); + +test(function() { + var headers = new Headers(headerEntriesDict); + var actual = headers.entries(); + checkIteratorProperties(actual); + + sortedHeaderKeys.forEach(function(key) { + const entry = actual.next(); + assert_false(entry.done); + assert_equals(entry.value[0], key); + assert_equals(entry.value[1], sortedHeaderDict[key]); + }); + assert_true(actual.next().done); + assert_true(actual.next().done); + + for (const entry of headers.entries()) + assert_equals(entry[1], sortedHeaderDict[entry[0]]); +}, "Check entries method"); + +test(function() { + var headers = new Headers(headerEntriesDict); + var actual = headers[Symbol.iterator](); + + sortedHeaderKeys.forEach(function(key) { + const entry = actual.next(); + assert_false(entry.done); + assert_equals(entry.value[0], key); + assert_equals(entry.value[1], sortedHeaderDict[key]); + }); + assert_true(actual.next().done); + assert_true(actual.next().done); +}, "Check Symbol.iterator method"); + +test(function() { + var headers = new Headers(headerEntriesDict); + var reference = sortedHeaderKeys[Symbol.iterator](); + headers.forEach(function(value, key, container) { + assert_equals(headers, container); + const entry = reference.next(); + assert_false(entry.done); + assert_equals(key, entry.value); + assert_equals(value, 
sortedHeaderDict[entry.value]); + }); + assert_true(reference.next().done); +}, "Check forEach method"); + +test(() => { + const headers = new Headers({"foo": "2", "baz": "1", "BAR": "0"}); + const actualKeys = []; + const actualValues = []; + for (const [header, value] of headers) { + actualKeys.push(header); + actualValues.push(value); + headers.delete("foo"); + } + assert_array_equals(actualKeys, ["bar", "baz"]); + assert_array_equals(actualValues, ["0", "1"]); +}, "Iteration skips elements removed while iterating"); + +test(() => { + const headers = new Headers({"foo": "2", "baz": "1", "BAR": "0", "quux": "3"}); + const actualKeys = []; + const actualValues = []; + for (const [header, value] of headers) { + actualKeys.push(header); + actualValues.push(value); + if (header === "baz") + headers.delete("bar"); + } + assert_array_equals(actualKeys, ["bar", "baz", "quux"]); + assert_array_equals(actualValues, ["0", "1", "3"]); +}, "Removing elements already iterated over causes an element to be skipped during iteration"); + +test(() => { + const headers = new Headers({"foo": "2", "baz": "1", "BAR": "0", "quux": "3"}); + const actualKeys = []; + const actualValues = []; + for (const [header, value] of headers) { + actualKeys.push(header); + actualValues.push(value); + if (header === "baz") + headers.append("X-yZ", "4"); + } + assert_array_equals(actualKeys, ["bar", "baz", "foo", "quux", "x-yz"]); + assert_array_equals(actualValues, ["0", "1", "2", "3", "4"]); +}, "Appending a value pair during iteration causes it to be reached during iteration"); + +test(() => { + const headers = new Headers({"foo": "2", "baz": "1", "BAR": "0", "quux": "3"}); + const actualKeys = []; + const actualValues = []; + for (const [header, value] of headers) { + actualKeys.push(header); + actualValues.push(value); + if (header === "baz") + headers.append("abc", "-1"); + } + assert_array_equals(actualKeys, ["bar", "baz", "baz", "foo", "quux"]); + assert_array_equals(actualValues, ["0", "1", "1", "2", "3"]); +}, "Prepending a value pair before the current element position causes it to be skipped during iteration and adds the current element a second time"); diff --git a/test/wpt/tests/fetch/api/headers/headers-casing.any.js b/test/wpt/tests/fetch/api/headers/headers-casing.any.js new file mode 100644 index 0000000..20b8a9d --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-casing.any.js @@ -0,0 +1,54 @@ +// META: title=Headers case management +// META: global=window,worker + +"use strict"; + +var headerDictCase = {"UPPERCASE": "value1", + "lowercase": "value2", + "mixedCase": "value3", + "Content-TYPE": "value4" + }; + +function checkHeadersCase(originalName, headersToCheck, expectedDict) { + var lowCaseName = originalName.toLowerCase(); + var upCaseName = originalName.toUpperCase(); + var expectedValue = expectedDict[originalName]; + assert_equals(headersToCheck.get(originalName), expectedValue, + "name: " + originalName + " has value: " + expectedValue); + assert_equals(headersToCheck.get(lowCaseName), expectedValue, + "name: " + lowCaseName + " has value: " + expectedValue); + assert_equals(headersToCheck.get(upCaseName), expectedValue, + "name: " + upCaseName + " has value: " + expectedValue); +} + +test(function() { + var headers = new Headers(headerDictCase); + for (const name in headerDictCase) + checkHeadersCase(name, headers, headerDictCase) +}, "Create headers, names use characters with different case"); + +test(function() { + var headers = new Headers(); + for (const name in headerDictCase) { + 
headers.append(name, headerDictCase[name]); + checkHeadersCase(name, headers, headerDictCase); + } +}, "Check append method, names use characters with different case"); + +test(function() { + var headers = new Headers(); + for (const name in headerDictCase) { + headers.set(name, headerDictCase[name]); + checkHeadersCase(name, headers, headerDictCase); + } +}, "Check set method, names use characters with different case"); + +test(function() { + var headers = new Headers(); + for (const name in headerDictCase) + headers.set(name, headerDictCase[name]); + for (const name in headerDictCase) + headers.delete(name.toLowerCase()); + for (const name in headerDictCase) + assert_false(headers.has(name), "header " + name + " should have been deleted"); +}, "Check delete method, names use characters with different case"); diff --git a/test/wpt/tests/fetch/api/headers/headers-combine.any.js b/test/wpt/tests/fetch/api/headers/headers-combine.any.js new file mode 100644 index 0000000..4f3b6d1 --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-combine.any.js @@ -0,0 +1,66 @@ +// META: title=Headers have combined (and sorted) values +// META: global=window,worker + +"use strict"; + +var headerSeqCombine = [["single", "singleValue"], + ["double", "doubleValue1"], + ["double", "doubleValue2"], + ["triple", "tripleValue1"], + ["triple", "tripleValue2"], + ["triple", "tripleValue3"] +]; +var expectedDict = {"single": "singleValue", + "double": "doubleValue1, doubleValue2", + "triple": "tripleValue1, tripleValue2, tripleValue3" +}; + +test(function() { + var headers = new Headers(headerSeqCombine); + for (const name in expectedDict) + assert_equals(headers.get(name), expectedDict[name]); +}, "Create headers using same name for different values"); + +test(function() { + var headers = new Headers(headerSeqCombine); + for (const name in expectedDict) { + assert_true(headers.has(name), "name: " + name + " has value(s)"); + headers.delete(name); + assert_false(headers.has(name), "name: " + name + " has no value(s) anymore"); + } +}, "Check delete and has methods when using same name for different values"); + +test(function() { + var headers = new Headers(headerSeqCombine); + for (const name in expectedDict) { + headers.set(name,"newSingleValue"); + assert_equals(headers.get(name), "newSingleValue", "name: " + name + " has value: newSingleValue"); + } +}, "Check set methods when called with already used name"); + +test(function() { + var headers = new Headers(headerSeqCombine); + for (const name in expectedDict) { + var value = headers.get(name); + headers.append(name,"newSingleValue"); + assert_equals(headers.get(name), (value + ", " + "newSingleValue")); + } +}, "Check append methods when called with already used name"); + +test(() => { + const headers = new Headers([["1", "a"],["1", "b"]]); + for(let header of headers) { + assert_array_equals(header, ["1", "a, b"]); + } +}, "Iterate combined values"); + +test(() => { + const headers = new Headers([["2", "a"], ["1", "b"], ["2", "b"]]), + expected = [["1", "b"], ["2", "a, b"]]; + let i = 0; + for(let header of headers) { + assert_array_equals(header, expected[i]); + i++; + } + assert_equals(i, 2); +}, "Iterate combined values in sorted order") diff --git a/test/wpt/tests/fetch/api/headers/headers-errors.any.js b/test/wpt/tests/fetch/api/headers/headers-errors.any.js new file mode 100644 index 0000000..82dadd8 --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-errors.any.js @@ -0,0 +1,96 @@ +// META: title=Headers errors +// META: 
global=window,worker + +"use strict"; + +test(function() { + assert_throws_js(TypeError, function() { new Headers([["name"]]); }); +}, "Create headers giving an array having one string as init argument"); + +test(function() { + assert_throws_js(TypeError, function() { new Headers([["invalid", "invalidValue1", "invalidValue2"]]); }); +}, "Create headers giving an array having three strings as init argument"); + +test(function() { + assert_throws_js(TypeError, function() { new Headers([["invalidĀ", "Value1"]]); }); +}, "Create headers giving bad header name as init argument"); + +test(function() { + assert_throws_js(TypeError, function() { new Headers([["name", "invalidValueĀ"]]); }); +}, "Create headers giving bad header value as init argument"); + +var badNames = ["invalidĀ", {}]; +var badValues = ["invalidĀ"]; + +badNames.forEach(function(name) { + test(function() { + var headers = new Headers(); + assert_throws_js(TypeError, function() { headers.get(name); }); + }, "Check headers get with an invalid name " + name); +}); + +badNames.forEach(function(name) { + test(function() { + var headers = new Headers(); + assert_throws_js(TypeError, function() { headers.delete(name); }); + }, "Check headers delete with an invalid name " + name); +}); + +badNames.forEach(function(name) { + test(function() { + var headers = new Headers(); + assert_throws_js(TypeError, function() { headers.has(name); }); + }, "Check headers has with an invalid name " + name); +}); + +badNames.forEach(function(name) { + test(function() { + var headers = new Headers(); + assert_throws_js(TypeError, function() { headers.set(name, "Value1"); }); + }, "Check headers set with an invalid name " + name); +}); + +badValues.forEach(function(value) { + test(function() { + var headers = new Headers(); + assert_throws_js(TypeError, function() { headers.set("name", value); }); + }, "Check headers set with an invalid value " + value); +}); + +badNames.forEach(function(name) { + test(function() { + var headers = new Headers(); + assert_throws_js(TypeError, function() { headers.append("invalidĀ", "Value1"); }); + }, "Check headers append with an invalid name " + name); +}); + +badValues.forEach(function(value) { + test(function() { + var headers = new Headers(); + assert_throws_js(TypeError, function() { headers.append("name", value); }); + }, "Check headers append with an invalid value " + value); +}); + +test(function() { + var headers = new Headers([["name", "value"]]); + assert_throws_js(TypeError, function() { headers.forEach(); }); + assert_throws_js(TypeError, function() { headers.forEach(undefined); }); + assert_throws_js(TypeError, function() { headers.forEach(1); }); +}, "Headers forEach throws if argument is not callable"); + +test(function() { + var headers = new Headers([["name1", "value1"], ["name2", "value2"], ["name3", "value3"]]); + var counter = 0; + try { + headers.forEach(function(value, name) { + counter++; + if (name == "name2") + throw "error"; + }); + } catch (e) { + assert_equals(counter, 2); + assert_equals(e, "error"); + return; + } + assert_unreached(); +}, "Headers forEach loop should stop if callback is throwing exception"); diff --git a/test/wpt/tests/fetch/api/headers/headers-no-cors.any.js b/test/wpt/tests/fetch/api/headers/headers-no-cors.any.js new file mode 100644 index 0000000..60dbb9e --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-no-cors.any.js @@ -0,0 +1,59 @@ +// META: global=window,worker + +"use strict"; + +promise_test(() =>
fetch("../cors/resources/not-cors-safelisted.json").then(res => res.json().then(runTests)), "Loading data…"); + +const longValue = "s".repeat(127); + +[ + { + "headers": ["accept", "accept-language", "content-language"], + "values": [longValue, "", longValue] + }, + { + "headers": ["accept", "accept-language", "content-language"], + "values": ["", longValue] + }, + { + "headers": ["content-type"], + "values": ["text/plain;" + "s".repeat(116), "text/plain"] + } +].forEach(testItem => { + testItem.headers.forEach(header => { + test(() => { + const noCorsHeaders = new Request("about:blank", { mode: "no-cors" }).headers; + testItem.values.forEach((value) => { + noCorsHeaders.append(header, value); + assert_equals(noCorsHeaders.get(header), testItem.values[0], '1'); + }); + noCorsHeaders.set(header, testItem.values.join(", ")); + assert_equals(noCorsHeaders.get(header), testItem.values[0], '2'); + noCorsHeaders.delete(header); + assert_false(noCorsHeaders.has(header)); + }, "\"no-cors\" Headers object cannot have " + header + " set to " + testItem.values.join(", ")); + }); +}); + +function runTests(testArray) { + testArray = testArray.concat([ + ["dpr", "2"], + ["rtt", "1.0"], + ["downlink", "-1.0"], + ["ect", "6g"], + ["save-data", "on"], + ["viewport-width", "100"], + ["width", "100"], + ["unknown", "doesitmatter"] + ]); + testArray.forEach(testItem => { + const [headerName, headerValue] = testItem; + test(() => { + const noCorsHeaders = new Request("about:blank", { mode: "no-cors" }).headers; + noCorsHeaders.append(headerName, headerValue); + assert_false(noCorsHeaders.has(headerName)); + noCorsHeaders.set(headerName, headerValue); + assert_false(noCorsHeaders.has(headerName)); + }, "\"no-cors\" Headers object cannot have " + headerName + "/" + headerValue + " as header"); + }); +} diff --git a/test/wpt/tests/fetch/api/headers/headers-normalize.any.js b/test/wpt/tests/fetch/api/headers/headers-normalize.any.js new file mode 100644 index 0000000..68cf5b8 --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-normalize.any.js @@ -0,0 +1,56 @@ +// META: title=Headers normalize values +// META: global=window,worker + +"use strict"; + +const expectations = { + "name1": [" space ", "space"], + "name2": ["\ttab\t", "tab"], + "name3": [" spaceAndTab\t", "spaceAndTab"], + "name4": ["\r\n newLine", "newLine"], //obs-fold cases + "name5": ["newLine\r\n ", "newLine"], + "name6": ["\r\n\tnewLine", "newLine"], + "name7": ["\t\f\tnewLine\n", "\f\tnewLine"], + "name8": ["newLine\xa0", "newLine\xa0"], // \xa0 == non breaking space +}; + +test(function () { + const headerDict = Object.fromEntries( + Object.entries(expectations).map(([name, [actual]]) => [name, actual]), + ); + var headers = new Headers(headerDict); + for (const name in expectations) { + const expected = expectations[name][1]; + assert_equals( + headers.get(name), + expected, + "name: " + name + " has normalized value: " + expected, + ); + } +}, "Create headers with not normalized values"); + +test(function () { + var headers = new Headers(); + for (const name in expectations) { + headers.append(name, expectations[name][0]); + const expected = expectations[name][1]; + assert_equals( + headers.get(name), + expected, + "name: " + name + " has value: " + expected, + ); + } +}, "Check append method with not normalized values"); + +test(function () { + var headers = new Headers(); + for (const name in expectations) { + headers.set(name, expectations[name][0]); + const expected = expectations[name][1]; + assert_equals( + headers.get(name), + 
expected, + "name: " + name + " has value: " + expected, + ); + } +}, "Check set method with not normalized values"); diff --git a/test/wpt/tests/fetch/api/headers/headers-record.any.js b/test/wpt/tests/fetch/api/headers/headers-record.any.js new file mode 100644 index 0000000..fa85391 --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-record.any.js @@ -0,0 +1,357 @@ +// META: global=window,worker + +"use strict"; + +var log = []; +function clearLog() { + log = []; +} +function addLogEntry(name, args) { + log.push([ name, ...args ]); +} + +var loggingHandler = { +}; + +setup(function() { + for (let prop of Object.getOwnPropertyNames(Reflect)) { + loggingHandler[prop] = function(...args) { + addLogEntry(prop, args); + return Reflect[prop](...args); + } + } +}); + +test(function() { + var h = new Headers(); + assert_equals([...h].length, 0); +}, "Passing nothing to Headers constructor"); + +test(function() { + var h = new Headers(undefined); + assert_equals([...h].length, 0); +}, "Passing undefined to Headers constructor"); + +test(function() { + assert_throws_js(TypeError, function() { + var h = new Headers(null); + }); +}, "Passing null to Headers constructor"); + +test(function() { + this.add_cleanup(clearLog); + var record = { a: "b" }; + var proxy = new Proxy(record, loggingHandler); + var h = new Headers(proxy); + + assert_equals(log.length, 4); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[3], ["get", record, "a", proxy]); + + // Check the results. + assert_equals([...h].length, 1); + assert_array_equals([...h.keys()], ["a"]); + assert_true(h.has("a")); + assert_equals(h.get("a"), "b"); +}, "Basic operation with one property"); + +test(function() { + this.add_cleanup(clearLog); + var recordProto = { c: "d" }; + var record = Object.create(recordProto, { a: { value: "b", enumerable: true } }); + var proxy = new Proxy(record, loggingHandler); + var h = new Headers(proxy); + + assert_equals(log.length, 4); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[3], ["get", record, "a", proxy]); + + // Check the results. 
+ assert_equals([...h].length, 1); + assert_array_equals([...h.keys()], ["a"]); + assert_true(h.has("a")); + assert_equals(h.get("a"), "b"); +}, "Basic operation with one property and a proto"); + +test(function() { + this.add_cleanup(clearLog); + var record = { a: "b", c: "d" }; + var proxy = new Proxy(record, loggingHandler); + var h = new Headers(proxy); + + assert_equals(log.length, 6); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[3], ["get", record, "a", proxy]); + // Then the second [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[4], ["getOwnPropertyDescriptor", record, "c"]); + // Then the second [[Get]] from step 5.2. + assert_array_equals(log[5], ["get", record, "c", proxy]); + + // Check the results. + assert_equals([...h].length, 2); + assert_array_equals([...h.keys()], ["a", "c"]); + assert_true(h.has("a")); + assert_equals(h.get("a"), "b"); + assert_true(h.has("c")); + assert_equals(h.get("c"), "d"); +}, "Correct operation ordering with two properties"); + +test(function() { + this.add_cleanup(clearLog); + var record = { a: "b", "\uFFFF": "d" }; + var proxy = new Proxy(record, loggingHandler); + assert_throws_js(TypeError, function() { + var h = new Headers(proxy); + }); + + assert_equals(log.length, 5); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[3], ["get", record, "a", proxy]); + // Then the second [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[4], ["getOwnPropertyDescriptor", record, "\uFFFF"]); + // The second [[Get]] never happens, because we convert the invalid name to a + // ByteString first and throw. +}, "Correct operation ordering with two properties one of which has an invalid name"); + +test(function() { + this.add_cleanup(clearLog); + var record = { a: "\uFFFF", c: "d" } + var proxy = new Proxy(record, loggingHandler); + assert_throws_js(TypeError, function() { + var h = new Headers(proxy); + }); + + assert_equals(log.length, 4); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // Then the [[Get]] from step 5.2. 
+ assert_array_equals(log[3], ["get", record, "a", proxy]); + // Nothing else after this, because converting the result of that [[Get]] to a + // ByteString throws. +}, "Correct operation ordering with two properties one of which has an invalid value"); + +test(function() { + this.add_cleanup(clearLog); + var record = {}; + Object.defineProperty(record, "a", { value: "b", enumerable: false }); + Object.defineProperty(record, "c", { value: "d", enumerable: true }); + Object.defineProperty(record, "e", { value: "f", enumerable: false }); + var proxy = new Proxy(record, loggingHandler); + var h = new Headers(proxy); + + assert_equals(log.length, 6); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // No [[Get]] because not enumerable + // Then the second [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[3], ["getOwnPropertyDescriptor", record, "c"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[4], ["get", record, "c", proxy]); + // Then the third [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[5], ["getOwnPropertyDescriptor", record, "e"]); + // No [[Get]] because not enumerable + + // Check the results. + assert_equals([...h].length, 1); + assert_array_equals([...h.keys()], ["c"]); + assert_true(h.has("c")); + assert_equals(h.get("c"), "d"); +}, "Correct operation ordering with non-enumerable properties"); + +test(function() { + this.add_cleanup(clearLog); + var record = {a: "b", c: "d", e: "f"}; + var lyingHandler = { + getOwnPropertyDescriptor: function(target, name) { + if (name == "a" || name == "e") { + return undefined; + } + return Reflect.getOwnPropertyDescriptor(target, name); + } + }; + var lyingProxy = new Proxy(record, lyingHandler); + var proxy = new Proxy(lyingProxy, loggingHandler); + var h = new Headers(proxy); + + assert_equals(log.length, 6); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", lyingProxy, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", lyingProxy]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", lyingProxy, "a"]); + // No [[Get]] because no descriptor + // Then the second [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[3], ["getOwnPropertyDescriptor", lyingProxy, "c"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[4], ["get", lyingProxy, "c", proxy]); + // Then the third [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[5], ["getOwnPropertyDescriptor", lyingProxy, "e"]); + // No [[Get]] because no descriptor + + // Check the results. 
+ assert_equals([...h].length, 1); + assert_array_equals([...h.keys()], ["c"]); + assert_true(h.has("c")); + assert_equals(h.get("c"), "d"); +}, "Correct operation ordering with undefined descriptors"); + +test(function() { + this.add_cleanup(clearLog); + var record = {a: "b", c: "d"}; + var lyingHandler = { + ownKeys: function() { + return [ "a", "c", "a", "c" ]; + }, + }; + var lyingProxy = new Proxy(record, lyingHandler); + var proxy = new Proxy(lyingProxy, loggingHandler); + + // Returning duplicate keys from ownKeys() throws a TypeError. + assert_throws_js(TypeError, + function() { var h = new Headers(proxy); }); + + assert_equals(log.length, 2); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", lyingProxy, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", lyingProxy]); +}, "Correct operation ordering with repeated keys"); + +test(function() { + this.add_cleanup(clearLog); + var record = { + a: "b", + [Symbol.toStringTag]: { + // Make sure the ToString conversion of the value happens + // after the ToString conversion of the key. + toString: function () { addLogEntry("toString", [this]); return "nope"; } + }, + c: "d" }; + var proxy = new Proxy(record, loggingHandler); + assert_throws_js(TypeError, + function() { var h = new Headers(proxy); }); + + assert_equals(log.length, 7); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. + assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[3], ["get", record, "a", proxy]); + // Then the second [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[4], ["getOwnPropertyDescriptor", record, "c"]); + // Then the second [[Get]] from step 5.2. + assert_array_equals(log[5], ["get", record, "c", proxy]); + // Then the third [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[6], ["getOwnPropertyDescriptor", record, + Symbol.toStringTag]); + // Then we throw an exception converting the Symbol to a string, before we do + // the third [[Get]]. +}, "Basic operation with Symbol keys"); + +test(function() { + this.add_cleanup(clearLog); + var record = { + a: { + toString: function() { addLogEntry("toString", [this]); return "b"; } + }, + [Symbol.toStringTag]: { + toString: function () { addLogEntry("toString", [this]); return "nope"; } + }, + c: { + toString: function() { addLogEntry("toString", [this]); return "d"; } + } + }; + // Now make that Symbol-named property not enumerable. + Object.defineProperty(record, Symbol.toStringTag, { enumerable: false }); + assert_array_equals(Reflect.ownKeys(record), + ["a", "c", Symbol.toStringTag]); + + var proxy = new Proxy(record, loggingHandler); + var h = new Headers(proxy); + + assert_equals(log.length, 9); + // The first thing is the [[Get]] of Symbol.iterator to figure out whether + // we're a sequence, during overload resolution. 
+ assert_array_equals(log[0], ["get", record, Symbol.iterator, proxy]); + // Then we have the [[OwnPropertyKeys]] from + // https://webidl.spec.whatwg.org/#es-to-record step 4. + assert_array_equals(log[1], ["ownKeys", record]); + // Then the [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[2], ["getOwnPropertyDescriptor", record, "a"]); + // Then the [[Get]] from step 5.2. + assert_array_equals(log[3], ["get", record, "a", proxy]); + // Then the ToString on the value. + assert_array_equals(log[4], ["toString", record.a]); + // Then the second [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[5], ["getOwnPropertyDescriptor", record, "c"]); + // Then the second [[Get]] from step 5.2. + assert_array_equals(log[6], ["get", record, "c", proxy]); + // Then the ToString on the value. + assert_array_equals(log[7], ["toString", record.c]); + // Then the third [[GetOwnProperty]] from step 5.1. + assert_array_equals(log[8], ["getOwnPropertyDescriptor", record, + Symbol.toStringTag]); + // No [[Get]] because not enumerable. + + // Check the results. + assert_equals([...h].length, 2); + assert_array_equals([...h.keys()], ["a", "c"]); + assert_true(h.has("a")); + assert_equals(h.get("a"), "b"); + assert_true(h.has("c")); + assert_equals(h.get("c"), "d"); +}, "Operation with non-enumerable Symbol keys"); diff --git a/test/wpt/tests/fetch/api/headers/headers-structure.any.js b/test/wpt/tests/fetch/api/headers/headers-structure.any.js new file mode 100644 index 0000000..d826bca --- /dev/null +++ b/test/wpt/tests/fetch/api/headers/headers-structure.any.js @@ -0,0 +1,20 @@ +// META: title=Headers basic +// META: global=window,worker + +"use strict"; + +var headers = new Headers(); +var methods = ["append", + "delete", + "get", + "has", + "set", + //Headers is iterable + "entries", + "keys", + "values" + ]; +for (var idx in methods) + test(function() { + assert_true(methods[idx] in headers, "headers has " + methods[idx] + " method"); + }, "Headers has " + methods[idx] + " method"); diff --git a/test/wpt/tests/fetch/api/idlharness.any.js b/test/wpt/tests/fetch/api/idlharness.any.js new file mode 100644 index 0000000..7b3c694 --- /dev/null +++ b/test/wpt/tests/fetch/api/idlharness.any.js @@ -0,0 +1,21 @@ +// META: global=window,worker +// META: script=/resources/WebIDLParser.js +// META: script=/resources/idlharness.js +// META: timeout=long + +idl_test( + ['fetch'], + ['referrer-policy', 'html', 'dom'], + idl_array => { + idl_array.add_objects({ + Headers: ["new Headers()"], + Request: ["new Request('about:blank')"], + Response: ["new Response()"], + }); + if (self.GLOBAL.isWindow()) { + idl_array.add_objects({ Window: ['window'] }); + } else if (self.GLOBAL.isWorker()) { + idl_array.add_objects({ WorkerGlobalScope: ['self'] }); + } + } +); diff --git a/test/wpt/tests/fetch/api/policies/csp-blocked-worker.html b/test/wpt/tests/fetch/api/policies/csp-blocked-worker.html new file mode 100644 index 0000000..e8660df --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/csp-blocked-worker.html @@ -0,0 +1,16 @@ + + + + + Fetch in worker: blocked by CSP + + + + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/csp-blocked.html b/test/wpt/tests/fetch/api/policies/csp-blocked.html new file mode 100644 index 0000000..99e90df --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/csp-blocked.html @@ -0,0 +1,15 @@ + + + + + Fetch: blocked by CSP + + + + + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/csp-blocked.html.headers 
b/test/wpt/tests/fetch/api/policies/csp-blocked.html.headers new file mode 100644 index 0000000..c8c1e9f --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/csp-blocked.html.headers @@ -0,0 +1 @@ +Content-Security-Policy: connect-src 'none'; \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/csp-blocked.js b/test/wpt/tests/fetch/api/policies/csp-blocked.js new file mode 100644 index 0000000..28653ff --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/csp-blocked.js @@ -0,0 +1,13 @@ +if (this.document === undefined) { + importScripts("/resources/testharness.js"); + importScripts("../resources/utils.js"); +} + +//Content-Security-Policy: connect-src 'none'; cf .headers file +cspViolationUrl = RESOURCES_DIR + "top.txt"; + +promise_test(function(test) { + return promise_rejects_js(test, TypeError, fetch(cspViolationUrl)); +}, "Fetch is blocked by CSP, got a TypeError"); + +done(); diff --git a/test/wpt/tests/fetch/api/policies/csp-blocked.js.headers b/test/wpt/tests/fetch/api/policies/csp-blocked.js.headers new file mode 100644 index 0000000..c8c1e9f --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/csp-blocked.js.headers @@ -0,0 +1 @@ +Content-Security-Policy: connect-src 'none'; \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/nested-policy.js b/test/wpt/tests/fetch/api/policies/nested-policy.js new file mode 100644 index 0000000..b0d1769 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/nested-policy.js @@ -0,0 +1 @@ +// empty, but referrer-policy set on this file diff --git a/test/wpt/tests/fetch/api/policies/nested-policy.js.headers b/test/wpt/tests/fetch/api/policies/nested-policy.js.headers new file mode 100644 index 0000000..7ffbf17 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/nested-policy.js.headers @@ -0,0 +1 @@ +Referrer-Policy: no-referrer diff --git a/test/wpt/tests/fetch/api/policies/referrer-no-referrer-service-worker.https.html b/test/wpt/tests/fetch/api/policies/referrer-no-referrer-service-worker.https.html new file mode 100644 index 0000000..af898aa --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-no-referrer-service-worker.https.html @@ -0,0 +1,18 @@ + + + + + Fetch in service worker: referrer with no-referrer policy + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/policies/referrer-no-referrer-worker.html b/test/wpt/tests/fetch/api/policies/referrer-no-referrer-worker.html new file mode 100644 index 0000000..dbef9bb --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-no-referrer-worker.html @@ -0,0 +1,17 @@ + + + + + Fetch in worker: referrer with no-referrer policy + + + + + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/referrer-no-referrer.html b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.html new file mode 100644 index 0000000..22a6f34 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.html @@ -0,0 +1,15 @@ + + + + + Fetch: referrer with no-referrer policy + + + + + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/referrer-no-referrer.html.headers b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.html.headers new file mode 100644 index 0000000..7ffbf17 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.html.headers @@ -0,0 +1 @@ +Referrer-Policy: no-referrer diff --git a/test/wpt/tests/fetch/api/policies/referrer-no-referrer.js b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.js new file mode 100644 index 
0000000..60600bf --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.js @@ -0,0 +1,19 @@ +if (this.document === undefined) { + importScripts("/resources/testharness.js"); + importScripts("../resources/utils.js"); +} + +var fetchedUrl = RESOURCES_DIR + "inspect-headers.py?headers=origin"; + +promise_test(function(test) { + return fetch(fetchedUrl).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + var referrer = resp.headers.get("x-request-referer"); + //Either no referrer header is sent or it is empty + if (referrer) + assert_equals(referrer, "", "request's referrer is empty"); + }); +}, "Request's referrer is empty"); + +done(); diff --git a/test/wpt/tests/fetch/api/policies/referrer-no-referrer.js.headers b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.js.headers new file mode 100644 index 0000000..7ffbf17 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-no-referrer.js.headers @@ -0,0 +1 @@ +Referrer-Policy: no-referrer diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-service-worker.https.html b/test/wpt/tests/fetch/api/policies/referrer-origin-service-worker.https.html new file mode 100644 index 0000000..4018b83 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-service-worker.https.html @@ -0,0 +1,18 @@ + + + + + Fetch in service worker: referrer with no-referrer policy + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-service-worker.https.html b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-service-worker.https.html new file mode 100644 index 0000000..d87192e --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-service-worker.https.html @@ -0,0 +1,17 @@ + + + + + Fetch in service worker: referrer with origin-when-cross-origin policy + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-worker.html b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-worker.html new file mode 100644 index 0000000..f95ae8c --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin-worker.html @@ -0,0 +1,16 @@ + + + + + Fetch in worker: referrer with origin-when-cross-origin policy + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html new file mode 100644 index 0000000..5cd79e4 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html @@ -0,0 +1,16 @@ + + + + + Fetch: referrer with origin-when-cross-origin policy + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html.headers b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html.headers new file mode 100644 index 0000000..ad768e6 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.html.headers @@ -0,0 +1 @@ +Referrer-Policy: origin-when-cross-origin diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js new file mode 100644 index 0000000..0adadbc --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js @@ -0,0 +1,21 @@ +if (this.document === undefined) { + 
importScripts("/resources/testharness.js"); + importScripts("../resources/utils.js"); + importScripts("/common/get-host-info.sub.js"); + + // A nested importScripts() with a referrer-policy should have no effect + // on overall worker policy. + importScripts("nested-policy.js"); +} + +var referrerOrigin = location.origin + '/'; +var fetchedUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py?cors&headers=referer"; + +promise_test(function(test) { + return fetch(fetchedUrl).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.headers.get("x-request-referer"), referrerOrigin, "request's referrer is " + referrerOrigin); + }); +}, "Request's referrer is origin"); + +done(); diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js.headers b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js.headers new file mode 100644 index 0000000..ad768e6 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-when-cross-origin.js.headers @@ -0,0 +1 @@ +Referrer-Policy: origin-when-cross-origin diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin-worker.html b/test/wpt/tests/fetch/api/policies/referrer-origin-worker.html new file mode 100644 index 0000000..bb80dd5 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin-worker.html @@ -0,0 +1,17 @@ + + + + + Fetch in worker: referrer with origin policy + + + + + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin.html b/test/wpt/tests/fetch/api/policies/referrer-origin.html new file mode 100644 index 0000000..b164afe --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin.html @@ -0,0 +1,16 @@ + + + + + Fetch: referrer with origin policy + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin.html.headers b/test/wpt/tests/fetch/api/policies/referrer-origin.html.headers new file mode 100644 index 0000000..5b29739 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin.html.headers @@ -0,0 +1 @@ +Referrer-Policy: origin diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin.js b/test/wpt/tests/fetch/api/policies/referrer-origin.js new file mode 100644 index 0000000..918f8f2 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin.js @@ -0,0 +1,30 @@ +if (this.document === undefined) { + importScripts("/resources/testharness.js"); + importScripts("../resources/utils.js"); + + // A nested importScripts() with a referrer-policy should have no effect + // on overall worker policy. 
+ importScripts("nested-policy.js"); +} + +var referrerOrigin = (new URL("/", location.href)).href; +var fetchedUrl = RESOURCES_DIR + "inspect-headers.py?headers=referer"; + +promise_test(function(test) { + return fetch(fetchedUrl).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + assert_equals(resp.headers.get("x-request-referer"), referrerOrigin, "request's referrer is " + referrerOrigin); + }); +}, "Request's referrer is origin"); + +promise_test(function(test) { + var referrerUrl = "https://{{domains[www]}}:{{ports[https][0]}}/"; + return fetch(fetchedUrl, { "referrer": referrerUrl }).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + assert_equals(resp.headers.get("x-request-referer"), referrerOrigin, "request's referrer is " + referrerOrigin); + }); +}, "Cross-origin referrer is overridden by client origin"); + +done(); diff --git a/test/wpt/tests/fetch/api/policies/referrer-origin.js.headers b/test/wpt/tests/fetch/api/policies/referrer-origin.js.headers new file mode 100644 index 0000000..5b29739 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-origin.js.headers @@ -0,0 +1 @@ +Referrer-Policy: origin diff --git a/test/wpt/tests/fetch/api/policies/referrer-unsafe-url-service-worker.https.html b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url-service-worker.https.html new file mode 100644 index 0000000..634877e --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url-service-worker.https.html @@ -0,0 +1,18 @@ + + + + + Fetch in worker: referrer with unsafe-url policy + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/policies/referrer-unsafe-url-worker.html b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url-worker.html new file mode 100644 index 0000000..4204577 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url-worker.html @@ -0,0 +1,17 @@ + + + + + Fetch in worker: referrer with unsafe-url policy + + + + + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html new file mode 100644 index 0000000..10dd79e --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html @@ -0,0 +1,16 @@ + + + + + Fetch: referrer with unsafe-url policy + + + + + + + + + + + \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html.headers b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html.headers new file mode 100644 index 0000000..8e23770 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.html.headers @@ -0,0 +1 @@ +Referrer-Policy: unsafe-url diff --git a/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js new file mode 100644 index 0000000..4d61172 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js @@ -0,0 +1,21 @@ +if (this.document === undefined) { + importScripts("/resources/testharness.js"); + importScripts("../resources/utils.js"); + + // A nested importScripts() with a referrer-policy should have no effect + // on overall worker policy. 
+ importScripts("nested-policy.js"); +} + +var referrerUrl = location.href; +var fetchedUrl = RESOURCES_DIR + "inspect-headers.py?headers=referer"; + +promise_test(function(test) { + return fetch(fetchedUrl).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type , "basic", "Response's type is basic"); + assert_equals(resp.headers.get("x-request-referer"), referrerUrl, "request's referrer is " + referrerUrl); + }); +}, "Request's referrer is the full url of current document/worker"); + +done(); diff --git a/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js.headers b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js.headers new file mode 100644 index 0000000..8e23770 --- /dev/null +++ b/test/wpt/tests/fetch/api/policies/referrer-unsafe-url.js.headers @@ -0,0 +1 @@ +Referrer-Policy: unsafe-url diff --git a/test/wpt/tests/fetch/api/redirect/redirect-back-to-original-origin.any.js b/test/wpt/tests/fetch/api/redirect/redirect-back-to-original-origin.any.js new file mode 100644 index 0000000..74d731f --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-back-to-original-origin.any.js @@ -0,0 +1,38 @@ +// META: global=window,worker +// META: script=/common/get-host-info.sub.js + +const BASE = location.href; +const IS_HTTPS = new URL(BASE).protocol === 'https:'; +const REMOTE_HOST = get_host_info()['REMOTE_HOST']; +const REMOTE_PORT = + IS_HTTPS ? get_host_info()['HTTPS_PORT'] : get_host_info()['HTTP_PORT']; + +const REMOTE_ORIGIN = + new URL(`//${REMOTE_HOST}:${REMOTE_PORT}`, BASE).origin; +const DESTINATION = new URL('../resources/cors-top.txt', BASE); + +function CreateURL(url, BASE, params) { + const u = new URL(url, BASE); + for (const {name, value} of params) { + u.searchParams.append(name, value); + } + return u; +} + +const redirect = + CreateURL('/fetch/api/resources/redirect.py', REMOTE_ORIGIN, + [{name: 'redirect_status', value: 303}, + {name: 'location', value: DESTINATION.href}]); + +promise_test(async (test) => { + const res = await fetch(redirect.href, {mode: 'no-cors'}); + // This is discussed at https://github.com/whatwg/fetch/issues/737. + assert_equals(res.type, 'opaque'); +}, 'original => remote => original with mode: "no-cors"'); + +promise_test(async (test) => { + const res = await fetch(redirect.href, {mode: 'cors'}); + assert_equals(res.type, 'cors'); +}, 'original => remote => original with mode: "cors"'); + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-count.any.js b/test/wpt/tests/fetch/api/redirect/redirect-count.any.js new file mode 100644 index 0000000..420f9c0 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-count.any.js @@ -0,0 +1,51 @@ +// META: global=window,worker +// META: script=../resources/utils.js +// META: script=/common/utils.js +// META: timeout=long + +/** + * Fetches a target that returns response with HTTP status code `statusCode` to + * redirect `maxCount` times. 
+ */ +function redirectCountTest(maxCount, {statusCode, shouldPass = true} = {}) { + const desc = `Redirect ${statusCode} ${maxCount} times`; + + const fromUrl = `${RESOURCES_DIR}redirect.py`; + const toUrl = fromUrl; + const token1 = token(); + const url = `${fromUrl}?token=${token1}` + + `&max_age=0` + + `&redirect_status=${statusCode}` + + `&max_count=${maxCount}` + + `&location=${encodeURIComponent(toUrl)}`; + + const requestInit = {'redirect': 'follow'}; + + promise_test((test) => { + return fetch(`${RESOURCES_DIR}clean-stash.py?token=${token1}`) + .then((resp) => { + assert_equals( + resp.status, 200, 'Clean stash response\'s status is 200'); + + if (!shouldPass) + return promise_rejects_js(test, TypeError, fetch(url, requestInit)); + + return fetch(url, requestInit) + .then((resp) => { + assert_equals(resp.status, 200, 'Response\'s status is 200'); + return resp.text(); + }) + .then((body) => { + assert_equals( + body, maxCount.toString(), `Redirected ${maxCount} times`); + }); + }); + }, desc); +} + +for (const statusCode of [301, 302, 303, 307, 308]) { + redirectCountTest(20, {statusCode}); + redirectCountTest(21, {statusCode, shouldPass: false}); +} + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-empty-location.any.js b/test/wpt/tests/fetch/api/redirect/redirect-empty-location.any.js new file mode 100644 index 0000000..487f4d4 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-empty-location.any.js @@ -0,0 +1,21 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +// Tests receiving a redirect response with a Location header with an empty +// value. + +const url = RESOURCES_DIR + 'redirect-empty-location.py'; + +promise_test(t => { + return promise_rejects_js(t, TypeError, fetch(url, {redirect:'follow'})); +}, 'redirect response with empty Location, follow mode'); + +promise_test(t => { + return fetch(url, {redirect:'manual'}) + .then(resp => { + assert_equals(resp.type, 'opaqueredirect'); + assert_equals(resp.status, 0); + }); +}, 'redirect response with empty Location, manual mode'); + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-keepalive.any.js b/test/wpt/tests/fetch/api/redirect/redirect-keepalive.any.js new file mode 100644 index 0000000..bcfc444 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-keepalive.any.js @@ -0,0 +1,94 @@ +// META: global=window +// META: title=Fetch API: keepalive handling +// META: script=/resources/testharness.js +// META: script=/resources/testharnessreport.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=../resources/keepalive-helper.js + +'use strict'; + +const { + HTTP_NOTSAMESITE_ORIGIN, + HTTP_REMOTE_ORIGIN, + HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT +} = get_host_info(); + +/** + * In an iframe, test to fetch a keepalive URL that involves in redirect to + * another URL. 
+ */ +function keepaliveRedirectTest( + desc, {origin1 = '', origin2 = '', withPreflight = false} = {}) { + desc = `[keepalive] ${desc}`; + promise_test(async (test) => { + const tokenToStash = token(); + const iframe = document.createElement('iframe'); + iframe.src = getKeepAliveAndRedirectIframeUrl( + tokenToStash, origin1, origin2, withPreflight); + document.body.appendChild(iframe); + await iframeLoaded(iframe); + assert_equals(await getTokenFromMessage(), tokenToStash); + iframe.remove(); + + assertStashedTokenAsync(desc, tokenToStash); + }, `${desc}; setting up`); +} + +/** + * Opens a different site window, and in `unload` event handler, test to fetch + * a keepalive URL that involves in redirect to another URL. + */ +function keepaliveRedirectInUnloadTest(desc, { + origin1 = '', + origin2 = '', + url2 = '', + withPreflight = false, + shouldPass = true +} = {}) { + desc = `[keepalive][new window][unload] ${desc}`; + + promise_test(async (test) => { + const targetUrl = + `${HTTP_NOTSAMESITE_ORIGIN}/fetch/api/resources/keepalive-redirect-window.html?` + + `origin1=${origin1}&` + + `origin2=${origin2}&` + + `url2=${url2}&` + (withPreflight ? `with-headers` : ``); + const w = window.open(targetUrl); + const token = await getTokenFromMessage(); + w.close(); + + assertStashedTokenAsync(desc, token, {shouldPass}); + }, `${desc}; setting up`); +} + +keepaliveRedirectTest(`same-origin redirect`); +keepaliveRedirectTest( + `same-origin redirect + preflight`, {withPreflight: true}); +keepaliveRedirectTest(`cross-origin redirect`, { + origin1: HTTP_REMOTE_ORIGIN, + origin2: HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT +}); +keepaliveRedirectTest(`cross-origin redirect + preflight`, { + origin1: HTTP_REMOTE_ORIGIN, + origin2: HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, + withPreflight: true +}); + +keepaliveRedirectInUnloadTest('same-origin redirect'); +keepaliveRedirectInUnloadTest( + 'same-origin redirect + preflight', {withPreflight: true}); +keepaliveRedirectInUnloadTest('cross-origin redirect', { + origin1: HTTP_REMOTE_ORIGIN, + origin2: HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT +}); +keepaliveRedirectInUnloadTest('cross-origin redirect + preflight', { + origin1: HTTP_REMOTE_ORIGIN, + origin2: HTTP_REMOTE_ORIGIN_WITH_DIFFERENT_PORT, + withPreflight: true +}); +keepaliveRedirectInUnloadTest( + 'redirect to file URL', {url2: 'file://tmp/bar.txt', shouldPass: false}); +keepaliveRedirectInUnloadTest( + 'redirect to data URL', + {url2: 'data:text/plain;base64,cmVzcG9uc2UncyBib2R5', shouldPass: false}); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-location-escape.tentative.any.js b/test/wpt/tests/fetch/api/redirect/redirect-location-escape.tentative.any.js new file mode 100644 index 0000000..779ad70 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-location-escape.tentative.any.js @@ -0,0 +1,46 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +// See https://github.com/whatwg/fetch/issues/883 for the behavior covered by +// this test. As of writing, the Fetch spec has not been updated to cover these. + +// redirectLocation tests that a Location header of |locationHeader| is resolved +// to a URL which ends in |expectedUrlSuffix|. |locationHeader| is interpreted +// as a byte sequence via isomorphic encode, as described in [INFRA]. This +// allows the caller to specify byte sequences which are not valid UTF-8. +// However, this means, e.g., U+2603 must be passed in as "\xe2\x98\x83", its +// UTF-8 encoding, not "\u2603". 
+// +// [INFRA] https://infra.spec.whatwg.org/#isomorphic-encode +function redirectLocation( + desc, redirectUrl, locationHeader, expectedUrlSuffix) { + promise_test(function(test) { + // Note we use escape() instead of encodeURIComponent(), so that characters + // are escaped as bytes in the isomorphic encoding. + var url = redirectUrl + '?simple=1&location=' + escape(locationHeader); + + return fetch(url, {'redirect': 'follow'}).then(function(resp) { + assert_true( + resp.url.endsWith(expectedUrlSuffix), + resp.url + ' ends with ' + expectedUrlSuffix); + }); + }, desc); +} + +var redirUrl = RESOURCES_DIR + 'redirect.py'; +redirectLocation( + 'Redirect to escaped UTF-8', redirUrl, 'top.txt?%E2%98%83%e2%98%83', + 'top.txt?%E2%98%83%e2%98%83'); +redirectLocation( + 'Redirect to unescaped UTF-8', redirUrl, 'top.txt?\xe2\x98\x83', + 'top.txt?%E2%98%83'); +redirectLocation( + 'Redirect to escaped and unescaped UTF-8', redirUrl, + 'top.txt?\xe2\x98\x83%e2%98%83', 'top.txt?%E2%98%83%e2%98%83'); +redirectLocation( + 'Escaping produces double-percent', redirUrl, 'top.txt?%\xe2\x98\x83', + 'top.txt?%%E2%98%83'); +redirectLocation( + 'Redirect to invalid UTF-8', redirUrl, 'top.txt?\xff', 'top.txt?%FF'); + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-location.any.js b/test/wpt/tests/fetch/api/redirect/redirect-location.any.js new file mode 100644 index 0000000..3d483bd --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-location.any.js @@ -0,0 +1,73 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +const VALID_URL = 'top.txt'; +const INVALID_URL = 'invalidurl:'; +const DATA_URL = 'data:text/plain;base64,cmVzcG9uc2UncyBib2R5'; + +/** + * A test to fetch a URL that returns response redirecting to `toUrl` with + * `status` as its HTTP status code. `expectStatus` can be set to test the + * status code in fetch's Promise response. + */ +function redirectLocationTest(toUrlDesc, { + toUrl = undefined, + status, + expectStatus = undefined, + mode, + shouldPass = true +} = {}) { + toUrlDesc = toUrl ? `with ${toUrlDesc}` : `without`; + const desc = `Redirect ${status} in "${mode}" mode ${toUrlDesc} location`; + const url = `${RESOURCES_DIR}redirect.py?redirect_status=${status}` + + (toUrl ? `&location=${encodeURIComponent(toUrl)}` : ''); + const requestInit = {'redirect': mode}; + if (!expectStatus) + expectStatus = status; + + promise_test((test) => { + if (mode === 'error' || !shouldPass) + return promise_rejects_js(test, TypeError, fetch(url, requestInit)); + if (mode === 'manual') + return fetch(url, requestInit).then((resp) => { + assert_equals(resp.status, 0, "Response's status is 0"); + assert_equals(resp.type, "opaqueredirect", "Response's type is opaqueredirect"); + assert_equals(resp.statusText, '', `Response's statusText is ""`); + assert_true(resp.headers.entries().next().done, "Headers should be empty"); + }); + + if (mode === 'follow') + return fetch(url, requestInit).then((resp) => { + assert_equals( + resp.status, expectStatus, `Response's status is ${expectStatus}`); + }); + assert_unreached(`${mode} is not a valid redirect mode`); + }, desc); +} + +// FIXME: We may want to mix redirect-mode and cors-mode. +for (const status of [301, 302, 303, 307, 308]) { + redirectLocationTest('without location', {status, mode: 'follow'}); + redirectLocationTest('without location', {status, mode: 'manual'}); + // FIXME: Add tests for "error" redirect-mode without location. + + // When succeeded, `follow` mode should have followed all redirects. 
+ redirectLocationTest( + 'valid', {toUrl: VALID_URL, status, expectStatus: 200, mode: 'follow'}); + redirectLocationTest('valid', {toUrl: VALID_URL, status, mode: 'manual'}); + redirectLocationTest('valid', {toUrl: VALID_URL, status, mode: 'error'}); + + redirectLocationTest( + 'invalid', + {toUrl: INVALID_URL, status, mode: 'follow', shouldPass: false}); + redirectLocationTest('invalid', {toUrl: INVALID_URL, status, mode: 'manual'}); + redirectLocationTest('invalid', {toUrl: INVALID_URL, status, mode: 'error'}); + + redirectLocationTest( + 'data', {toUrl: DATA_URL, status, mode: 'follow', shouldPass: false}); + // FIXME: Should this pass? + redirectLocationTest('data', {toUrl: DATA_URL, status, mode: 'manual'}); + redirectLocationTest('data', {toUrl: DATA_URL, status, mode: 'error'}); +} + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-method.any.js b/test/wpt/tests/fetch/api/redirect/redirect-method.any.js new file mode 100644 index 0000000..9fe086a --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-method.any.js @@ -0,0 +1,112 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +// Creates a promise_test that fetches a URL that returns a redirect response. +// +// |opts| has additional options: +// |opts.body|: the request body as a string or blob (default is empty body) +// |opts.expectedBodyAsString|: the expected response body as a string. The +// server is expected to echo the request body. The default is the empty string +// if the request after redirection isn't POST; otherwise it's |opts.body|. +// |opts.expectedRequestContentType|: the expected Content-Type of redirected +// request. +function redirectMethod(desc, redirectUrl, redirectLocation, redirectStatus, method, expectedMethod, opts) { + let url = redirectUrl; + let urlParameters = "?redirect_status=" + redirectStatus; + urlParameters += "&location=" + encodeURIComponent(redirectLocation); + + let requestHeaders = { + "Content-Encoding": "Identity", + "Content-Language": "en-US", + "Content-Location": "foo", + }; + let requestInit = {"method": method, "redirect": "follow", "headers" : requestHeaders}; + opts = opts || {}; + if (opts.body) { + requestInit.body = opts.body; + } + + promise_test(function(test) { + return fetch(url + urlParameters, requestInit).then(function(resp) { + let expectedRequestContentType = "NO"; + if (opts.expectedRequestContentType) { + expectedRequestContentType = opts.expectedRequestContentType; + } + + assert_equals(resp.status, 200, "Response's status is 200"); + assert_equals(resp.type, "basic", "Response's type basic"); + assert_equals( + resp.headers.get("x-request-method"), + expectedMethod, + "Request method after redirection is " + expectedMethod); + let hasRequestBodyHeader = true; + if (opts.expectedStripRequestBodyHeader) { + hasRequestBodyHeader = !opts.expectedStripRequestBodyHeader; + } + assert_equals( + resp.headers.get("x-request-content-type"), + expectedRequestContentType, + "Request Content-Type after redirection is " + expectedRequestContentType); + [ + "Content-Encoding", + "Content-Language", + "Content-Location" + ].forEach(header => { + let xHeader = "x-request-" + header.toLowerCase(); + let expectedValue = hasRequestBodyHeader ? 
requestHeaders[header] : "NO"; + assert_equals( + resp.headers.get(xHeader), + expectedValue, + "Request " + header + " after redirection is " + expectedValue); + }); + assert_true(resp.redirected); + return resp.text().then(function(text) { + let expectedBody = ""; + if (expectedMethod == "POST") { + expectedBody = opts.expectedBodyAsString || requestInit.body; + } + let expectedContentLength = expectedBody ? expectedBody.length.toString() : "NO"; + assert_equals(text, expectedBody, "request body"); + assert_equals( + resp.headers.get("x-request-content-length"), + expectedContentLength, + "Request Content-Length after redirection is " + expectedContentLength); + }); + }); + }, desc); +} + +promise_test(function(test) { + assert_false(new Response().redirected); + return fetch(RESOURCES_DIR + "method.py").then(function(resp) { + assert_equals(resp.status, 200, "Response's status is 200"); + assert_false(resp.redirected); + }); +}, "Response.redirected should be false on not-redirected responses"); + +var redirUrl = RESOURCES_DIR + "redirect.py"; +var locationUrl = "method.py"; + +const stringBody = "this is my body"; +const blobBody = new Blob(["it's me the blob!", " ", "and more blob!"]); +const blobBodyAsString = "it's me the blob! and more blob!"; + +redirectMethod("Redirect 301 with GET", redirUrl, locationUrl, 301, "GET", "GET"); +redirectMethod("Redirect 301 with POST", redirUrl, locationUrl, 301, "POST", "GET", { body: stringBody, expectedStripRequestBodyHeader: true }); +redirectMethod("Redirect 301 with HEAD", redirUrl, locationUrl, 301, "HEAD", "HEAD"); + +redirectMethod("Redirect 302 with GET", redirUrl, locationUrl, 302, "GET", "GET"); +redirectMethod("Redirect 302 with POST", redirUrl, locationUrl, 302, "POST", "GET", { body: stringBody, expectedStripRequestBodyHeader: true }); +redirectMethod("Redirect 302 with HEAD", redirUrl, locationUrl, 302, "HEAD", "HEAD"); + +redirectMethod("Redirect 303 with GET", redirUrl, locationUrl, 303, "GET", "GET"); +redirectMethod("Redirect 303 with POST", redirUrl, locationUrl, 303, "POST", "GET", { body: stringBody, expectedStripRequestBodyHeader: true }); +redirectMethod("Redirect 303 with HEAD", redirUrl, locationUrl, 303, "HEAD", "HEAD"); +redirectMethod("Redirect 303 with TESTING", redirUrl, locationUrl, 303, "TESTING", "GET", { expectedStripRequestBodyHeader: true }); + +redirectMethod("Redirect 307 with GET", redirUrl, locationUrl, 307, "GET", "GET"); +redirectMethod("Redirect 307 with POST (string body)", redirUrl, locationUrl, 307, "POST", "POST", { body: stringBody , expectedRequestContentType: "text/plain;charset=UTF-8"}); +redirectMethod("Redirect 307 with POST (blob body)", redirUrl, locationUrl, 307, "POST", "POST", { body: blobBody, expectedBodyAsString: blobBodyAsString }); +redirectMethod("Redirect 307 with HEAD", redirUrl, locationUrl, 307, "HEAD", "HEAD"); + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-mode.any.js b/test/wpt/tests/fetch/api/redirect/redirect-mode.any.js new file mode 100644 index 0000000..9f1ff98 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-mode.any.js @@ -0,0 +1,59 @@ +// META: script=/common/get-host-info.sub.js + +var redirectLocation = "cors-top.txt"; +const { ORIGIN, REMOTE_ORIGIN } = get_host_info(); + +function testRedirect(origin, redirectStatus, redirectMode, corsMode) { + var url = new URL("../resources/redirect.py", self.location); + if (origin === "cross-origin") { + url.host = get_host_info().REMOTE_HOST; + url.port = get_host_info().HTTP_PORT; + } + + var 
urlParameters = "?redirect_status=" + redirectStatus; + urlParameters += "&location=" + encodeURIComponent(redirectLocation); + + var requestInit = {redirect: redirectMode, mode: corsMode}; + + promise_test(function(test) { + if (redirectMode === "error" || + (corsMode === "no-cors" && redirectMode !== "follow" && origin !== "same-origin")) + return promise_rejects_js(test, TypeError, fetch(url + urlParameters, requestInit)); + if (redirectMode === "manual") + return fetch(url + urlParameters, requestInit).then(function(resp) { + assert_equals(resp.status, 0, "Response's status is 0"); + assert_equals(resp.type, "opaqueredirect", "Response's type is opaqueredirect"); + assert_equals(resp.statusText, "", "Response's statusText is \"\""); + assert_equals(resp.url, url + urlParameters, "Response URL should be the original one"); + }); + if (redirectMode === "follow") + return fetch(url + urlParameters, requestInit).then(function(resp) { + if (corsMode !== "no-cors" || origin === "same-origin") { + assert_true(new URL(resp.url).pathname.endsWith(redirectLocation), "Response's url should be the redirected one"); + assert_equals(resp.status, 200, "Response's status is 200"); + } else { + assert_equals(resp.type, "opaque", "Response is opaque"); + } + }); + assert_unreached(redirectMode + " is no a valid redirect mode"); + }, origin + " redirect " + redirectStatus + " in " + redirectMode + " redirect and " + corsMode + " mode"); +} + +for (var origin of ["same-origin", "cross-origin"]) { + for (var statusCode of [301, 302, 303, 307, 308]) { + for (var redirect of ["error", "manual", "follow"]) { + for (var mode of ["cors", "no-cors"]) + testRedirect(origin, statusCode, redirect, mode); + } + } +} + +promise_test(async (t) => { + const destination = `${ORIGIN}/common/blank.html`; + // We use /common/redirect.py intentionally, as we want a CORS error. + const url = + `${REMOTE_ORIGIN}/common/redirect.py?location=${destination}`; + await promise_rejects_js(t, TypeError, fetch(url, { redirect: "manual" })); +}, "manual redirect with a CORS error should be rejected"); + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-origin.any.js b/test/wpt/tests/fetch/api/redirect/redirect-origin.any.js new file mode 100644 index 0000000..6001c50 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-origin.any.js @@ -0,0 +1,68 @@ +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +const { + HTTP_ORIGIN, + HTTP_REMOTE_ORIGIN, +} = get_host_info(); + +/** + * Fetches `fromUrl` with 'cors' and 'follow' modes that returns response to + * redirect to `toUrl`. 
+ */ +function testOriginAfterRedirection( + desc, method, fromUrl, toUrl, statusCode, expectedOrigin) { + desc = `[${method}] Redirect ${statusCode} ${desc}`; + const token1 = token(); + const url = `${fromUrl}?token=${token1}&max_age=0` + + `&redirect_status=${statusCode}` + + `&location=${encodeURIComponent(toUrl)}`; + + const requestInit = {method, 'mode': 'cors', 'redirect': 'follow'}; + + promise_test(function(test) { + return fetch(`${RESOURCES_DIR}clean-stash.py?token=${token1}`) + .then((cleanResponse) => { + assert_equals( + cleanResponse.status, 200, + `Clean stash response's status is 200`); + return fetch(url, requestInit).then((redirectResponse) => { + assert_equals( + redirectResponse.status, 200, + `Inspect header response's status is 200`); + assert_equals( + redirectResponse.headers.get('x-request-origin'), + expectedOrigin, 'Check origin header'); + }); + }); + }, desc); +} + +const FROM_URL = `${RESOURCES_DIR}redirect.py`; +const CORS_FROM_URL = + `${HTTP_REMOTE_ORIGIN}${dirname(location.pathname)}${FROM_URL}`; +const TO_URL = `${HTTP_ORIGIN}${dirname(location.pathname)}${ + RESOURCES_DIR}inspect-headers.py?headers=origin`; +const CORS_TO_URL = `${HTTP_REMOTE_ORIGIN}${dirname(location.pathname)}${ + RESOURCES_DIR}inspect-headers.py?cors&headers=origin`; + +for (const statusCode of [301, 302, 303, 307, 308]) { + for (const method of ['GET', 'POST']) { + testOriginAfterRedirection( + 'Same origin to same origin', method, FROM_URL, TO_URL, statusCode, + null); + testOriginAfterRedirection( + 'Same origin to other origin', method, FROM_URL, CORS_TO_URL, + statusCode, HTTP_ORIGIN); + testOriginAfterRedirection( + 'Other origin to other origin', method, CORS_FROM_URL, CORS_TO_URL, + statusCode, HTTP_ORIGIN); + // TODO(crbug.com/1432059): Fix broken tests. 
+ testOriginAfterRedirection( + 'Other origin to same origin', method, CORS_FROM_URL, `${TO_URL}&cors`, + statusCode, 'null'); + } +} + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-referrer-override.any.js b/test/wpt/tests/fetch/api/redirect/redirect-referrer-override.any.js new file mode 100644 index 0000000..56e55d7 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-referrer-override.any.js @@ -0,0 +1,104 @@ +// META: timeout=long +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function getExpectation(expectations, initPolicy, initScenario, redirectPolicy, redirectScenario) { + let policies = [ + expectations[initPolicy][initScenario], + expectations[redirectPolicy][redirectScenario] + ]; + + if (policies.includes("omitted")) { + return null; + } else if (policies.includes("origin")) { + return referrerOrigin; + } else { + // "stripped-referrer" + return referrerUrl; + } +} + +function testReferrerAfterRedirection(desc, redirectUrl, redirectLocation, referrerPolicy, redirectReferrerPolicy, expectedReferrer) { + var url = redirectUrl; + var urlParameters = "?location=" + encodeURIComponent(redirectLocation); + var description = desc + ", " + referrerPolicy + " init, " + redirectReferrerPolicy + " redirect header "; + + if (redirectReferrerPolicy) + urlParameters += "&redirect_referrerpolicy=" + redirectReferrerPolicy; + + var requestInit = {"redirect": "follow", "referrerPolicy": referrerPolicy}; + promise_test(function(test) { + return fetch(url + urlParameters, requestInit).then(function(response) { + assert_equals(response.status, 200, "Inspect header response's status is 200"); + assert_equals(response.headers.get("x-request-referer"), expectedReferrer ? 
expectedReferrer : null, "Check referrer header"); + }); + }, description); +} + +var referrerOrigin = get_host_info().HTTP_ORIGIN + "/"; +var referrerUrl = location.href; + +var redirectUrl = RESOURCES_DIR + "redirect.py"; +var locationUrl = get_host_info().HTTP_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py?headers=referer"; +var crossLocationUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py?cors&headers=referer"; + +var expectations = { + "no-referrer": { + "same-origin": "omitted", + "cross-origin": "omitted" + }, + "no-referrer-when-downgrade": { + "same-origin": "stripped-referrer", + "cross-origin": "stripped-referrer" + }, + "origin": { + "same-origin": "origin", + "cross-origin": "origin" + }, + "origin-when-cross-origin": { + "same-origin": "stripped-referrer", + "cross-origin": "origin", + }, + "same-origin": { + "same-origin": "stripped-referrer", + "cross-origin": "omitted" + }, + "strict-origin": { + "same-origin": "origin", + "cross-origin": "origin" + }, + "strict-origin-when-cross-origin": { + "same-origin": "stripped-referrer", + "cross-origin": "origin" + }, + "unsafe-url": { + "same-origin": "stripped-referrer", + "cross-origin": "stripped-referrer" + } +}; + +for (var initPolicy in expectations) { + for (var redirectPolicy in expectations) { + + // Redirect to same-origin URL + testReferrerAfterRedirection( + "Same origin redirection", + redirectUrl, + locationUrl, + initPolicy, + redirectPolicy, + getExpectation(expectations, initPolicy, "same-origin", redirectPolicy, "same-origin")); + + // Redirect to cross-origin URL + testReferrerAfterRedirection( + "Cross origin redirection", + redirectUrl, + crossLocationUrl, + initPolicy, + redirectPolicy, + getExpectation(expectations, initPolicy, "same-origin", redirectPolicy, "cross-origin")); + } +} + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-referrer.any.js b/test/wpt/tests/fetch/api/redirect/redirect-referrer.any.js new file mode 100644 index 0000000..99fda42 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-referrer.any.js @@ -0,0 +1,66 @@ +// META: timeout=long +// META: script=/common/utils.js +// META: script=../resources/utils.js +// META: script=/common/get-host-info.sub.js + +function testReferrerAfterRedirection(desc, redirectUrl, redirectLocation, referrerPolicy, redirectReferrerPolicy, expectedReferrer) { + var url = redirectUrl; + var urlParameters = "?location=" + encodeURIComponent(redirectLocation); + + if (redirectReferrerPolicy) + urlParameters += "&redirect_referrerpolicy=" + redirectReferrerPolicy; + + var requestInit = {"redirect": "follow", "referrerPolicy": referrerPolicy}; + + promise_test(function(test) { + return fetch(url + urlParameters, requestInit).then(function(response) { + assert_equals(response.status, 200, "Inspect header response's status is 200"); + assert_equals(response.headers.get("x-request-referer"), expectedReferrer ? 
expectedReferrer : null, "Check referrer header"); + }); + }, desc); +} + +var referrerOrigin = get_host_info().HTTP_ORIGIN + "/"; +var referrerUrl = location.href; + +var redirectUrl = RESOURCES_DIR + "redirect.py"; +var locationUrl = get_host_info().HTTP_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py?headers=referer"; +var crossLocationUrl = get_host_info().HTTP_REMOTE_ORIGIN + dirname(location.pathname) + RESOURCES_DIR + "inspect-headers.py?cors&headers=referer"; + +testReferrerAfterRedirection("Same origin redirection, empty init, unsafe-url redirect header ", redirectUrl, locationUrl, "", "unsafe-url", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty init, no-referrer-when-downgrade redirect header ", redirectUrl, locationUrl, "", "no-referrer-when-downgrade", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty init, same-origin redirect header ", redirectUrl, locationUrl, "", "same-origin", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty init, origin redirect header ", redirectUrl, locationUrl, "", "origin", referrerOrigin); +testReferrerAfterRedirection("Same origin redirection, empty init, origin-when-cross-origin redirect header ", redirectUrl, locationUrl, "", "origin-when-cross-origin", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty init, no-referrer redirect header ", redirectUrl, locationUrl, "", "no-referrer", null); +testReferrerAfterRedirection("Same origin redirection, empty init, strict-origin redirect header ", redirectUrl, locationUrl, "", "strict-origin", referrerOrigin); +testReferrerAfterRedirection("Same origin redirection, empty init, strict-origin-when-cross-origin redirect header ", redirectUrl, locationUrl, "", "strict-origin-when-cross-origin", referrerUrl); + +testReferrerAfterRedirection("Same origin redirection, empty redirect header, unsafe-url init ", redirectUrl, locationUrl, "unsafe-url", "", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty redirect header, no-referrer-when-downgrade init ", redirectUrl, locationUrl, "no-referrer-when-downgrade", "", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty redirect header, same-origin init ", redirectUrl, locationUrl, "same-origin", "", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty redirect header, origin init ", redirectUrl, locationUrl, "origin", "", referrerOrigin); +testReferrerAfterRedirection("Same origin redirection, empty redirect header, origin-when-cross-origin init ", redirectUrl, locationUrl, "origin-when-cross-origin", "", referrerUrl); +testReferrerAfterRedirection("Same origin redirection, empty redirect header, no-referrer init ", redirectUrl, locationUrl, "no-referrer", "", null); +testReferrerAfterRedirection("Same origin redirection, empty redirect header, strict-origin init ", redirectUrl, locationUrl, "strict-origin", "", referrerOrigin); +testReferrerAfterRedirection("Same origin redirection, empty redirect header, strict-origin-when-cross-origin init ", redirectUrl, locationUrl, "strict-origin-when-cross-origin", "", referrerUrl); + +testReferrerAfterRedirection("Cross origin redirection, empty init, unsafe-url redirect header ", redirectUrl, crossLocationUrl, "", "unsafe-url", referrerUrl); +testReferrerAfterRedirection("Cross origin redirection, empty init, no-referrer-when-downgrade redirect header ", redirectUrl, crossLocationUrl, "", "no-referrer-when-downgrade", 
referrerUrl); +testReferrerAfterRedirection("Cross origin redirection, empty init, same-origin redirect header ", redirectUrl, crossLocationUrl, "", "same-origin", null); +testReferrerAfterRedirection("Cross origin redirection, empty init, origin redirect header ", redirectUrl, crossLocationUrl, "", "origin", referrerOrigin); +testReferrerAfterRedirection("Cross origin redirection, empty init, origin-when-cross-origin redirect header ", redirectUrl, crossLocationUrl, "", "origin-when-cross-origin", referrerOrigin); +testReferrerAfterRedirection("Cross origin redirection, empty init, no-referrer redirect header ", redirectUrl, crossLocationUrl, "", "no-referrer", null); +testReferrerAfterRedirection("Cross origin redirection, empty init, strict-origin redirect header ", redirectUrl, crossLocationUrl, "", "strict-origin", referrerOrigin); +testReferrerAfterRedirection("Cross origin redirection, empty init, strict-origin-when-cross-origin redirect header ", redirectUrl, crossLocationUrl, "", "strict-origin-when-cross-origin", referrerOrigin); + +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, unsafe-url init ", redirectUrl, crossLocationUrl, "unsafe-url", "", referrerUrl); +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, no-referrer-when-downgrade init ", redirectUrl, crossLocationUrl, "no-referrer-when-downgrade", "", referrerUrl); +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, same-origin init ", redirectUrl, crossLocationUrl, "same-origin", "", null); +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, origin init ", redirectUrl, crossLocationUrl, "origin", "", referrerOrigin); +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, origin-when-cross-origin init ", redirectUrl, crossLocationUrl, "origin-when-cross-origin", "", referrerOrigin); +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, no-referrer init ", redirectUrl, crossLocationUrl, "no-referrer", "", null); +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, strict-origin init ", redirectUrl, crossLocationUrl, "strict-origin", "", referrerOrigin); +testReferrerAfterRedirection("Cross origin redirection, empty redirect header, strict-origin-when-cross-origin init ", redirectUrl, crossLocationUrl, "strict-origin-when-cross-origin", "", referrerOrigin); + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-schemes.any.js b/test/wpt/tests/fetch/api/redirect/redirect-schemes.any.js new file mode 100644 index 0000000..31ec124 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-schemes.any.js @@ -0,0 +1,19 @@ +// META: title=Fetch: handling different schemes in redirects +// META: global=window,worker +// META: script=/common/get-host-info.sub.js + +// All non-HTTP(S) schemes cannot survive redirects +var url = "../resources/redirect.py?location="; +var tests = [ + url + "mailto:a@a.com", + url + "data:,HI", + url + "facetime:a@a.org", + url + "about:blank", + url + "about:unicorn", + url + "blob:djfksfjs" +]; +tests.forEach(function(url) { + promise_test(function(test) { + return promise_rejects_js(test, TypeError, fetch(url)) + }) +}) diff --git a/test/wpt/tests/fetch/api/redirect/redirect-to-dataurl.any.js b/test/wpt/tests/fetch/api/redirect/redirect-to-dataurl.any.js new file mode 100644 index 0000000..9d0f147 --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-to-dataurl.any.js @@ -0,0 +1,28 @@ 
+// META: global=window,worker +// META: script=/common/get-host-info.sub.js + +var dataURL = "data:text/plain;base64,cmVzcG9uc2UncyBib2R5"; +var body = "response's body"; +var contentType = "text/plain"; + +function redirectDataURL(desc, redirectUrl, mode) { + var url = redirectUrl + "?cors&location=" + encodeURIComponent(dataURL); + + var requestInit = {"mode": mode}; + + promise_test(function(test) { + return promise_rejects_js(test, TypeError, fetch(url, requestInit)); + }, desc); +} + +var redirUrl = get_host_info().HTTP_ORIGIN + "/fetch/api/resources/redirect.py"; +var corsRedirUrl = get_host_info().HTTP_REMOTE_ORIGIN + "/fetch/api/resources/redirect.py"; + +redirectDataURL("Testing data URL loading after same-origin redirection (cors mode)", redirUrl, "cors"); +redirectDataURL("Testing data URL loading after same-origin redirection (no-cors mode)", redirUrl, "no-cors"); +redirectDataURL("Testing data URL loading after same-origin redirection (same-origin mode)", redirUrl, "same-origin"); + +redirectDataURL("Testing data URL loading after cross-origin redirection (cors mode)", corsRedirUrl, "cors"); +redirectDataURL("Testing data URL loading after cross-origin redirection (no-cors mode)", corsRedirUrl, "no-cors"); + +done(); diff --git a/test/wpt/tests/fetch/api/redirect/redirect-upload.h2.any.js b/test/wpt/tests/fetch/api/redirect/redirect-upload.h2.any.js new file mode 100644 index 0000000..521bd3a --- /dev/null +++ b/test/wpt/tests/fetch/api/redirect/redirect-upload.h2.any.js @@ -0,0 +1,33 @@ +// META: global=window,worker +// META: script=../resources/utils.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js + +const redirectUrl = RESOURCES_DIR + "redirect.h2.py"; +const redirectLocation = "top.txt"; + +async function fetchStreamRedirect(statusCode) { + const url = RESOURCES_DIR + "redirect.h2.py" + + `?redirect_status=${statusCode}&location=${redirectLocation}`; + const requestInit = {method: "POST"}; + requestInit["body"] = new ReadableStream({start: controller => { + const encoder = new TextEncoder(); + controller.enqueue(encoder.encode("Test")); + controller.close(); + }}); + requestInit.duplex = "half"; + return fetch(url, requestInit); +} + +promise_test(async () => { + const resp = await fetchStreamRedirect(303); + assert_equals(resp.status, 200); + assert_true(new URL(resp.url).pathname.endsWith(redirectLocation), + "Response's url should be the redirected one"); +}, "Fetch upload streaming should be accepted on 303"); + +for (const statusCode of [301, 302, 307, 308]) { + promise_test(t => { + return promise_rejects_js(t, TypeError, fetchStreamRedirect(statusCode)); + }, `Fetch upload streaming should fail on ${statusCode}`); +} diff --git a/test/wpt/tests/fetch/api/request/destination/fetch-destination-frame.https.html b/test/wpt/tests/fetch/api/request/destination/fetch-destination-frame.https.html new file mode 100644 index 0000000..f3f9f78 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/fetch-destination-frame.https.html @@ -0,0 +1,51 @@ + +Fetch destination tests for resources with no load event + + + + + diff --git a/test/wpt/tests/fetch/api/request/destination/fetch-destination-iframe.https.html b/test/wpt/tests/fetch/api/request/destination/fetch-destination-iframe.https.html new file mode 100644 index 0000000..1aa5a56 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/fetch-destination-iframe.https.html @@ -0,0 +1,51 @@ + +Fetch destination tests for resources with no load event + + + + + diff --git 
a/test/wpt/tests/fetch/api/request/destination/fetch-destination-no-load-event.https.html b/test/wpt/tests/fetch/api/request/destination/fetch-destination-no-load-event.https.html new file mode 100644 index 0000000..1778bf2 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/fetch-destination-no-load-event.https.html @@ -0,0 +1,124 @@ + +Fetch destination tests for resources with no load event + + + + + diff --git a/test/wpt/tests/fetch/api/request/destination/fetch-destination-prefetch.https.html b/test/wpt/tests/fetch/api/request/destination/fetch-destination-prefetch.https.html new file mode 100644 index 0000000..db99202 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/fetch-destination-prefetch.https.html @@ -0,0 +1,46 @@ + +Fetch destination test for prefetching + + + + + + diff --git a/test/wpt/tests/fetch/api/request/destination/fetch-destination-worker.https.html b/test/wpt/tests/fetch/api/request/destination/fetch-destination-worker.https.html new file mode 100644 index 0000000..5935c1f --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/fetch-destination-worker.https.html @@ -0,0 +1,60 @@ + +Fetch destination tests for resources with no load event + + + + + diff --git a/test/wpt/tests/fetch/api/request/destination/fetch-destination.https.html b/test/wpt/tests/fetch/api/request/destination/fetch-destination.https.html new file mode 100644 index 0000000..0094b0b --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/fetch-destination.https.html @@ -0,0 +1,435 @@ + +Fetch destination tests + + + + + + diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy b/test/wpt/tests/fetch/api/request/destination/resources/dummy new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy.es b/test/wpt/tests/fetch/api/request/destination/resources/dummy.es new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy.es.headers b/test/wpt/tests/fetch/api/request/destination/resources/dummy.es.headers new file mode 100644 index 0000000..9bb8bad --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/resources/dummy.es.headers @@ -0,0 +1 @@ +Content-Type: text/event-stream diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy.html b/test/wpt/tests/fetch/api/request/destination/resources/dummy.html new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy.png b/test/wpt/tests/fetch/api/request/destination/resources/dummy.png new file mode 100644 index 0000000..01c9666 Binary files /dev/null and b/test/wpt/tests/fetch/api/request/destination/resources/dummy.png differ diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy.ttf b/test/wpt/tests/fetch/api/request/destination/resources/dummy.ttf new file mode 100644 index 0000000..9023592 Binary files /dev/null and b/test/wpt/tests/fetch/api/request/destination/resources/dummy.ttf differ diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.mp3 b/test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.mp3 new file mode 100644 index 0000000..0091330 Binary files /dev/null and b/test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.mp3 differ diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.oga b/test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.oga new file mode 100644 
index 0000000..239ad2b Binary files /dev/null and b/test/wpt/tests/fetch/api/request/destination/resources/dummy_audio.oga differ diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.mp4 b/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.mp4 new file mode 100644 index 0000000..7022e75 Binary files /dev/null and b/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.mp4 differ diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.ogv b/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.ogv new file mode 100644 index 0000000..de99616 Binary files /dev/null and b/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.ogv differ diff --git a/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.webm b/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.webm new file mode 100644 index 0000000..c3d433a Binary files /dev/null and b/test/wpt/tests/fetch/api/request/destination/resources/dummy_video.webm differ diff --git a/test/wpt/tests/fetch/api/request/destination/resources/empty.https.html b/test/wpt/tests/fetch/api/request/destination/resources/empty.https.html new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-frame.js b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-frame.js new file mode 100644 index 0000000..b69de0b --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-frame.js @@ -0,0 +1,20 @@ +self.addEventListener('fetch', function(event) { + if (event.request.url.includes('dummy')) { + event.waitUntil(async function() { + let destination = new URL(event.request.url).searchParams.get("dest"); + let clients = await self.clients.matchAll({"includeUncontrolled": true}); + clients.forEach(function(client) { + if (client.url.includes("fetch-destination-frame")) { + if (event.request.destination == destination) { + client.postMessage("PASS"); + } else { + client.postMessage("FAIL"); + } + } + }) + }()); + } + event.respondWith(fetch(event.request)); +}); + + diff --git a/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-iframe.js b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-iframe.js new file mode 100644 index 0000000..7634583 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-iframe.js @@ -0,0 +1,20 @@ +self.addEventListener('fetch', function(event) { + if (event.request.url.includes('dummy')) { + event.waitUntil(async function() { + let destination = new URL(event.request.url).searchParams.get("dest"); + let clients = await self.clients.matchAll({"includeUncontrolled": true}); + clients.forEach(function(client) { + if (client.url.includes("fetch-destination-iframe")) { + if (event.request.destination == destination) { + client.postMessage("PASS"); + } else { + client.postMessage("FAIL"); + } + } + }) + }()); + } + event.respondWith(fetch(event.request)); +}); + + diff --git a/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-no-load-event.js b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-no-load-event.js new file mode 100644 index 0000000..a583b12 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker-no-load-event.js @@ -0,0 +1,20 @@ +self.addEventListener('fetch', 
function(event) { + const url = event.request.url; + if (url.includes('dummy') && url.includes('?')) { + event.waitUntil(async function() { + let destination = new URL(url).searchParams.get("dest"); + var result = "FAIL"; + if (event.request.destination == destination || + (event.request.destination == "empty" && destination == "")) { + result = "PASS"; + } + let cl = await clients.matchAll({includeUncontrolled: true}); + for (i = 0; i < cl.length; i++) { + cl[i].postMessage(result); + } + }()) + } + event.respondWith(fetch(event.request)); +}); + + diff --git a/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker.js b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker.js new file mode 100644 index 0000000..904009c --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/resources/fetch-destination-worker.js @@ -0,0 +1,12 @@ +self.addEventListener('fetch', function(event) { + if (event.request.url.includes('dummy')) { + let destination = new URL(event.request.url).searchParams.get("dest"); + if (event.request.destination == destination || + (event.request.destination == "empty" && destination == "")) { + event.respondWith(fetch(event.request)); + } else { + event.respondWith(Response.error()); + } + } +}); + diff --git a/test/wpt/tests/fetch/api/request/destination/resources/importer.js b/test/wpt/tests/fetch/api/request/destination/resources/importer.js new file mode 100644 index 0000000..9568474 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/destination/resources/importer.js @@ -0,0 +1 @@ +importScripts("dummy?t=importScripts&dest=script"); diff --git a/test/wpt/tests/fetch/api/request/forbidden-method.any.js b/test/wpt/tests/fetch/api/request/forbidden-method.any.js new file mode 100644 index 0000000..eb13f37 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/forbidden-method.any.js @@ -0,0 +1,13 @@ +// META: global=window,worker + +// https://fetch.spec.whatwg.org/#forbidden-method +for (const method of [ + 'CONNECT', 'TRACE', 'TRACK', + 'connect', 'trace', 'track' + ]) { + test(function() { + assert_throws_js(TypeError, + function() { new Request('./', {method: method}); } + ); + }, 'Request() with a forbidden method ' + method + ' must throw.'); +} diff --git a/test/wpt/tests/fetch/api/request/multi-globals/construct-in-detached-frame.window.js b/test/wpt/tests/fetch/api/request/multi-globals/construct-in-detached-frame.window.js new file mode 100644 index 0000000..b0d6ba5 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/multi-globals/construct-in-detached-frame.window.js @@ -0,0 +1,11 @@ +// This is a regression test for Chromium issue https://crbug.com/1427266. 
+test(() => { + const iframe = document.createElement('iframe'); + document.body.append(iframe); + const otherRequest = iframe.contentWindow.Request; + iframe.remove(); + const r1 = new otherRequest('resource', { method: 'POST', body: 'string' }); + const r2 = new otherRequest(r1); + assert_true(r1.bodyUsed); + assert_false(r2.bodyUsed); +}, 'creating a request from another request in a detached realm should work'); diff --git a/test/wpt/tests/fetch/api/request/multi-globals/current/current.html b/test/wpt/tests/fetch/api/request/multi-globals/current/current.html new file mode 100644 index 0000000..9bb6e0b --- /dev/null +++ b/test/wpt/tests/fetch/api/request/multi-globals/current/current.html @@ -0,0 +1,3 @@ + +Current page used as a test helper + diff --git a/test/wpt/tests/fetch/api/request/multi-globals/incumbent/incumbent.html b/test/wpt/tests/fetch/api/request/multi-globals/incumbent/incumbent.html new file mode 100644 index 0000000..a885b8a --- /dev/null +++ b/test/wpt/tests/fetch/api/request/multi-globals/incumbent/incumbent.html @@ -0,0 +1,14 @@ + +Incumbent page used as a test helper + + + + diff --git a/test/wpt/tests/fetch/api/request/multi-globals/url-parsing.html b/test/wpt/tests/fetch/api/request/multi-globals/url-parsing.html new file mode 100644 index 0000000..df60e72 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/multi-globals/url-parsing.html @@ -0,0 +1,27 @@ + +Request constructor URL parsing, with multiple globals in play + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/request/request-bad-port.any.js b/test/wpt/tests/fetch/api/request/request-bad-port.any.js new file mode 100644 index 0000000..b0684d4 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-bad-port.any.js @@ -0,0 +1,92 @@ +// META: global=window,worker + +// list of bad ports according to +// https://fetch.spec.whatwg.org/#port-blocking +var BLOCKED_PORTS_LIST = [ + 1, // tcpmux + 7, // echo + 9, // discard + 11, // systat + 13, // daytime + 15, // netstat + 17, // qotd + 19, // chargen + 20, // ftp-data + 21, // ftp + 22, // ssh + 23, // telnet + 25, // smtp + 37, // time + 42, // name + 43, // nicname + 53, // domain + 69, // tftp + 77, // priv-rjs + 79, // finger + 87, // ttylink + 95, // supdup + 101, // hostriame + 102, // iso-tsap + 103, // gppitnp + 104, // acr-nema + 109, // pop2 + 110, // pop3 + 111, // sunrpc + 113, // auth + 115, // sftp + 117, // uucp-path + 119, // nntp + 123, // ntp + 135, // loc-srv / epmap + 137, // netbios-ns + 139, // netbios-ssn + 143, // imap2 + 161, // snmp + 179, // bgp + 389, // ldap + 427, // afp (alternate) + 465, // smtp (alternate) + 512, // print / exec + 513, // login + 514, // shell + 515, // printer + 526, // tempo + 530, // courier + 531, // chat + 532, // netnews + 540, // uucp + 548, // afp + 554, // rtsp + 556, // remotefs + 563, // nntp+ssl + 587, // smtp (outgoing) + 601, // syslog-conn + 636, // ldap+ssl + 989, // ftps-data + 990, // ftps + 993, // ldap+ssl + 995, // pop3+ssl + 1719, // h323gatestat + 1720, // h323hostcall + 1723, // pptp + 2049, // nfs + 3659, // apple-sasl + 4045, // lockd + 5060, // sip + 5061, // sips + 6000, // x11 + 6566, // sane-port + 6665, // irc (alternate) + 6666, // irc (alternate) + 6667, // irc (default) + 6668, // irc (alternate) + 6669, // irc (alternate) + 6697, // irc+tls + 10080, // amanda +]; + +BLOCKED_PORTS_LIST.map(function(a){ + promise_test(function(t){ + return promise_rejects_js(t, TypeError, fetch("http://example.com:" + a)) + }, 'Request on bad port ' + a + ' should throw 
TypeError.'); +}); diff --git a/test/wpt/tests/fetch/api/request/request-cache-default-conditional.any.js b/test/wpt/tests/fetch/api/request/request-cache-default-conditional.any.js new file mode 100644 index 0000000..c5b2001 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache-default-conditional.any.js @@ -0,0 +1,170 @@ +// META: global=window,worker +// META: title=Request cache - default with conditional requests +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=request-cache.js + +var tests = [ + { + name: 'RequestCache "default" mode with an If-Modified-Since header (following a request without additional headers) is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Modified-Since": now.toGMTString()}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Modified-Since header (following a request without additional headers) is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Modified-Since": now.toGMTString()}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Modified-Since header is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{"If-Modified-Since": now.toGMTString()}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-Modified-Since header is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{"If-Modified-Since": now.toGMTString()}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-None-Match header (following a request without additional headers) is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{}, {"If-None-Match": '"foo"'}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-None-Match header (following a request without additional headers) is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{}, {"If-None-Match": '"foo"'}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-None-Match header is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{"If-None-Match": '"foo"'}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-None-Match header is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{"If-None-Match": '"foo"'}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-Unmodified-Since header (following a request without additional headers) is treated similarly to "no-store"', + state: 
"stale", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Unmodified-Since": now.toGMTString()}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Unmodified-Since header (following a request without additional headers) is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Unmodified-Since": now.toGMTString()}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Unmodified-Since header is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{"If-Unmodified-Since": now.toGMTString()}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-Unmodified-Since header is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{"If-Unmodified-Since": now.toGMTString()}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-Match header (following a request without additional headers) is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Match": '"foo"'}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Match header (following a request without additional headers) is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Match": '"foo"'}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Match header is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{"If-Match": '"foo"'}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-Match header is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{"If-Match": '"foo"'}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-Range header (following a request without additional headers) is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Range": '"foo"'}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Range header (following a request without additional headers) is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{}, {"If-Range": '"foo"'}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "default" mode with an If-Range header is treated similarly to "no-store"', + state: "stale", + request_cache: ["default", "default"], + request_headers: [{"If-Range": '"foo"'}, {}], + expected_validation_headers: [false, 
false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "default" mode with an If-Range header is treated similarly to "no-store"', + state: "fresh", + request_cache: ["default", "default"], + request_headers: [{"If-Range": '"foo"'}, {}], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/api/request/request-cache-default.any.js b/test/wpt/tests/fetch/api/request/request-cache-default.any.js new file mode 100644 index 0000000..dfa8369 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache-default.any.js @@ -0,0 +1,39 @@ +// META: global=window,worker +// META: title=Request cache - default +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=request-cache.js + +var tests = [ + { + name: 'RequestCache "default" mode checks the cache for previously cached content and goes to the network for stale responses', + state: "stale", + request_cache: ["default", "default"], + expected_validation_headers: [false, true], + expected_no_cache_headers: [false, false], + }, + { + name: 'RequestCache "default" mode checks the cache for previously cached content and avoids going to the network if a fresh response exists', + state: "fresh", + request_cache: ["default", "default"], + expected_validation_headers: [false], + expected_no_cache_headers: [false], + }, + { + name: 'Responses with the "Cache-Control: no-store" header are not stored in the cache', + state: "stale", + cache_control: "no-store", + request_cache: ["default", "default"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, false], + }, + { + name: 'Responses with the "Cache-Control: no-store" header are not stored in the cache', + state: "fresh", + cache_control: "no-store", + request_cache: ["default", "default"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, false], + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/api/request/request-cache-force-cache.any.js b/test/wpt/tests/fetch/api/request/request-cache-force-cache.any.js new file mode 100644 index 0000000..00dce09 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache-force-cache.any.js @@ -0,0 +1,67 @@ +// META: global=window,worker +// META: title=Request cache - force-cache +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=request-cache.js + +var tests = [ + { + name: 'RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for stale responses', + state: "stale", + request_cache: ["default", "force-cache"], + expected_validation_headers: [false], + expected_no_cache_headers: [false], + }, + { + name: 'RequestCache "force-cache" mode checks the cache for previously cached content and avoid revalidation for fresh responses', + state: "fresh", + request_cache: ["default", "force-cache"], + expected_validation_headers: [false], + expected_no_cache_headers: [false], + }, + { + name: 'RequestCache "force-cache" mode checks the cache for previously cached content and goes to the network if a cached response is not found', + state: "stale", + request_cache: ["force-cache"], + expected_validation_headers: [false], + expected_no_cache_headers: [false], + }, + { + name: 'RequestCache "force-cache" mode checks the cache for previously cached content and goes to the network if a cached response is not found', + state: "fresh", 
+ request_cache: ["force-cache"], + expected_validation_headers: [false], + expected_no_cache_headers: [false], + }, + { + name: 'RequestCache "force-cache" mode checks the cache for previously cached content and goes to the network if a cached response would vary', + state: "stale", + vary: "*", + request_cache: ["default", "force-cache"], + expected_validation_headers: [false, true], + expected_no_cache_headers: [false, false], + }, + { + name: 'RequestCache "force-cache" mode checks the cache for previously cached content and goes to the network if a cached response would vary', + state: "fresh", + vary: "*", + request_cache: ["default", "force-cache"], + expected_validation_headers: [false, true], + expected_no_cache_headers: [false, false], + }, + { + name: 'RequestCache "force-cache" stores the response in the cache if it goes to the network', + state: "stale", + request_cache: ["force-cache", "default"], + expected_validation_headers: [false, true], + expected_no_cache_headers: [false, false], + }, + { + name: 'RequestCache "force-cache" stores the response in the cache if it goes to the network', + state: "fresh", + request_cache: ["force-cache", "default"], + expected_validation_headers: [false], + expected_no_cache_headers: [false], + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/api/request/request-cache-no-cache.any.js b/test/wpt/tests/fetch/api/request/request-cache-no-cache.any.js new file mode 100644 index 0000000..41fc22b --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache-no-cache.any.js @@ -0,0 +1,25 @@ +// META: global=window,worker +// META: title=Request cache : no-cache +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=request-cache.js + +var tests = [ + { + name: 'RequestCache "no-cache" mode revalidates stale responses found in the cache', + state: "stale", + request_cache: ["default", "no-cache"], + expected_validation_headers: [false, true], + expected_no_cache_headers: [false, false], + expected_max_age_headers: [false, true], + }, + { + name: 'RequestCache "no-cache" mode revalidates fresh responses found in the cache', + state: "fresh", + request_cache: ["default", "no-cache"], + expected_validation_headers: [false, true], + expected_no_cache_headers: [false, false], + expected_max_age_headers: [false, true], + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/api/request/request-cache-no-store.any.js b/test/wpt/tests/fetch/api/request/request-cache-no-store.any.js new file mode 100644 index 0000000..9a28718 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache-no-store.any.js @@ -0,0 +1,37 @@ +// META: global=window,worker +// META: title=Request cache - no store +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=request-cache.js + +var tests = [ + { + name: 'RequestCache "no-store" mode does not check the cache for previously cached content and goes to the network regardless', + state: "stale", + request_cache: ["default", "no-store"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "no-store" mode does not check the cache for previously cached content and goes to the network regardless', + state: "fresh", + request_cache: ["default", "no-store"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "no-store" mode does not store the response in the cache', + state: "stale", + 
request_cache: ["no-store", "default"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "no-store" mode does not store the response in the cache', + state: "fresh", + request_cache: ["no-store", "default"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [true, false], + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/api/request/request-cache-only-if-cached.any.js b/test/wpt/tests/fetch/api/request/request-cache-only-if-cached.any.js new file mode 100644 index 0000000..1305787 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache-only-if-cached.any.js @@ -0,0 +1,66 @@ +// META: global=window,dedicatedworker,sharedworker +// META: title=Request cache - only-if-cached +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=request-cache.js + +// FIXME: avoid mixed content requests to enable service worker global +var tests = [ + { + name: 'RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for stale responses', + state: "stale", + request_cache: ["default", "only-if-cached"], + expected_validation_headers: [false], + expected_no_cache_headers: [false] + }, + { + name: 'RequestCache "only-if-cached" mode checks the cache for previously cached content and avoids revalidation for fresh responses', + state: "fresh", + request_cache: ["default", "only-if-cached"], + expected_validation_headers: [false], + expected_no_cache_headers: [false] + }, + { + name: 'RequestCache "only-if-cached" mode checks the cache for previously cached content and does not go to the network if a cached response is not found', + state: "fresh", + request_cache: ["only-if-cached"], + response: ["error"], + expected_validation_headers: [], + expected_no_cache_headers: [] + }, + { + name: 'RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content', + state: "fresh", + request_cache: ["default", "only-if-cached"], + redirect: "same-origin", + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, false], + }, + { + name: 'RequestCache "only-if-cached" (with "same-origin") uses cached same-origin redirects to same-origin content', + state: "stale", + request_cache: ["default", "only-if-cached"], + redirect: "same-origin", + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, false], + }, + { + name: 'RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects', + state: "fresh", + request_cache: ["default", "only-if-cached"], + redirect: "cross-origin", + response: [null, "error"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, false], + }, + { + name: 'RequestCache "only-if-cached" (with "same-origin") does not follow redirects across origins and rejects', + state: "stale", + request_cache: ["default", "only-if-cached"], + redirect: "cross-origin", + response: [null, "error"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, false], + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/api/request/request-cache-reload.any.js b/test/wpt/tests/fetch/api/request/request-cache-reload.any.js new file mode 100644 index 0000000..c7bfffb --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache-reload.any.js @@ -0,0 +1,51 @@ +// META: global=window,worker +// META: 
title=Request cache - reload +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=request-cache.js + +var tests = [ + { + name: 'RequestCache "reload" mode does not check the cache for previously cached content and goes to the network regardless', + state: "stale", + request_cache: ["default", "reload"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "reload" mode does not check the cache for previously cached content and goes to the network regardless', + state: "fresh", + request_cache: ["default", "reload"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, + { + name: 'RequestCache "reload" mode does store the response in the cache', + state: "stale", + request_cache: ["reload", "default"], + expected_validation_headers: [false, true], + expected_no_cache_headers: [true, false], + }, + { + name: 'RequestCache "reload" mode does store the response in the cache', + state: "fresh", + request_cache: ["reload", "default"], + expected_validation_headers: [false], + expected_no_cache_headers: [true], + }, + { + name: 'RequestCache "reload" mode does store the response in the cache even if a previous response is already stored', + state: "stale", + request_cache: ["default", "reload", "default"], + expected_validation_headers: [false, false, true], + expected_no_cache_headers: [false, true, false], + }, + { + name: 'RequestCache "reload" mode does store the response in the cache even if a previous response is already stored', + state: "fresh", + request_cache: ["default", "reload", "default"], + expected_validation_headers: [false, false], + expected_no_cache_headers: [false, true], + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/api/request/request-cache.js b/test/wpt/tests/fetch/api/request/request-cache.js new file mode 100644 index 0000000..f2fbecf --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-cache.js @@ -0,0 +1,223 @@ +/** + * Each test is run twice: once using etag/If-None-Match and once with + * date/If-Modified-Since. Each test run gets its own URL and randomized + * content and operates independently. + * + * The test steps are run with request_cache.length fetch requests issued + * and their immediate results sanity-checked. The cache.py server script + * stashes an entry containing any If-None-Match, If-Modified-Since, Pragma, + * and Cache-Control observed headers for each request it receives. When + * the test fetches have run, this state is retrieved from cache.py and the + * expected_* lists are checked, including their length. + * + * This means that if a request_* fetch is expected to hit the cache and not + * touch the network, then there will be no entry for it in the expect_* + * lists. AKA (request_cache.length - expected_validation_headers.length) + * should equal the number of cache hits that didn't touch the network. + * + * Test dictionary keys: + * - state: required string that determines whether the Expires response for + * the fetched document should be set in the future ("fresh") or past + * ("stale"). + * - vary: optional string to be passed to the server for it to quote back + * in a Vary header on the response to us. + * - cache_control: optional string to be passed to the server for it to + * quote back in a Cache-Control header on the response to us. + * - redirect: optional string "same-origin" or "cross-origin". 
If + * provided, the server will issue an absolute redirect to the script on + * the same or a different origin, as appropriate. The redirected + * location is the script with the redirect parameter removed, so the + * content/state/etc. will be as if you hadn't specified a redirect. + * - request_cache: required array of cache modes to use (via `cache`). + * - request_headers: optional array of explicit fetch `headers` arguments. + * If provided, the server will log an empty dictionary for each request + * instead of the request headers it would normally log. + * - response: optional array of specialized response handling. Right now, + * "error" array entries indicate a network error response is expected + * which will reject with a TypeError. + * - expected_validation_headers: required boolean array indicating whether + * the server should have seen an If-None-Match/If-Modified-Since header + * in the request. + * - expected_no_cache_headers: required boolean array indicating whether + * the server should have seen Pragma/Cache-control:no-cache headers in + * the request. + * - expected_max_age_headers: optional boolean array indicating whether + * the server should have seen a Cache-Control:max-age=0 header in the + * request. + */ + +var now = new Date(); + +function base_path() { + return location.pathname.replace(/\/[^\/]*$/, '/'); +} +function make_url(uuid, id, value, content, info) { + var dates = { + fresh: new Date(now.getFullYear() + 1, now.getMonth(), now.getDay()).toGMTString(), + stale: new Date(now.getFullYear() - 1, now.getMonth(), now.getDay()).toGMTString(), + }; + var vary = ""; + if ("vary" in info) { + vary = "&vary=" + info.vary; + } + var cache_control = ""; + if ("cache_control" in info) { + cache_control = "&cache_control=" + info.cache_control; + } + var redirect = ""; + + var ignore_request_headers = ""; + if ("request_headers" in info) { + // Ignore the request headers that we send since they may be synthesized by the test. + ignore_request_headers = "&ignore"; + } + var url_sans_redirect = "resources/cache.py?token=" + uuid + + "&content=" + content + + "&" + id + "=" + value + + "&expires=" + dates[info.state] + + vary + cache_control + ignore_request_headers; + // If there's a redirect, the target is the script without any redirect at + // either the same domain or a different domain. + if ("redirect" in info) { + var host_info = get_host_info(); + var origin; + switch (info.redirect) { + case "same-origin": + origin = host_info['HTTP_ORIGIN']; + break; + case "cross-origin": + origin = host_info['HTTP_REMOTE_ORIGIN']; + break; + } + var redirected_url = origin + base_path() + url_sans_redirect; + return url_sans_redirect + "&redirect=" + encodeURIComponent(redirected_url); + } else { + return url_sans_redirect; + } +} +function expected_status(type, identifier, init) { + if (type == "date" && + init.headers && + init.headers["If-Modified-Since"] == identifier) { + // The server will respond with a 304 in this case. + return [304, "Not Modified"]; + } + return [200, "OK"]; +} +function expected_response_text(type, identifier, init, content) { + if (type == "date" && + init.headers && + init.headers["If-Modified-Since"] == identifier) { + // The server will respond with a 304 in this case. 
+ return ""; + } + return content; +} +function server_state(uuid) { + return fetch("resources/cache.py?querystate&token=" + uuid) + .then(function(response) { + return response.text(); + }).then(function(text) { + // null will be returned if the server never received any requests + // for the given uuid. Normalize that to an empty list consistent + // with our representation. + return JSON.parse(text) || []; + }); +} +function make_test(type, info) { + return function(test) { + var uuid = token(); + var identifier = (type == "tag" ? Math.random() : now.toGMTString()); + var content = Math.random().toString(); + var url = make_url(uuid, type, identifier, content, info); + var fetch_functions = []; + for (var i = 0; i < info.request_cache.length; ++i) { + fetch_functions.push(function(idx) { + var init = {cache: info.request_cache[idx]}; + if ("request_headers" in info) { + init.headers = info.request_headers[idx]; + } + if (init.cache === "only-if-cached") { + // only-if-cached requires we use same-origin mode. + init.mode = "same-origin"; + } + return fetch(url, init) + .then(function(response) { + if ("response" in info && info.response[idx] === "error") { + assert_true(false, "fetch should have been an error"); + return; + } + assert_array_equals([response.status, response.statusText], + expected_status(type, identifier, init)); + return response.text(); + }).then(function(text) { + assert_equals(text, expected_response_text(type, identifier, init, content)); + }, function(reason) { + if ("response" in info && info.response[idx] === "error") { + assert_throws_js(TypeError, function() { throw reason; }); + } else { + throw reason; + } + }); + }); + } + var i = 0; + function run_next_step() { + if (fetch_functions.length) { + return fetch_functions.shift()(i++) + .then(run_next_step); + } else { + return Promise.resolve(); + } + } + return run_next_step() + .then(function() { + // Now, query the server state + return server_state(uuid); + }).then(function(state) { + var expectedState = []; + info.expected_validation_headers.forEach(function (validate) { + if (validate) { + if (type == "tag") { + expectedState.push({"If-None-Match": '"' + identifier + '"'}); + } else { + expectedState.push({"If-Modified-Since": identifier}); + } + } else { + expectedState.push({}); + } + }); + for (var i = 0; i < info.expected_no_cache_headers.length; ++i) { + if (info.expected_no_cache_headers[i]) { + expectedState[i]["Pragma"] = "no-cache"; + expectedState[i]["Cache-Control"] = "no-cache"; + } + } + if ("expected_max_age_headers" in info) { + for (var i = 0; i < info.expected_max_age_headers.length; ++i) { + if (info.expected_max_age_headers[i]) { + expectedState[i]["Cache-Control"] = "max-age=0"; + } + } + } + assert_equals(state.length, expectedState.length); + for (var i = 0; i < state.length; ++i) { + for (var header in state[i]) { + assert_equals(state[i][header], expectedState[i][header]); + delete expectedState[i][header]; + } + for (var header in expectedState[i]) { + assert_false(header in state[i]); + } + } + }); + }; +} + +function run_tests(tests) +{ + tests.forEach(function(info) { + promise_test(make_test("tag", info), info.name + " with Etag and " + info.state + " response"); + promise_test(make_test("date", info), info.name + " with Last-Modified and " + info.state + " response"); + }); +} diff --git a/test/wpt/tests/fetch/api/request/request-clone.sub.html b/test/wpt/tests/fetch/api/request/request-clone.sub.html new file mode 100644 index 0000000..c690bb3 --- /dev/null +++ 
b/test/wpt/tests/fetch/api/request/request-clone.sub.html @@ -0,0 +1,63 @@ + + + + + Request clone + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/request/request-consume-empty.any.js b/test/wpt/tests/fetch/api/request/request-consume-empty.any.js new file mode 100644 index 0000000..034a860 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-consume-empty.any.js @@ -0,0 +1,101 @@ +// META: global=window,worker +// META: title=Request consume empty bodies + +function checkBodyText(test, request) { + return request.text().then(function(bodyAsText) { + assert_equals(bodyAsText, "", "Resolved value should be empty"); + assert_false(request.bodyUsed); + }); +} + +function checkBodyBlob(test, request) { + return request.blob().then(function(bodyAsBlob) { + var promise = new Promise(function(resolve, reject) { + var reader = new FileReader(); + reader.onload = function(evt) { + resolve(reader.result) + }; + reader.onerror = function() { + reject("Blob's reader failed"); + }; + reader.readAsText(bodyAsBlob); + }); + return promise.then(function(body) { + assert_equals(body, "", "Resolved value should be empty"); + assert_false(request.bodyUsed); + }); + }); +} + +function checkBodyArrayBuffer(test, request) { + return request.arrayBuffer().then(function(bodyAsArrayBuffer) { + assert_equals(bodyAsArrayBuffer.byteLength, 0, "Resolved value should be empty"); + assert_false(request.bodyUsed); + }); +} + +function checkBodyJSON(test, request) { + return request.json().then( + function(bodyAsJSON) { + assert_unreached("JSON parsing should fail"); + }, + function() { + assert_false(request.bodyUsed); + }); +} + +function checkBodyFormData(test, request) { + return request.formData().then(function(bodyAsFormData) { + assert_true(bodyAsFormData instanceof FormData, "Should receive a FormData"); + assert_false(request.bodyUsed); + }); +} + +function checkBodyFormDataError(test, request) { + return promise_rejects_js(test, TypeError, request.formData()).then(function() { + assert_false(request.bodyUsed); + }); +} + +function checkRequestWithNoBody(bodyType, checkFunction, headers = []) { + promise_test(function(test) { + var request = new Request("", {"method": "POST", "headers": headers}); + assert_false(request.bodyUsed); + return checkFunction(test, request); + }, "Consume request's body as " + bodyType); +} + +checkRequestWithNoBody("text", checkBodyText); +checkRequestWithNoBody("blob", checkBodyBlob); +checkRequestWithNoBody("arrayBuffer", checkBodyArrayBuffer); +checkRequestWithNoBody("json (error case)", checkBodyJSON); +checkRequestWithNoBody("formData with correct multipart type (error case)", checkBodyFormDataError, [["Content-Type", 'multipart/form-data; boundary="boundary"']]); +checkRequestWithNoBody("formData with correct urlencoded type", checkBodyFormData, [["Content-Type", "application/x-www-form-urlencoded;charset=UTF-8"]]); +checkRequestWithNoBody("formData without correct type (error case)", checkBodyFormDataError); + +function checkRequestWithEmptyBody(bodyType, body, asText) { + promise_test(function(test) { + var request = new Request("", {"method": "POST", "body": body}); + assert_false(request.bodyUsed, "bodyUsed is false at init"); + if (asText) { + return request.text().then(function(bodyAsString) { + assert_equals(bodyAsString.length, 0, "Resolved value should be empty"); + assert_true(request.bodyUsed, "bodyUsed is true after being consumed"); + }); + } + return request.arrayBuffer().then(function(bodyAsArrayBuffer) { + 
assert_equals(bodyAsArrayBuffer.byteLength, 0, "Resolved value should be empty"); + assert_true(request.bodyUsed, "bodyUsed is true after being consumed"); + }); + }, "Consume empty " + bodyType + " request body as " + (asText ? "text" : "arrayBuffer")); +} + +// FIXME: Add BufferSource, FormData and URLSearchParams. +checkRequestWithEmptyBody("blob", new Blob([], { "type" : "text/plain" }), false); +checkRequestWithEmptyBody("text", "", false); +checkRequestWithEmptyBody("blob", new Blob([], { "type" : "text/plain" }), true); +checkRequestWithEmptyBody("text", "", true); +checkRequestWithEmptyBody("URLSearchParams", new URLSearchParams(""), true); +// FIXME: This test assumes that the empty string be returned but it is not clear whether that is right. See https://github.com/web-platform-tests/wpt/pull/3950. +checkRequestWithEmptyBody("FormData", new FormData(), true); +checkRequestWithEmptyBody("ArrayBuffer", new ArrayBuffer(), true); diff --git a/test/wpt/tests/fetch/api/request/request-consume.any.js b/test/wpt/tests/fetch/api/request/request-consume.any.js new file mode 100644 index 0000000..aff5d65 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-consume.any.js @@ -0,0 +1,145 @@ +// META: global=window,worker +// META: title=Request consume +// META: script=../resources/utils.js + +function checkBodyText(request, expectedBody) { + return request.text().then(function(bodyAsText) { + assert_equals(bodyAsText, expectedBody, "Retrieve and verify request's body"); + assert_true(request.bodyUsed, "body as text: bodyUsed turned true"); + }); +} + +function checkBodyBlob(request, expectedBody, checkContentType) { + return request.blob().then(function(bodyAsBlob) { + if (checkContentType) + assert_equals(bodyAsBlob.type, "text/plain", "Blob body type should be computed from the request Content-Type"); + + var promise = new Promise(function (resolve, reject) { + var reader = new FileReader(); + reader.onload = function(evt) { + resolve(reader.result) + }; + reader.onerror = function() { + reject("Blob's reader failed"); + }; + reader.readAsText(bodyAsBlob); + }); + return promise.then(function(body) { + assert_equals(body, expectedBody, "Retrieve and verify request's body"); + assert_true(request.bodyUsed, "body as blob: bodyUsed turned true"); + }); + }); +} + +function checkBodyArrayBuffer(request, expectedBody) { + return request.arrayBuffer().then(function(bodyAsArrayBuffer) { + validateBufferFromString(bodyAsArrayBuffer, expectedBody, "Retrieve and verify request's body"); + assert_true(request.bodyUsed, "body as arrayBuffer: bodyUsed turned true"); + }); +} + +function checkBodyJSON(request, expectedBody) { + return request.json().then(function(bodyAsJSON) { + var strBody = JSON.stringify(bodyAsJSON) + assert_equals(strBody, expectedBody, "Retrieve and verify request's body"); + assert_true(request.bodyUsed, "body as json: bodyUsed turned true"); + }); +} + +function checkBodyFormData(request, expectedBody) { + return request.formData().then(function(bodyAsFormData) { + assert_true(bodyAsFormData instanceof FormData, "Should receive a FormData"); + assert_true(request.bodyUsed, "body as formData: bodyUsed turned true"); + }); +} + +function checkRequestBody(body, expected, bodyType) { + promise_test(function(test) { + var request = new Request("", {"method": "POST", "body": body, "headers": [["Content-Type", "text/PLAIN"]] }); + assert_false(request.bodyUsed, "bodyUsed is false at init"); + return checkBodyText(request, expected); + }, "Consume " + bodyType + " request's body 
as text"); + promise_test(function(test) { + var request = new Request("", {"method": "POST", "body": body }); + assert_false(request.bodyUsed, "bodyUsed is false at init"); + return checkBodyBlob(request, expected); + }, "Consume " + bodyType + " request's body as blob"); + promise_test(function(test) { + var request = new Request("", {"method": "POST", "body": body }); + assert_false(request.bodyUsed, "bodyUsed is false at init"); + return checkBodyArrayBuffer(request, expected); + }, "Consume " + bodyType + " request's body as arrayBuffer"); + promise_test(function(test) { + var request = new Request("", {"method": "POST", "body": body }); + assert_false(request.bodyUsed, "bodyUsed is false at init"); + return checkBodyJSON(request, expected); + }, "Consume " + bodyType + " request's body as JSON"); +} + +var textData = JSON.stringify("This is response's body"); +var blob = new Blob([textData], { "type" : "text/plain" }); + +checkRequestBody(textData, textData, "String"); + +var string = "\"123456\""; +function getArrayBuffer() { + var arrayBuffer = new ArrayBuffer(8); + var int8Array = new Int8Array(arrayBuffer); + for (var cptr = 0; cptr < 8; cptr++) + int8Array[cptr] = string.charCodeAt(cptr); + return arrayBuffer; +} + +function getArrayBufferWithZeros() { + var arrayBuffer = new ArrayBuffer(10); + var int8Array = new Int8Array(arrayBuffer); + for (var cptr = 0; cptr < 8; cptr++) + int8Array[cptr + 1] = string.charCodeAt(cptr); + return arrayBuffer; +} + +checkRequestBody(getArrayBuffer(), string, "ArrayBuffer"); +checkRequestBody(new Uint8Array(getArrayBuffer()), string, "Uint8Array"); +checkRequestBody(new Int8Array(getArrayBufferWithZeros(), 1, 8), string, "Int8Array"); +checkRequestBody(new Float32Array(getArrayBuffer()), string, "Float32Array"); +checkRequestBody(new DataView(getArrayBufferWithZeros(), 1, 8), string, "DataView"); + +promise_test(function(test) { + var formData = new FormData(); + formData.append("name", "value") + var request = new Request("", {"method": "POST", "body": formData }); + assert_false(request.bodyUsed, "bodyUsed is false at init"); + return checkBodyFormData(request, formData); +}, "Consume FormData request's body as FormData"); + +function checkBlobResponseBody(blobBody, blobData, bodyType, checkFunction) { + promise_test(function(test) { + var response = new Response(blobBody); + assert_false(response.bodyUsed, "bodyUsed is false at init"); + return checkFunction(response, blobData); + }, "Consume blob response's body as " + bodyType); +} + +checkBlobResponseBody(blob, textData, "blob", checkBodyBlob); +checkBlobResponseBody(blob, textData, "text", checkBodyText); +checkBlobResponseBody(blob, textData, "json", checkBodyJSON); +checkBlobResponseBody(blob, textData, "arrayBuffer", checkBodyArrayBuffer); +checkBlobResponseBody(new Blob([""]), "", "blob (empty blob as input)", checkBodyBlob); + +var goodJSONValues = ["null", "1", "true", "\"string\""]; +goodJSONValues.forEach(function(value) { + promise_test(function(test) { + var request = new Request("", {"method": "POST", "body": value}); + return request.json().then(function(v) { + assert_equals(v, JSON.parse(value)); + }); + }, "Consume JSON from text: '" + JSON.stringify(value) + "'"); +}); + +var badJSONValues = ["undefined", "{", "a", "["]; +badJSONValues.forEach(function(value) { + promise_test(function(test) { + var request = new Request("", {"method": "POST", "body": value}); + return promise_rejects_js(test, SyntaxError, request.json()); + }, "Trying to consume bad JSON text as JSON: '" + 
value + "'"); +}); diff --git a/test/wpt/tests/fetch/api/request/request-disturbed.any.js b/test/wpt/tests/fetch/api/request/request-disturbed.any.js new file mode 100644 index 0000000..8a11de7 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-disturbed.any.js @@ -0,0 +1,109 @@ +// META: global=window,worker +// META: title=Request disturbed +// META: script=../resources/utils.js + +var initValuesDict = {"method" : "POST", + "body" : "Request's body" +}; + +var noBodyConsumed = new Request(""); +var bodyConsumed = new Request("", initValuesDict); + +test(() => { + assert_equals(noBodyConsumed.body, null, "body's default value is null"); + assert_false(noBodyConsumed.bodyUsed , "bodyUsed is false when request is not disturbed"); + assert_not_equals(bodyConsumed.body, null, "non-null body"); + assert_true(bodyConsumed.body instanceof ReadableStream, "non-null body type"); + assert_false(noBodyConsumed.bodyUsed, "bodyUsed is false when request is not disturbed"); +}, "Request's body: initial state"); + +noBodyConsumed.blob(); +bodyConsumed.blob(); + +test(function() { + assert_false(noBodyConsumed.bodyUsed , "bodyUsed is false when request is not disturbed"); + try { + noBodyConsumed.clone(); + } catch (e) { + assert_unreached("Can use request not disturbed for creating or cloning request"); + } +}, "Request without body cannot be disturbed"); + +test(function() { + assert_true(bodyConsumed.bodyUsed , "bodyUsed is true when request is disturbed"); + assert_throws_js(TypeError, function() { bodyConsumed.clone(); }); +}, "Check cloning a disturbed request"); + +test(function() { + assert_true(bodyConsumed.bodyUsed , "bodyUsed is true when request is disturbed"); + assert_throws_js(TypeError, function() { new Request(bodyConsumed); }); +}, "Check creating a new request from a disturbed request"); + +promise_test(function() { + assert_true(bodyConsumed.bodyUsed , "bodyUsed is true when request is disturbed"); + const originalBody = bodyConsumed.body; + const bodyReplaced = new Request(bodyConsumed, { body: "Replaced body" }); + assert_not_equals(bodyReplaced.body, originalBody, "new request's body is new"); + assert_false(bodyReplaced.bodyUsed, "bodyUsed is false when request is not disturbed"); + return bodyReplaced.text().then(text => { + assert_equals(text, "Replaced body"); + }); +}, "Check creating a new request with a new body from a disturbed request"); + +promise_test(function() { + var bodyRequest = new Request("", initValuesDict); + const originalBody = bodyRequest.body; + assert_false(bodyRequest.bodyUsed , "bodyUsed is false when request is not disturbed"); + var requestFromRequest = new Request(bodyRequest); + assert_true(bodyRequest.bodyUsed , "bodyUsed is true when request is disturbed"); + assert_equals(bodyRequest.body, originalBody, "body should not change"); + assert_not_equals(originalBody, undefined, "body should not be undefined"); + assert_not_equals(originalBody, null, "body should not be null"); + assert_not_equals(requestFromRequest.body, originalBody, "new request's body is new"); + return requestFromRequest.text().then(text => { + assert_equals(text, "Request's body"); + }); +}, "Input request used for creating new request became disturbed"); + +promise_test(() => { + const bodyRequest = new Request("", initValuesDict); + const originalBody = bodyRequest.body; + assert_false(bodyRequest.bodyUsed , "bodyUsed is false when request is not disturbed"); + const requestFromRequest = new Request(bodyRequest, { body : "init body" }); + 
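+  // Per the Fetch spec (and as the assertions below check), constructing a
+  // Request from another Request marks the input's body as used, even when
+  // init.body replaces it.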
assert_true(bodyRequest.bodyUsed , "bodyUsed is true when request is disturbed"); + assert_equals(bodyRequest.body, originalBody, "body should not change"); + assert_not_equals(originalBody, undefined, "body should not be undefined"); + assert_not_equals(originalBody, null, "body should not be null"); + assert_not_equals(requestFromRequest.body, originalBody, "new request's body is new"); + + return requestFromRequest.text().then(text => { + assert_equals(text, "init body"); + }); +}, "Input request used for creating new request became disturbed even if body is not used"); + +promise_test(function(test) { + assert_true(bodyConsumed.bodyUsed , "bodyUsed is true when request is disturbed"); + return promise_rejects_js(test, TypeError, bodyConsumed.blob()); +}, "Check consuming a disturbed request"); + +test(function() { + var req = new Request(URL, {method: 'POST', body: 'hello'}); + assert_false(req.bodyUsed, + 'Request should not be flagged as used if it has not been ' + + 'consumed.'); + assert_throws_js(TypeError, + function() { new Request(req, {method: 'GET'}); }, + 'A get request may not have body.'); + + assert_false(req.bodyUsed, 'After the GET case'); + + assert_throws_js(TypeError, + function() { new Request(req, {method: 'CONNECT'}); }, + 'Request() with a forbidden method must throw.'); + + assert_false(req.bodyUsed, 'After the forbidden method case'); + + var req2 = new Request(req); + assert_true(req.bodyUsed, + 'Request should be flagged as used if it has been consumed.'); +}, 'Request construction failure should not set "bodyUsed"'); diff --git a/test/wpt/tests/fetch/api/request/request-error.any.js b/test/wpt/tests/fetch/api/request/request-error.any.js new file mode 100644 index 0000000..9ec8015 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-error.any.js @@ -0,0 +1,56 @@ +// META: global=window,worker +// META: title=Request error +// META: script=request-error.js + +// badRequestArgTests is from response-error.js +for (const { args, testName } of badRequestArgTests) { + test(() => { + assert_throws_js( + TypeError, + () => new Request(...args), + "Expect TypeError exception" + ); + }, testName); +} + +test(function() { + assert_throws_js( + TypeError, + () => Request("about:blank"), + "Calling Request constructor without 'new' must throw" + ); +}); + +test(function() { + var initialHeaders = new Headers([["Content-Type", "potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders}); + var request = new Request(initialRequest); + assert_equals(request.headers.get("Content-Type"), "potato"); +}, "Request should get its content-type from the init request"); + +test(function() { + var initialHeaders = new Headers([["Content-Type", "potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders}); + var headers = new Headers([]); + var request = new Request(initialRequest, {"headers" : headers}); + assert_false(request.headers.has("Content-Type")); +}, "Request should not get its content-type from the init request if init headers are provided"); + +test(function() { + var initialHeaders = new Headers([["Content-Type-Extra", "potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders, "body" : "this is my plate", "method" : "POST"}); + var request = new Request(initialRequest); + assert_equals(request.headers.get("Content-Type"), "text/plain;charset=UTF-8"); +}, "Request should get its content-type from the body if none is provided"); + +test(function() { + var initialHeaders = new Headers([["Content-Type", 
"potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders, "body" : "this is my plate", "method" : "POST"}); + var request = new Request(initialRequest); + assert_equals(request.headers.get("Content-Type"), "potato"); +}, "Request should get its content-type from init headers if one is provided"); + +test(function() { + var options = {"cache": "only-if-cached", "mode": "same-origin"}; + new Request("test", options); +}, "Request with cache mode: only-if-cached and fetch mode: same-origin"); diff --git a/test/wpt/tests/fetch/api/request/request-error.js b/test/wpt/tests/fetch/api/request/request-error.js new file mode 100644 index 0000000..cf77313 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-error.js @@ -0,0 +1,57 @@ +const badRequestArgTests = [ + { + args: ["", { "window": "http://test.url" }], + testName: "RequestInit's window is not null" + }, + { + args: ["http://:not a valid URL"], + testName: "Input URL is not valid" + }, + { + args: ["http://user:pass@test.url"], + testName: "Input URL has credentials" + }, + { + args: ["", { "mode": "navigate" }], + testName: "RequestInit's mode is navigate" + }, + { + args: ["", { "referrer": "http://:not a valid URL" }], + testName: "RequestInit's referrer is invalid" + }, + { + args: ["", { "method": "IN VALID" }], + testName: "RequestInit's method is invalid" + }, + { + args: ["", { "method": "TRACE" }], + testName: "RequestInit's method is forbidden" + }, + { + args: ["", { "mode": "no-cors", "method": "PUT" }], + testName: "RequestInit's mode is no-cors and method is not simple" + }, + { + args: ["", { "mode": "cors", "cache": "only-if-cached" }], + testName: "RequestInit's cache mode is only-if-cached and mode is not same-origin" + }, + { + args: ["test", { "cache": "only-if-cached", "mode": "cors" }], + testName: "Request with cache mode: only-if-cached and fetch mode cors" + }, + { + args: ["test", { "cache": "only-if-cached", "mode": "no-cors" }], + testName: "Request with cache mode: only-if-cached and fetch mode no-cors" + } +]; + +badRequestArgTests.push( + ...["referrerPolicy", "mode", "credentials", "cache", "redirect"].map(optionProp => { + const options = {}; + options[optionProp] = "BAD"; + return { + args: ["", options], + testName: `Bad ${optionProp} init parameter value` + }; + }) +); diff --git a/test/wpt/tests/fetch/api/request/request-headers.any.js b/test/wpt/tests/fetch/api/request/request-headers.any.js new file mode 100644 index 0000000..22925e0 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-headers.any.js @@ -0,0 +1,178 @@ +// META: global=window,worker +// META: title=Request Headers + +var validRequestHeaders = [ + ["Content-Type", "OK"], + ["Potato", "OK"], + ["proxy", "OK"], + ["proxya", "OK"], + ["sec", "OK"], + ["secb", "OK"], + ["Set-Cookie2", "OK"], + ["User-Agent", "OK"], +]; +var invalidRequestHeaders = [ + ["Accept-Charset", "KO"], + ["accept-charset", "KO"], + ["ACCEPT-ENCODING", "KO"], + ["Accept-Encoding", "KO"], + ["Access-Control-Request-Headers", "KO"], + ["Access-Control-Request-Method", "KO"], + ["Access-Control-Request-Private-Network", "KO"], + ["Connection", "KO"], + ["Content-Length", "KO"], + ["Cookie", "KO"], + ["Cookie2", "KO"], + ["Date", "KO"], + ["DNT", "KO"], + ["Expect", "KO"], + ["Host", "KO"], + ["Keep-Alive", "KO"], + ["Origin", "KO"], + ["Referer", "KO"], + ["Set-Cookie", "KO"], + ["TE", "KO"], + ["Trailer", "KO"], + ["Transfer-Encoding", "KO"], + ["Upgrade", "KO"], + ["Via", "KO"], + ["Proxy-", "KO"], + ["proxy-a", "KO"], + ["Sec-", 
"KO"], + ["sec-b", "KO"], +]; + +var validRequestNoCorsHeaders = [ + ["Accept", "OK"], + ["Accept-Language", "OK"], + ["content-language", "OK"], + ["content-type", "application/x-www-form-urlencoded"], + ["content-type", "application/x-www-form-urlencoded;charset=UTF-8"], + ["content-type", "multipart/form-data"], + ["content-type", "multipart/form-data;charset=UTF-8"], + ["content-TYPE", "text/plain"], + ["CONTENT-type", "text/plain;charset=UTF-8"], +]; +var invalidRequestNoCorsHeaders = [ + ["Content-Type", "KO"], + ["Potato", "KO"], + ["proxy", "KO"], + ["proxya", "KO"], + ["sec", "KO"], + ["secb", "KO"], + ["Empty-Value", ""], +]; + +validRequestHeaders.forEach(function(header) { + test(function() { + var request = new Request(""); + request.headers.set(header[0], header[1]); + assert_equals(request.headers.get(header[0]), header[1]); + }, "Adding valid request header \"" + header[0] + ": " + header[1] + "\""); +}); +invalidRequestHeaders.forEach(function(header) { + test(function() { + var request = new Request(""); + request.headers.set(header[0], header[1]); + assert_equals(request.headers.get(header[0]), null); + }, "Adding invalid request header \"" + header[0] + ": " + header[1] + "\""); +}); + +validRequestNoCorsHeaders.forEach(function(header) { + test(function() { + var requestNoCors = new Request("", {"mode": "no-cors"}); + requestNoCors.headers.set(header[0], header[1]); + assert_equals(requestNoCors.headers.get(header[0]), header[1]); + }, "Adding valid no-cors request header \"" + header[0] + ": " + header[1] + "\""); +}); +invalidRequestNoCorsHeaders.forEach(function(header) { + test(function() { + var requestNoCors = new Request("", {"mode": "no-cors"}); + requestNoCors.headers.set(header[0], header[1]); + assert_equals(requestNoCors.headers.get(header[0]), null); + }, "Adding invalid no-cors request header \"" + header[0] + ": " + header[1] + "\""); +}); + +test(function() { + var headers = new Headers([["Cookie2", "potato"]]); + var request = new Request("", {"headers": headers}); + assert_equals(request.headers.get("Cookie2"), null); +}, "Check that request constructor is filtering headers provided as init parameter"); + +test(function() { + var headers = new Headers([["Content-Type", "potato"]]); + var request = new Request("", {"headers": headers, "mode": "no-cors"}); + assert_equals(request.headers.get("Content-Type"), null); +}, "Check that no-cors request constructor is filtering headers provided as init parameter"); + +test(function() { + var headers = new Headers([["Content-Type", "potato"]]); + var initialRequest = new Request("", {"headers": headers}); + var request = new Request(initialRequest, {"mode": "no-cors"}); + assert_equals(request.headers.get("Content-Type"), null); +}, "Check that no-cors request constructor is filtering headers provided as part of request parameter"); + +test(function() { + var initialHeaders = new Headers([["Content-Type", "potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders}); + var request = new Request(initialRequest); + assert_equals(request.headers.get("Content-Type"), "potato"); +}, "Request should get its content-type from the init request"); + +test(function() { + var initialHeaders = new Headers([["Content-Type", "potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders}); + var headers = new Headers([]); + var request = new Request(initialRequest, {"headers" : headers}); + assert_false(request.headers.has("Content-Type")); +}, "Request should not get its content-type 
from the init request if init headers are provided"); + +test(function() { + var initialHeaders = new Headers([["Content-Type-Extra", "potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders, "body" : "this is my plate", "method" : "POST"}); + var request = new Request(initialRequest); + assert_equals(request.headers.get("Content-Type"), "text/plain;charset=UTF-8"); +}, "Request should get its content-type from the body if none is provided"); + +test(function() { + var initialHeaders = new Headers([["Content-Type", "potato"]]); + var initialRequest = new Request("", {"headers" : initialHeaders, "body" : "this is my plate", "method" : "POST"}); + var request = new Request(initialRequest); + assert_equals(request.headers.get("Content-Type"), "potato"); +}, "Request should get its content-type from init headers if one is provided"); + +test(function() { + var array = [["hello", "worldAHH"]]; + var object = {"hello": 'worldOOH'}; + var headers = new Headers(array); + + assert_equals(headers.get("hello"), "worldAHH"); + + var request1 = new Request("", {"headers": headers}); + var request2 = new Request("", {"headers": array}); + var request3 = new Request("", {"headers": object}); + + assert_equals(request1.headers.get("hello"), "worldAHH"); + assert_equals(request2.headers.get("hello"), "worldAHH"); + assert_equals(request3.headers.get("hello"), "worldOOH"); +}, "Testing request header creations with various objects"); + +promise_test(function(test) { + var request = new Request("", {"headers" : [["Content-Type", ""]], "body" : "this is my plate", "method" : "POST"}); + return request.blob().then(function(blob) { + assert_equals(blob.type, "", "Blob type should be the empty string"); + }); +}, "Testing empty Request Content-Type header"); + +test(function() { + const request1 = new Request(""); + assert_equals(request1.headers, request1.headers); + + const request2 = new Request("", {"headers": {"X-Foo": "bar"}}); + assert_equals(request2.headers, request2.headers); + const headers = request2.headers; + request2.headers.set("X-Foo", "quux"); + assert_equals(headers, request2.headers); + headers.set("X-Other-Header", "baz"); + assert_equals(headers, request2.headers); +}, "Test that Request.headers has the [SameObject] extended attribute"); diff --git a/test/wpt/tests/fetch/api/request/request-init-001.sub.html b/test/wpt/tests/fetch/api/request/request-init-001.sub.html new file mode 100644 index 0000000..cc495a6 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-init-001.sub.html @@ -0,0 +1,112 @@ + + + + + Request init: simple cases + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/request/request-init-002.any.js b/test/wpt/tests/fetch/api/request/request-init-002.any.js new file mode 100644 index 0000000..abb6689 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-init-002.any.js @@ -0,0 +1,60 @@ +// META: global=window,worker +// META: title=Request init: headers and body + +test(function() { + var headerDict = {"name1": "value1", + "name2": "value2", + "name3": "value3" + }; + var headers = new Headers(headerDict); + var request = new Request("", { "headers" : headers }) + for (var name in headerDict) { + assert_equals(request.headers.get(name), headerDict[name], + "request's headers has " + name + " : " + headerDict[name]); + } +}, "Initialize Request with headers values"); + +function makeRequestInit(body, method) { + return {"method": method, "body": body}; +} + +function checkRequestInit(body, bodyType, expectedTextBody) { + 
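+  // body: value passed as RequestInit.body; bodyType: substring expected in the
+  // resulting Content-Type header; expectedTextBody: substring expected when the
+  // body is read back as text (substring match, since e.g. a FormData body's
+  // exact serialization cannot be predicted).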
promise_test(function(test) { + var request = new Request("", makeRequestInit(body, "POST")); + if (body) { + assert_throws_js(TypeError, function() { new Request("", makeRequestInit(body, "GET")); }); + assert_throws_js(TypeError, function() { new Request("", makeRequestInit(body, "HEAD")); }); + } else { + new Request("", makeRequestInit(body, "GET")); // should not throw + } + var reqHeaders = request.headers; + var mime = reqHeaders.get("Content-Type"); + assert_true(!body || (mime && mime.search(bodyType) > -1), "Content-Type header should be \"" + bodyType + "\", not \"" + mime + "\""); + return request.text().then(function(bodyAsText) { + //not equals: cannot guess formData exact value + assert_true( bodyAsText.search(expectedTextBody) > -1, "Retrieve and verify request body"); + }); + }, `Initialize Request's body with "${body}", ${bodyType}`); +} + +var blob = new Blob(["This is a blob"], {type: "application/octet-binary"}); +var formaData = new FormData(); +formaData.append("name", "value"); +var usvString = "This is a USVString" + +checkRequestInit(undefined, undefined, ""); +checkRequestInit(null, null, ""); +checkRequestInit(blob, "application/octet-binary", "This is a blob"); +checkRequestInit(formaData, "multipart/form-data", "name=\"name\"\r\n\r\nvalue"); +checkRequestInit(usvString, "text/plain;charset=UTF-8", "This is a USVString"); +checkRequestInit({toString: () => "hi!"}, "text/plain;charset=UTF-8", "hi!"); + +// Ensure test does not time out in case of missing URLSearchParams support. +if (self.URLSearchParams) { + var urlSearchParams = new URLSearchParams("name=value"); + checkRequestInit(urlSearchParams, "application/x-www-form-urlencoded;charset=UTF-8", "name=value"); +} else { + promise_test(function(test) { + return Promise.reject("URLSearchParams not supported"); + }, "Initialize Request's body with application/x-www-form-urlencoded;charset=UTF-8"); +} diff --git a/test/wpt/tests/fetch/api/request/request-init-003.sub.html b/test/wpt/tests/fetch/api/request/request-init-003.sub.html new file mode 100644 index 0000000..79c91cd --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-init-003.sub.html @@ -0,0 +1,84 @@ + + + + + Request: init with request or url + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/request/request-init-contenttype.any.js b/test/wpt/tests/fetch/api/request/request-init-contenttype.any.js new file mode 100644 index 0000000..18a6969 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-init-contenttype.any.js @@ -0,0 +1,141 @@ +function requestFromBody(body) { + return new Request( + "https://example.com", + { + method: "POST", + body, + duplex: "half", + }, + ); +} + +test(() => { + const request = requestFromBody(undefined); + assert_equals(request.headers.get("Content-Type"), null); +}, "Default Content-Type for Request with empty body"); + +test(() => { + const blob = new Blob([]); + const request = requestFromBody(blob); + assert_equals(request.headers.get("Content-Type"), null); +}, "Default Content-Type for Request with Blob body (no type set)"); + +test(() => { + const blob = new Blob([], { type: "" }); + const request = requestFromBody(blob); + assert_equals(request.headers.get("Content-Type"), null); +}, "Default Content-Type for Request with Blob body (empty type)"); + +test(() => { + const blob = new Blob([], { type: "a/b; c=d" }); + const request = requestFromBody(blob); + assert_equals(request.headers.get("Content-Type"), "a/b; c=d"); +}, "Default Content-Type for Request with Blob body (set type)"); + 
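+// For reference, the implicit Content-Type mapping exercised by these tests
+// (illustrative summary only, not a helper used by the test file):
+//   string                      -> "text/plain;charset=UTF-8"
+//   URLSearchParams             -> "application/x-www-form-urlencoded;charset=UTF-8"
+//   Blob                        -> the Blob's own type, or no header when the type is empty
+//   FormData                    -> "multipart/form-data; boundary=<generated>"
+//   BufferSource / ReadableStream / no body -> no Content-Type header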
+test(() => { + const buffer = new Uint8Array(); + const request = requestFromBody(buffer); + assert_equals(request.headers.get("Content-Type"), null); +}, "Default Content-Type for Request with buffer source body"); + +promise_test(async () => { + const formData = new FormData(); + formData.append("a", "b"); + const request = requestFromBody(formData); + const boundary = (await request.text()).split("\r\n")[0].slice(2); + assert_equals( + request.headers.get("Content-Type"), + `multipart/form-data; boundary=${boundary}`, + ); +}, "Default Content-Type for Request with FormData body"); + +test(() => { + const usp = new URLSearchParams(); + const request = requestFromBody(usp); + assert_equals( + request.headers.get("Content-Type"), + "application/x-www-form-urlencoded;charset=UTF-8", + ); +}, "Default Content-Type for Request with URLSearchParams body"); + +test(() => { + const request = requestFromBody(""); + assert_equals( + request.headers.get("Content-Type"), + "text/plain;charset=UTF-8", + ); +}, "Default Content-Type for Request with string body"); + +test(() => { + const stream = new ReadableStream(); + const request = requestFromBody(stream); + assert_equals(request.headers.get("Content-Type"), null); +}, "Default Content-Type for Request with ReadableStream body"); + +// ----------------------------------------------------------------------------- + +const OVERRIDE_MIME = "test/only; mime=type"; + +function requestFromBodyWithOverrideMime(body) { + return new Request( + "https://example.com", + { + method: "POST", + body, + headers: { "Content-Type": OVERRIDE_MIME }, + duplex: "half", + }, + ); +} + +test(() => { + const request = requestFromBodyWithOverrideMime(undefined); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with empty body"); + +test(() => { + const blob = new Blob([]); + const request = requestFromBodyWithOverrideMime(blob); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with Blob body (no type set)"); + +test(() => { + const blob = new Blob([], { type: "" }); + const request = requestFromBodyWithOverrideMime(blob); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with Blob body (empty type)"); + +test(() => { + const blob = new Blob([], { type: "a/b; c=d" }); + const request = requestFromBodyWithOverrideMime(blob); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with Blob body (set type)"); + +test(() => { + const buffer = new Uint8Array(); + const request = requestFromBodyWithOverrideMime(buffer); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with buffer source body"); + +test(() => { + const formData = new FormData(); + const request = requestFromBodyWithOverrideMime(formData); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with FormData body"); + +test(() => { + const usp = new URLSearchParams(); + const request = requestFromBodyWithOverrideMime(usp); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with URLSearchParams body"); + +test(() => { + const request = requestFromBodyWithOverrideMime(""); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with string 
body"); + +test(() => { + const stream = new ReadableStream(); + const request = requestFromBodyWithOverrideMime(stream); + assert_equals(request.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Request with ReadableStream body"); diff --git a/test/wpt/tests/fetch/api/request/request-init-priority.any.js b/test/wpt/tests/fetch/api/request/request-init-priority.any.js new file mode 100644 index 0000000..eb5073c --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-init-priority.any.js @@ -0,0 +1,26 @@ +var priorities = ["high", + "low", + "auto" + ]; + +for (idx in priorities) { + test(() => { + new Request("", {priority: priorities[idx]}); + }, "new Request() with a '" + priorities[idx] + "' priority does not throw an error"); +} + +test(() => { + assert_throws_js(TypeError, () => { + new Request("", {priority: 'invalid'}); + }, "a new Request() must throw a TypeError if RequestInit's priority is an invalid value"); +}, "new Request() throws a TypeError if any of RequestInit's members' values are invalid"); + +for (idx in priorities) { + promise_test(function(t) { + return fetch('hello.txt', { priority: priorities[idx] }); + }, "fetch() with a '" + priorities[idx] + "' priority completes successfully"); +} + +promise_test(function(t) { + return promise_rejects_js(t, TypeError, fetch('hello.txt', { priority: 'invalid' })); +}, "fetch() with an invalid priority returns a rejected promise with a TypeError"); diff --git a/test/wpt/tests/fetch/api/request/request-init-stream.any.js b/test/wpt/tests/fetch/api/request/request-init-stream.any.js new file mode 100644 index 0000000..f0ae441 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-init-stream.any.js @@ -0,0 +1,147 @@ +// META: global=window,worker + +"use strict"; + +const duplex = "half"; +const method = "POST"; + +test(() => { + const body = new ReadableStream(); + const request = new Request("...", { method, body, duplex }); + assert_equals(request.body, body); +}, "Constructing a Request with a stream holds the original object."); + +test((t) => { + const body = new ReadableStream(); + body.getReader(); + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "Constructing a Request with a stream on which getReader() is called"); + +test((t) => { + const body = new ReadableStream(); + body.getReader().read(); + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "Constructing a Request with a stream on which read() is called"); + +promise_test(async (t) => { + const body = new ReadableStream({ pull: c => c.enqueue(new Uint8Array()) }); + const reader = body.getReader(); + await reader.read(); + reader.releaseLock(); + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "Constructing a Request with a stream on which read() and releaseLock() are called"); + +test((t) => { + const request = new Request("...", { method: "POST", body: "..." }); + request.body.getReader(); + assert_throws_js(TypeError, () => new Request(request)); + // This doesn't throw. + new Request(request, { body: "..." }); +}, "Constructing a Request with a Request on which body.getReader() is called"); + +test((t) => { + const request = new Request("...", { method: "POST", body: "..." }); + request.body.getReader().read(); + assert_throws_js(TypeError, () => new Request(request)); + // This doesn't throw. + new Request(request, { body: "..." 
}); +}, "Constructing a Request with a Request on which body.getReader().read() is called"); + +promise_test(async (t) => { + const request = new Request("...", { method: "POST", body: "..." }); + const reader = request.body.getReader(); + await reader.read(); + reader.releaseLock(); + assert_throws_js(TypeError, () => new Request(request)); + // This doesn't throw. + new Request(request, { body: "..." }); +}, "Constructing a Request with a Request on which read() and releaseLock() are called"); + +test((t) => { + new Request("...", { method, body: null }); +}, "It is OK to omit .duplex when the body is null."); + +test((t) => { + new Request("...", { method, body: "..." }); +}, "It is OK to omit .duplex when the body is a string."); + +test((t) => { + new Request("...", { method, body: new Uint8Array(3) }); +}, "It is OK to omit .duplex when the body is a Uint8Array."); + +test((t) => { + new Request("...", { method, body: new Blob([]) }); +}, "It is OK to omit .duplex when the body is a Blob."); + +test((t) => { + const body = new ReadableStream(); + assert_throws_js(TypeError, + () => new Request("...", { method, body })); +}, "It is error to omit .duplex when the body is a ReadableStream."); + +test((t) => { + new Request("...", { method, body: null, duplex: "half" }); +}, "It is OK to set .duplex = 'half' when the body is null."); + +test((t) => { + new Request("...", { method, body: "...", duplex: "half" }); +}, "It is OK to set .duplex = 'half' when the body is a string."); + +test((t) => { + new Request("...", { method, body: new Uint8Array(3), duplex: "half" }); +}, "It is OK to set .duplex = 'half' when the body is a Uint8Array."); + +test((t) => { + new Request("...", { method, body: new Blob([]), duplex: "half" }); +}, "It is OK to set .duplex = 'half' when the body is a Blob."); + +test((t) => { + const body = new ReadableStream(); + new Request("...", { method, body, duplex: "half" }); +}, "It is OK to set .duplex = 'half' when the body is a ReadableStream."); + +test((t) => { + const body = null; + const duplex = "full"; + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "It is error to set .duplex = 'full' when the body is null."); + +test((t) => { + const body = "..."; + const duplex = "full"; + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "It is error to set .duplex = 'full' when the body is a string."); + +test((t) => { + const body = new Uint8Array(3); + const duplex = "full"; + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "It is error to set .duplex = 'full' when the body is a Uint8Array."); + +test((t) => { + const body = new Blob([]); + const duplex = "full"; + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "It is error to set .duplex = 'full' when the body is a Blob."); + +test((t) => { + const body = new ReadableStream(); + const duplex = "full"; + assert_throws_js(TypeError, + () => new Request("...", { method, body, duplex })); +}, "It is error to set .duplex = 'full' when the body is a ReadableStream."); + +test((t) => { + const body = new ReadableStream(); + const duplex = "half"; + const req1 = new Request("...", { method, body, duplex }); + const req2 = new Request(req1); +}, "It is OK to omit duplex when init.body is not given and input.body is given."); + diff --git a/test/wpt/tests/fetch/api/request/request-keepalive-quota.html b/test/wpt/tests/fetch/api/request/request-keepalive-quota.html new 
file mode 100644 index 0000000..548ab38 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-keepalive-quota.html @@ -0,0 +1,97 @@ + + + + + Request Keepalive Quota Tests + + + + + + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/request/request-keepalive.any.js b/test/wpt/tests/fetch/api/request/request-keepalive.any.js new file mode 100644 index 0000000..cb4506d --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-keepalive.any.js @@ -0,0 +1,17 @@ +// META: global=window,worker +// META: title=Request keepalive +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js + +test(() => { + assert_false(new Request('/').keepalive, 'default'); + assert_true(new Request('/', {keepalive: true}).keepalive, 'true'); + assert_false(new Request('/', {keepalive: false}).keepalive, 'false'); + assert_true(new Request('/', {keepalive: 1}).keepalive, 'truish'); + assert_false(new Request('/', {keepalive: 0}).keepalive, 'falsy'); +}, 'keepalive flag'); + +test(() => { + const init = {method: 'POST', keepalive: true, body: new ReadableStream()}; + assert_throws_js(TypeError, () => {new Request('/', init)}); +}, 'keepalive flag with stream body'); diff --git a/test/wpt/tests/fetch/api/request/request-reset-attributes.https.html b/test/wpt/tests/fetch/api/request/request-reset-attributes.https.html new file mode 100644 index 0000000..7be3608 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-reset-attributes.https.html @@ -0,0 +1,96 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/api/request/request-structure.any.js b/test/wpt/tests/fetch/api/request/request-structure.any.js new file mode 100644 index 0000000..5e78553 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/request-structure.any.js @@ -0,0 +1,143 @@ +// META: global=window,worker +// META: title=Request structure + +var request = new Request(""); +var methods = ["clone", + //Request implements Body + "arrayBuffer", + "blob", + "formData", + "json", + "text" + ]; +var attributes = ["method", + "url", + "headers", + "destination", + "referrer", + "referrerPolicy", + "mode", + "credentials", + "cache", + "redirect", + "integrity", + "isReloadNavigation", + "isHistoryNavigation", + "duplex", + //Request implements Body + "bodyUsed" + ]; +var internalAttributes = ["priority", + "internalpriority", + "blocking" + ]; + +function isReadOnly(request, attributeToCheck) { + var defaultValue = undefined; + var newValue = undefined; + switch (attributeToCheck) { + case "method": + defaultValue = "GET"; + newValue = "POST"; + break; + + case "url": + //default value is base url + //i.e http://example.com/fetch/api/request-structure.html + newValue = "http://url.test"; + break; + + case "headers": + request.headers = new Headers ({"name":"value"}); + assert_false(request.headers.has("name"), "Headers attribute is read only"); + return; + + case "destination": + defaultValue = ""; + newValue = "worker"; + break; + + case "referrer": + defaultValue = "about:client"; + newValue = "http://url.test"; + break; + + case "referrerPolicy": + defaultValue = ""; + newValue = "unsafe-url"; + break; + + case "mode": + defaultValue = "cors"; + newValue = "navigate"; + break; + + case "credentials": + defaultValue = "same-origin"; + newValue = "cors"; + break; + + case "cache": + defaultValue = "default"; + newValue = "reload"; + break; + + case "redirect": + defaultValue = "follow"; + newValue = "manual"; + break; + + case "integrity": + newValue = "CannotWriteIntegrity"; + break; + + case "bodyUsed": + 
defaultValue = false; + newValue = true; + break; + + case "isReloadNavigation": + defaultValue = false; + newValue = true; + break; + + case "isHistoryNavigation": + defaultValue = false; + newValue = true; + break; + + case "duplex": + defaultValue = "half"; + newValue = "full"; + break; + + default: + return; + } + + request[attributeToCheck] = newValue; + if (defaultValue === undefined) + assert_not_equals(request[attributeToCheck], newValue, "Attribute " + attributeToCheck + " is read only"); + else + assert_equals(request[attributeToCheck], defaultValue, + "Attribute " + attributeToCheck + " is read only. Default value is " + defaultValue); +} + +for (var idx in methods) { + test(function() { + assert_true(methods[idx] in request, "request has " + methods[idx] + " method"); + }, "Request has " + methods[idx] + " method"); +} + +for (var idx in attributes) { + test(function() { + assert_true(attributes[idx] in request, "request has " + attributes[idx] + " attribute"); + isReadOnly(request, attributes[idx]); + }, "Check " + attributes[idx] + " attribute"); +} + +for (var idx in internalAttributes) { + test(function() { + assert_false(internalAttributes[idx] in request, "request does not expose " + internalAttributes[idx] + " attribute"); + }, "Request does not expose " + internalAttributes[idx] + " attribute"); +} \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/request/resources/cache.py b/test/wpt/tests/fetch/api/request/resources/cache.py new file mode 100644 index 0000000..ca0bd64 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/resources/cache.py @@ -0,0 +1,67 @@ +from wptserve.utils import isomorphic_decode + +def main(request, response): + token = request.GET.first(b"token", None) + if b"querystate" in request.GET: + from json import JSONEncoder + response.headers.set(b"Content-Type", b"text/plain") + return JSONEncoder().encode(request.server.stash.take(token)) + content = request.GET.first(b"content", None) + tag = request.GET.first(b"tag", None) + date = request.GET.first(b"date", None) + expires = request.GET.first(b"expires", None) + vary = request.GET.first(b"vary", None) + cc = request.GET.first(b"cache_control", None) + redirect = request.GET.first(b"redirect", None) + inm = request.headers.get(b"If-None-Match", None) + ims = request.headers.get(b"If-Modified-Since", None) + pragma = request.headers.get(b"Pragma", None) + cache_control = request.headers.get(b"Cache-Control", None) + ignore = b"ignore" in request.GET + + if tag: + tag = b'"%s"' % tag + + server_state = request.server.stash.take(token) + if not server_state: + server_state = [] + state = dict() + if not ignore: + if inm: + state[u"If-None-Match"] = isomorphic_decode(inm) + if ims: + state[u"If-Modified-Since"] = isomorphic_decode(ims) + if pragma: + state[u"Pragma"] = isomorphic_decode(pragma) + if cache_control: + state[u"Cache-Control"] = isomorphic_decode(cache_control) + server_state.append(state) + request.server.stash.put(token, server_state) + + if tag: + response.headers.set(b"ETag", b'%s' % tag) + elif date: + response.headers.set(b"Last-Modified", date) + if expires: + response.headers.set(b"Expires", expires) + if vary: + response.headers.set(b"Vary", vary) + if cc: + response.headers.set(b"Cache-Control", cc) + + # The only-if-cached redirect tests wants CORS to be okay, the other tests + # are all same-origin anyways and don't care. 
+ response.headers.set(b"Access-Control-Allow-Origin", b"*") + + if redirect: + response.headers.set(b"Location", redirect) + response.status = (302, b"Redirect") + return b"" + elif ((inm is not None and inm == tag) or + (ims is not None and ims == date)): + response.status = (304, b"Not Modified") + return b"" + else: + response.status = (200, b"OK") + response.headers.set(b"Content-Type", b"text/plain") + return content diff --git a/test/wpt/tests/fetch/api/request/resources/hello.txt b/test/wpt/tests/fetch/api/request/resources/hello.txt new file mode 100644 index 0000000..ce01362 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/resources/hello.txt @@ -0,0 +1 @@ +hello diff --git a/test/wpt/tests/fetch/api/request/resources/request-reset-attributes-worker.js b/test/wpt/tests/fetch/api/request/resources/request-reset-attributes-worker.js new file mode 100644 index 0000000..4b264ca --- /dev/null +++ b/test/wpt/tests/fetch/api/request/resources/request-reset-attributes-worker.js @@ -0,0 +1,19 @@ +self.addEventListener('fetch', (event) => { + const params = new URL(event.request.url).searchParams; + if (params.has('ignore')) { + return; + } + if (!params.has('name')) { + event.respondWith(Promise.reject(TypeError('No name is provided.'))); + return; + } + + const name = params.get('name'); + const old_attribute = event.request[name]; + // If any of |init|'s member is present... + const init = {cache: 'no-store'} + const new_attribute = (new Request(event.request, init))[name]; + + event.respondWith( + new Response(`old: ${old_attribute}, new: ${new_attribute}`)); + }); diff --git a/test/wpt/tests/fetch/api/request/url-encoding.html b/test/wpt/tests/fetch/api/request/url-encoding.html new file mode 100644 index 0000000..31c1ed3 --- /dev/null +++ b/test/wpt/tests/fetch/api/request/url-encoding.html @@ -0,0 +1,25 @@ + + +Fetch: URL encoding + + + diff --git a/test/wpt/tests/fetch/api/resources/authentication.py b/test/wpt/tests/fetch/api/resources/authentication.py new file mode 100644 index 0000000..8b6b00b --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/authentication.py @@ -0,0 +1,14 @@ +def main(request, response): + user = request.auth.username + password = request.auth.password + + if user == b"user" and password == b"password": + return b"Authentication done" + + realm = b"test" + if b"realm" in request.GET: + realm = request.GET.first(b"realm") + + return ((401, b"Unauthorized"), + [(b"WWW-Authenticate", b'Basic realm="' + realm + b'"')], + b"Please login with credentials 'user' and 'password'") diff --git a/test/wpt/tests/fetch/api/resources/bad-chunk-encoding.py b/test/wpt/tests/fetch/api/resources/bad-chunk-encoding.py new file mode 100644 index 0000000..94a77ad --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/bad-chunk-encoding.py @@ -0,0 +1,13 @@ +import time + +def main(request, response): + delay = float(request.GET.first(b"ms", 1000)) / 1E3 + count = int(request.GET.first(b"count", 50)) + time.sleep(delay) + response.headers.set(b"Transfer-Encoding", b"chunked") + response.write_status_headers() + time.sleep(delay) + for i in range(count): + response.writer.write_content(b"a\r\nTEST_CHUNK\r\n") + time.sleep(delay) + response.writer.write_content(b"garbage") diff --git a/test/wpt/tests/fetch/api/resources/basic.html b/test/wpt/tests/fetch/api/resources/basic.html new file mode 100644 index 0000000..e23afd4 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/basic.html @@ -0,0 +1,5 @@ + + diff --git a/test/wpt/tests/fetch/api/resources/cache.py 
b/test/wpt/tests/fetch/api/resources/cache.py new file mode 100644 index 0000000..4de751e --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/cache.py @@ -0,0 +1,18 @@ +ETAG = b'"123abc"' +CONTENT_TYPE = b"text/plain" +CONTENT = b"lorem ipsum dolor sit amet" + + +def main(request, response): + # let caching kick in if possible (conditional GET) + etag = request.headers.get(b"If-None-Match", None) + if etag == ETAG: + response.headers.set(b"X-HTTP-STATUS", 304) + response.status = (304, b"Not Modified") + return b"" + + # cache miss, so respond with the actual content + response.status = (200, b"OK") + response.headers.set(b"ETag", ETAG) + response.headers.set(b"Content-Type", CONTENT_TYPE) + return CONTENT diff --git a/test/wpt/tests/fetch/api/resources/clean-stash.py b/test/wpt/tests/fetch/api/resources/clean-stash.py new file mode 100644 index 0000000..ee8c69a --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/clean-stash.py @@ -0,0 +1,6 @@ +def main(request, response): + token = request.GET.first(b"token") + if request.server.stash.take(token) is not None: + return b"1" + else: + return b"0" diff --git a/test/wpt/tests/fetch/api/resources/cors-top.txt b/test/wpt/tests/fetch/api/resources/cors-top.txt new file mode 100644 index 0000000..83a3157 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/cors-top.txt @@ -0,0 +1 @@ +top \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/resources/cors-top.txt.headers b/test/wpt/tests/fetch/api/resources/cors-top.txt.headers new file mode 100644 index 0000000..cb762ef --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/cors-top.txt.headers @@ -0,0 +1 @@ +Access-Control-Allow-Origin: * diff --git a/test/wpt/tests/fetch/api/resources/data.json b/test/wpt/tests/fetch/api/resources/data.json new file mode 100644 index 0000000..76519fa --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/data.json @@ -0,0 +1 @@ +{"key": "value"} diff --git a/test/wpt/tests/fetch/api/resources/dump-authorization-header.py b/test/wpt/tests/fetch/api/resources/dump-authorization-header.py new file mode 100644 index 0000000..a651aeb --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/dump-authorization-header.py @@ -0,0 +1,14 @@ +def main(request, response): + headers = [(b"Content-Type", "text/html"), + (b"Cache-Control", b"no-cache")] + + if b"Origin" in request.headers: + headers.append((b"Access-Control-Allow-Origin", request.headers.get(b"Origin", b""))) + headers.append((b"Access-Control-Allow-Credentials", b"true")) + else: + headers.append((b"Access-Control-Allow-Origin", b"*")) + headers.append((b"Access-Control-Allow-Headers", b'Authorization')) + + if b"authorization" in request.headers: + return 200, headers, request.headers.get(b"Authorization") + return 200, headers, "none" diff --git a/test/wpt/tests/fetch/api/resources/echo-content.h2.py b/test/wpt/tests/fetch/api/resources/echo-content.h2.py new file mode 100644 index 0000000..0be3ece --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/echo-content.h2.py @@ -0,0 +1,7 @@ +def handle_headers(frame, request, response): + response.status = 200 + response.headers.update([('Content-Type', 'text/plain')]) + response.write_status_headers() + +def handle_data(frame, request, response): + response.writer.write_data(frame.data) diff --git a/test/wpt/tests/fetch/api/resources/echo-content.py b/test/wpt/tests/fetch/api/resources/echo-content.py new file mode 100644 index 0000000..5e137e1 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/echo-content.py @@ -0,0 +1,12 @@ +from 
wptserve.utils import isomorphic_encode + +def main(request, response): + + headers = [(b"X-Request-Method", isomorphic_encode(request.method)), + (b"X-Request-Content-Length", request.headers.get(b"Content-Length", b"NO")), + (b"X-Request-Content-Type", request.headers.get(b"Content-Type", b"NO")), + # Avoid any kind of content sniffing on the response. + (b"Content-Type", b"text/plain")] + content = request.body + + return headers, content diff --git a/test/wpt/tests/fetch/api/resources/empty.txt b/test/wpt/tests/fetch/api/resources/empty.txt new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/fetch/api/resources/infinite-slow-response.py b/test/wpt/tests/fetch/api/resources/infinite-slow-response.py new file mode 100644 index 0000000..a26cd80 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/infinite-slow-response.py @@ -0,0 +1,35 @@ +import time + + +def url_dir(request): + return u'/'.join(request.url_parts.path.split(u'/')[:-1]) + u'/' + + +def stash_write(request, key, value): + """Write to the stash, overwriting any previous value""" + request.server.stash.take(key, url_dir(request)) + request.server.stash.put(key, value, url_dir(request)) + + +def main(request, response): + stateKey = request.GET.first(b"stateKey", b"") + abortKey = request.GET.first(b"abortKey", b"") + + if stateKey: + stash_write(request, stateKey, 'open') + + response.headers.set(b"Content-type", b"text/plain") + response.write_status_headers() + + # Writing an initial 2k so browsers realise it's there. *shrug* + response.writer.write(b"." * 2048) + + while True: + if not response.writer.write(b"."): + break + if abortKey and request.server.stash.take(abortKey, url_dir(request)): + break + time.sleep(0.01) + + if stateKey: + stash_write(request, stateKey, 'closed') diff --git a/test/wpt/tests/fetch/api/resources/inspect-headers.py b/test/wpt/tests/fetch/api/resources/inspect-headers.py new file mode 100644 index 0000000..9ed566e --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/inspect-headers.py @@ -0,0 +1,24 @@ +def main(request, response): + headers = [] + if b"headers" in request.GET: + checked_headers = request.GET.first(b"headers").split(b"|") + for header in checked_headers: + if header in request.headers: + headers.append((b"x-request-" + header, request.headers.get(header, b""))) + + if b"cors" in request.GET: + if b"Origin" in request.headers: + headers.append((b"Access-Control-Allow-Origin", request.headers.get(b"Origin", b""))) + else: + headers.append((b"Access-Control-Allow-Origin", b"*")) + headers.append((b"Access-Control-Allow-Credentials", b"true")) + headers.append((b"Access-Control-Allow-Methods", b"GET, POST, HEAD")) + exposed_headers = [b"x-request-" + header for header in checked_headers] + headers.append((b"Access-Control-Expose-Headers", b", ".join(exposed_headers))) + if b"allow_headers" in request.GET: + headers.append((b"Access-Control-Allow-Headers", request.GET[b'allow_headers'])) + else: + headers.append((b"Access-Control-Allow-Headers", b", ".join(request.headers))) + + headers.append((b"content-type", b"text/plain")) + return headers, b"" diff --git a/test/wpt/tests/fetch/api/resources/keepalive-helper.js b/test/wpt/tests/fetch/api/resources/keepalive-helper.js new file mode 100644 index 0000000..ad1d4b2 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/keepalive-helper.js @@ -0,0 +1,99 @@ +// Utility functions to help testing keepalive requests. + +// Returns a URL to an iframe that loads a keepalive URL on iframe loaded. 
+// +// The keepalive URL points to a target that stores `token`. The token will then +// be posted back on iframe loaded to the parent document. +// `method` defaults to GET. +// `frameOrigin` to specify the origin of the iframe to load. If not set, +// default to a different site origin. +// `requestOrigin` to specify the origin of the fetch request target. +// `sendOn` to specify the name of the event when the keepalive request should +// be sent instead of the default 'load'. +// `mode` to specify the fetch request's CORS mode. +// `disallowOrigin` to ask the iframe to set up a server that forbids CORS +// requests. +function getKeepAliveIframeUrl(token, method, { + frameOrigin = 'DEFAULT', + requestOrigin = '', + sendOn = 'load', + mode = 'cors', + disallowOrigin = false +} = {}) { + const https = location.protocol.startsWith('https'); + frameOrigin = frameOrigin === 'DEFAULT' ? + get_host_info()[https ? 'HTTPS_NOTSAMESITE_ORIGIN' : 'HTTP_NOTSAMESITE_ORIGIN'] : + frameOrigin; + return `${frameOrigin}/fetch/api/resources/keepalive-iframe.html?` + + `token=${token}&` + + `method=${method}&` + + `sendOn=${sendOn}&` + + `mode=${mode}&` + (disallowOrigin ? `disallowOrigin=1&` : ``) + + `origin=${requestOrigin}`; +} + +// Returns a different-site URL to an iframe that loads a keepalive URL. +// +// By default, the keepalive URL points to a target that redirects to another +// same-origin destination storing `token`. The token will then be posted back +// to parent document. +// +// The URL redirects can be customized from `origin1` to `origin2` if provided. +// Sets `withPreflight` to true to get URL enabling preflight. +function getKeepAliveAndRedirectIframeUrl( + token, origin1, origin2, withPreflight) { + const https = location.protocol.startsWith('https'); + const frameOrigin = + get_host_info()[https ? 'HTTPS_NOTSAMESITE_ORIGIN' : 'HTTP_NOTSAMESITE_ORIGIN']; + return `${frameOrigin}/fetch/api/resources/keepalive-redirect-iframe.html?` + + `token=${token}&` + + `origin1=${origin1}&` + + `origin2=${origin2}&` + (withPreflight ? `with-headers` : ``); +} + +async function iframeLoaded(iframe) { + return new Promise((resolve) => iframe.addEventListener('load', resolve)); +} + +// Obtains the token from the message posted by iframe after loading +// `getKeepAliveAndRedirectIframeUrl()`. +async function getTokenFromMessage() { + return new Promise((resolve) => { + window.addEventListener('message', (event) => { + resolve(event.data); + }, {once: true}); + }); +} + +// Tells if `token` has been stored in the server. +async function queryToken(token) { + const response = await fetch(`../resources/stash-take.py?key=${token}`); + const json = await response.json(); + return json; +} + +// In order to parallelize the work, we are going to have an async_test +// for the rest of the work. Note that we want the serialized behavior +// for the steps so far, so we don't want to make the entire test case +// an async_test. 
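+// Note: assertStashedTokenAsync() below assumes the keepalive target stored the
+// literal string 'on' under `token` in the server stash (that is the value
+// queryToken() reads back via stash-take.py); pass {shouldPass: false} when the
+// request is expected never to reach the server.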
+function assertStashedTokenAsync(testName, token, {shouldPass = true} = {}) { + async_test((test) => { + new Promise((resolve) => test.step_timeout(resolve, 3000)) + .then(() => { + return queryToken(token); + }) + .then((result) => { + assert_equals(result, 'on'); + }) + .then(() => { + test.done(); + }) + .catch(test.step_func((e) => { + if (shouldPass) { + assert_unreached(e); + } else { + test.done(); + } + })); + }, testName); +} diff --git a/test/wpt/tests/fetch/api/resources/keepalive-iframe.html b/test/wpt/tests/fetch/api/resources/keepalive-iframe.html new file mode 100644 index 0000000..335a1f8 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/keepalive-iframe.html @@ -0,0 +1,21 @@ + + + + + diff --git a/test/wpt/tests/fetch/api/resources/keepalive-redirect-iframe.html b/test/wpt/tests/fetch/api/resources/keepalive-redirect-iframe.html new file mode 100644 index 0000000..fdee00f --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/keepalive-redirect-iframe.html @@ -0,0 +1,23 @@ + + + + + diff --git a/test/wpt/tests/fetch/api/resources/keepalive-redirect-window.html b/test/wpt/tests/fetch/api/resources/keepalive-redirect-window.html new file mode 100644 index 0000000..c186507 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/keepalive-redirect-window.html @@ -0,0 +1,42 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/api/resources/method.py b/test/wpt/tests/fetch/api/resources/method.py new file mode 100644 index 0000000..c1a111b --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/method.py @@ -0,0 +1,18 @@ +from wptserve.utils import isomorphic_encode + +def main(request, response): + headers = [] + if b"cors" in request.GET: + headers.append((b"Access-Control-Allow-Origin", b"*")) + headers.append((b"Access-Control-Allow-Credentials", b"true")) + headers.append((b"Access-Control-Allow-Methods", b"GET, POST, PUT, FOO")) + headers.append((b"Access-Control-Allow-Headers", b"x-test, x-foo")) + headers.append((b"Access-Control-Expose-Headers", b"x-request-method")) + + headers.append((b"x-request-method", isomorphic_encode(request.method))) + headers.append((b"x-request-content-type", request.headers.get(b"Content-Type", b"NO"))) + headers.append((b"x-request-content-length", request.headers.get(b"Content-Length", b"NO"))) + headers.append((b"x-request-content-encoding", request.headers.get(b"Content-Encoding", b"NO"))) + headers.append((b"x-request-content-language", request.headers.get(b"Content-Language", b"NO"))) + headers.append((b"x-request-content-location", request.headers.get(b"Content-Location", b"NO"))) + return headers, request.body diff --git a/test/wpt/tests/fetch/api/resources/preflight.py b/test/wpt/tests/fetch/api/resources/preflight.py new file mode 100644 index 0000000..f983ef9 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/preflight.py @@ -0,0 +1,78 @@ +def main(request, response): + headers = [(b"Content-Type", b"text/plain")] + stashed_data = {b'control_request_headers': b"", b'preflight': b"0", b'preflight_referrer': b""} + + token = None + if b"token" in request.GET: + token = request.GET.first(b"token") + + if b"origin" in request.GET: + for origin in request.GET[b'origin'].split(b", "): + headers.append((b"Access-Control-Allow-Origin", origin)) + else: + headers.append((b"Access-Control-Allow-Origin", b"*")) + + if b"clear-stash" in request.GET: + if request.server.stash.take(token) is not None: + return headers, b"1" + else: + return headers, b"0" + + if b"credentials" in request.GET: + 
headers.append((b"Access-Control-Allow-Credentials", b"true")) + + if request.method == u"OPTIONS": + if not b"Access-Control-Request-Method" in request.headers: + response.set_error(400, u"No Access-Control-Request-Method header") + return b"ERROR: No access-control-request-method in preflight!" + + if request.headers.get(b"Accept", b"") != b"*/*": + response.set_error(400, u"Request does not have 'Accept: */*' header") + return b"ERROR: Invalid access in preflight!" + + if b"control_request_headers" in request.GET: + stashed_data[b'control_request_headers'] = request.headers.get(b"Access-Control-Request-Headers", None) + + if b"max_age" in request.GET: + headers.append((b"Access-Control-Max-Age", request.GET[b'max_age'])) + + if b"allow_headers" in request.GET: + headers.append((b"Access-Control-Allow-Headers", request.GET[b'allow_headers'])) + + if b"allow_methods" in request.GET: + headers.append((b"Access-Control-Allow-Methods", request.GET[b'allow_methods'])) + + preflight_status = 200 + if b"preflight_status" in request.GET: + preflight_status = int(request.GET.first(b"preflight_status")) + + stashed_data[b'preflight'] = b"1" + stashed_data[b'preflight_referrer'] = request.headers.get(b"Referer", b"") + stashed_data[b'preflight_user_agent'] = request.headers.get(b"User-Agent", b"") + if token: + request.server.stash.put(token, stashed_data) + + return preflight_status, headers, b"" + + + if token: + data = request.server.stash.take(token) + if data: + stashed_data = data + + if b"checkUserAgentHeaderInPreflight" in request.GET and request.headers.get(b"User-Agent") != stashed_data[b'preflight_user_agent']: + return 400, headers, b"ERROR: No user-agent header in preflight" + + #use x-* headers for returning value to bodyless responses + headers.append((b"Access-Control-Expose-Headers", b"x-did-preflight, x-control-request-headers, x-referrer, x-preflight-referrer, x-origin")) + headers.append((b"x-did-preflight", stashed_data[b'preflight'])) + if stashed_data[b'control_request_headers'] != None: + headers.append((b"x-control-request-headers", stashed_data[b'control_request_headers'])) + headers.append((b"x-preflight-referrer", stashed_data[b'preflight_referrer'])) + headers.append((b"x-referrer", request.headers.get(b"Referer", b""))) + headers.append((b"x-origin", request.headers.get(b"Origin", b""))) + + if token: + request.server.stash.put(token, stashed_data) + + return headers, b"" diff --git a/test/wpt/tests/fetch/api/resources/redirect-empty-location.py b/test/wpt/tests/fetch/api/resources/redirect-empty-location.py new file mode 100644 index 0000000..1a5f7fe --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/redirect-empty-location.py @@ -0,0 +1,3 @@ +def main(request, response): + headers = [(b"Location", b"")] + return 302, headers, b"" diff --git a/test/wpt/tests/fetch/api/resources/redirect.h2.py b/test/wpt/tests/fetch/api/resources/redirect.h2.py new file mode 100644 index 0000000..6937014 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/redirect.h2.py @@ -0,0 +1,14 @@ +from wptserve.utils import isomorphic_decode, isomorphic_encode + +def handle_headers(frame, request, response): + status = 302 + if b'redirect_status' in request.GET: + status = int(request.GET[b'redirect_status']) + response.status = status + + if b'location' in request.GET: + url = isomorphic_decode(request.GET[b'location']) + response.headers[b'Location'] = isomorphic_encode(url) + + response.headers.update([('Content-Type', 'text/plain')]) + response.write_status_headers() diff --git 
a/test/wpt/tests/fetch/api/resources/redirect.py b/test/wpt/tests/fetch/api/resources/redirect.py new file mode 100644 index 0000000..d52ab5f --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/redirect.py @@ -0,0 +1,73 @@ +import time + +from urllib.parse import urlencode, urlparse + +from wptserve.utils import isomorphic_decode, isomorphic_encode + +def main(request, response): + stashed_data = {b'count': 0, b'preflight': b"0"} + status = 302 + headers = [(b"Content-Type", b"text/plain"), + (b"Cache-Control", b"no-cache"), + (b"Pragma", b"no-cache")] + if b"Origin" in request.headers: + headers.append((b"Access-Control-Allow-Origin", request.headers.get(b"Origin", b""))) + headers.append((b"Access-Control-Allow-Credentials", b"true")) + else: + headers.append((b"Access-Control-Allow-Origin", b"*")) + + token = None + if b"token" in request.GET: + token = request.GET.first(b"token") + data = request.server.stash.take(token) + if data: + stashed_data = data + + if request.method == u"OPTIONS": + if b"allow_headers" in request.GET: + headers.append((b"Access-Control-Allow-Headers", request.GET[b'allow_headers'])) + stashed_data[b'preflight'] = b"1" + #Preflight is not redirected: return 200 + if not b"redirect_preflight" in request.GET: + if token: + request.server.stash.put(request.GET.first(b"token"), stashed_data) + return 200, headers, u"" + + if b"redirect_status" in request.GET: + status = int(request.GET[b'redirect_status']) + elif b"redirect_status" in request.POST: + status = int(request.POST[b'redirect_status']) + + stashed_data[b'count'] += 1 + + if b"location" in request.GET: + url = isomorphic_decode(request.GET[b'location']) + if b"simple" not in request.GET: + scheme = urlparse(url).scheme + if scheme == u"" or scheme == u"http" or scheme == u"https": + url += u"&" if u'?' in url else u"?" 
+ #keep url parameters in location + url_parameters = {} + for item in request.GET.items(): + url_parameters[isomorphic_decode(item[0])] = isomorphic_decode(item[1][0]) + url += urlencode(url_parameters) + #make sure location changes during redirection loop + url += u"&count=" + str(stashed_data[b'count']) + headers.append((b"Location", isomorphic_encode(url))) + + if b"redirect_referrerpolicy" in request.GET: + headers.append((b"Referrer-Policy", request.GET[b'redirect_referrerpolicy'])) + + if b"delay" in request.GET: + time.sleep(float(request.GET.first(b"delay", 0)) / 1E3) + + if token: + request.server.stash.put(request.GET.first(b"token"), stashed_data) + if b"max_count" in request.GET: + max_count = int(request.GET[b'max_count']) + #stop redirecting and return count + if stashed_data[b'count'] > max_count: + # -1 because the last is not a redirection + return str(stashed_data[b'count'] - 1) + + return status, headers, u"" diff --git a/test/wpt/tests/fetch/api/resources/sandboxed-iframe.html b/test/wpt/tests/fetch/api/resources/sandboxed-iframe.html new file mode 100644 index 0000000..6e5d506 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/sandboxed-iframe.html @@ -0,0 +1,34 @@ + + + + diff --git a/test/wpt/tests/fetch/api/resources/script-with-header.py b/test/wpt/tests/fetch/api/resources/script-with-header.py new file mode 100644 index 0000000..9a9c70e --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/script-with-header.py @@ -0,0 +1,7 @@ +def main(request, response): + headers = [(b"Content-type", request.GET.first(b"mime"))] + if b"content" in request.GET and request.GET.first(b"content") == b"empty": + content = b'' + else: + content = b"console.log('Script loaded')" + return 200, headers, content diff --git a/test/wpt/tests/fetch/api/resources/stash-put.py b/test/wpt/tests/fetch/api/resources/stash-put.py new file mode 100644 index 0000000..0530e1b --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/stash-put.py @@ -0,0 +1,19 @@ +from wptserve.utils import isomorphic_decode + +def main(request, response): + if request.method == u'OPTIONS': + # CORS preflight + response.headers.set(b'Access-Control-Allow-Origin', b'*') + response.headers.set(b'Access-Control-Allow-Methods', b'*') + response.headers.set(b'Access-Control-Allow-Headers', b'*') + return 'done' + + url_dir = u'/'.join(request.url_parts.path.split(u'/')[:-1]) + u'/' + key = request.GET.first(b'key') + value = request.GET.first(b'value') + # value here must be a text string. It will be json.dump()'ed in stash-take.py. 
+ request.server.stash.put(key, isomorphic_decode(value), url_dir) + + if b'disallow_origin' not in request.GET: + response.headers.set(b'Access-Control-Allow-Origin', b'*') + return 'done' diff --git a/test/wpt/tests/fetch/api/resources/stash-take.py b/test/wpt/tests/fetch/api/resources/stash-take.py new file mode 100644 index 0000000..e6db80d --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/stash-take.py @@ -0,0 +1,9 @@ +from wptserve.handlers import json_handler + + +@json_handler +def main(request, response): + dir = u'/'.join(request.url_parts.path.split(u'/')[:-1]) + u'/' + key = request.GET.first(b"key") + response.headers.set(b'Access-Control-Allow-Origin', b'*') + return request.server.stash.take(key, dir) diff --git a/test/wpt/tests/fetch/api/resources/status.py b/test/wpt/tests/fetch/api/resources/status.py new file mode 100644 index 0000000..05a59d5 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/status.py @@ -0,0 +1,11 @@ +from wptserve.utils import isomorphic_encode + +def main(request, response): + code = int(request.GET.first(b"code", 200)) + text = request.GET.first(b"text", b"OMG") + content = request.GET.first(b"content", b"") + type = request.GET.first(b"type", b"") + status = (code, text) + headers = [(b"Content-Type", type), + (b"X-Request-Method", isomorphic_encode(request.method))] + return status, headers, content diff --git a/test/wpt/tests/fetch/api/resources/sw-intercept-abort.js b/test/wpt/tests/fetch/api/resources/sw-intercept-abort.js new file mode 100644 index 0000000..19d4b18 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/sw-intercept-abort.js @@ -0,0 +1,19 @@ +async function messageClient(clientId, message) { + const client = await clients.get(clientId); + client.postMessage(message); +} + +addEventListener('fetch', event => { + let resolve; + const promise = new Promise(r => resolve = r); + + function onAborted() { + messageClient(event.clientId, event.request.signal.reason); + resolve(); + } + + messageClient(event.clientId, 'fetch event has arrived'); + + event.respondWith(promise.then(() => new Response('hello'))); + event.request.signal.addEventListener('abort', onAborted); +}); diff --git a/test/wpt/tests/fetch/api/resources/sw-intercept.js b/test/wpt/tests/fetch/api/resources/sw-intercept.js new file mode 100644 index 0000000..b8166b6 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/sw-intercept.js @@ -0,0 +1,10 @@ +async function broadcast(msg) { + for (const client of await clients.matchAll()) { + client.postMessage(msg); + } +} + +addEventListener('fetch', event => { + event.waitUntil(broadcast(event.request.url)); + event.respondWith(fetch(event.request)); +}); diff --git a/test/wpt/tests/fetch/api/resources/top.txt b/test/wpt/tests/fetch/api/resources/top.txt new file mode 100644 index 0000000..83a3157 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/top.txt @@ -0,0 +1 @@ +top \ No newline at end of file diff --git a/test/wpt/tests/fetch/api/resources/trickle.py b/test/wpt/tests/fetch/api/resources/trickle.py new file mode 100644 index 0000000..99833f1 --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/trickle.py @@ -0,0 +1,15 @@ +import time + +def main(request, response): + delay = float(request.GET.first(b"ms", 500)) / 1E3 + count = int(request.GET.first(b"count", 50)) + # Read request body + request.body + time.sleep(delay) + if not b"notype" in request.GET: + response.headers.set(b"Content-type", b"text/plain") + response.write_status_headers() + time.sleep(delay) + for i in range(count): + 
response.writer.write_content(b"TEST_TRICKLE\n") + time.sleep(delay) diff --git a/test/wpt/tests/fetch/api/resources/utils.js b/test/wpt/tests/fetch/api/resources/utils.js new file mode 100644 index 0000000..3b20ecc --- /dev/null +++ b/test/wpt/tests/fetch/api/resources/utils.js @@ -0,0 +1,105 @@ +var RESOURCES_DIR = "../resources/"; + +function dirname(path) { + return path.replace(/\/[^\/]*$/, '/') +} + +function checkRequest(request, ExpectedValuesDict) { + for (var attribute in ExpectedValuesDict) { + switch(attribute) { + case "headers": + for (var key in ExpectedValuesDict["headers"].keys()) { + assert_equals(request["headers"].get(key), ExpectedValuesDict["headers"].get(key), + "Check headers attribute has " + key + ":" + ExpectedValuesDict["headers"].get(key)); + } + break; + + case "body": + //for checking body's content, a dedicated asyncronous/promise test should be used + assert_true(request["headers"].has("Content-Type") , "Check request has body using Content-Type header") + break; + + case "method": + case "referrer": + case "referrerPolicy": + case "credentials": + case "cache": + case "redirect": + case "integrity": + case "url": + case "destination": + assert_equals(request[attribute], ExpectedValuesDict[attribute], "Check " + attribute + " attribute") + break; + + default: + break; + } + } +} + +function stringToArray(str) { + var array = new Uint8Array(str.length); + for (var i=0, strLen = str.length; i < strLen; i++) + array[i] = str.charCodeAt(i); + return array; +} + +function encode_utf8(str) +{ + if (self.TextEncoder) + return (new TextEncoder).encode(str); + return stringToArray(unescape(encodeURIComponent(str))); +} + +function validateBufferFromString(buffer, expectedValue, message) +{ + return assert_array_equals(new Uint8Array(buffer !== undefined ? 
buffer : []), stringToArray(expectedValue), message); +} + +function validateStreamFromString(reader, expectedValue, retrievedArrayBuffer) { + // Passing Uint8Array for byte streams; non-byte streams will simply ignore it + return reader.read(new Uint8Array(64)).then(function(data) { + if (!data.done) { + assert_true(data.value instanceof Uint8Array, "Fetch ReadableStream chunks should be Uint8Array"); + var newBuffer; + if (retrievedArrayBuffer) { + newBuffer = new Uint8Array(data.value.length + retrievedArrayBuffer.length); + newBuffer.set(retrievedArrayBuffer, 0); + newBuffer.set(data.value, retrievedArrayBuffer.length); + } else { + newBuffer = data.value; + } + return validateStreamFromString(reader, expectedValue, newBuffer); + } + validateBufferFromString(retrievedArrayBuffer, expectedValue, "Retrieve and verify stream"); + }); +} + +function validateStreamFromPartialString(reader, expectedValue, retrievedArrayBuffer) { + // Passing Uint8Array for byte streams; non-byte streams will simply ignore it + return reader.read(new Uint8Array(64)).then(function(data) { + if (!data.done) { + assert_true(data.value instanceof Uint8Array, "Fetch ReadableStream chunks should be Uint8Array"); + var newBuffer; + if (retrievedArrayBuffer) { + newBuffer = new Uint8Array(data.value.length + retrievedArrayBuffer.length); + newBuffer.set(retrievedArrayBuffer, 0); + newBuffer.set(data.value, retrievedArrayBuffer.length); + } else { + newBuffer = data.value; + } + return validateStreamFromPartialString(reader, expectedValue, newBuffer); + } + + var string = new TextDecoder("utf-8").decode(retrievedArrayBuffer); + return assert_true(string.search(expectedValue) != -1, "Retrieve and verify stream"); + }); +} + +// From streams tests +function delay(milliseconds) +{ + return new Promise(function(resolve) { + step_timeout(resolve, milliseconds); + }); +} diff --git a/test/wpt/tests/fetch/api/response/json.any.js b/test/wpt/tests/fetch/api/response/json.any.js new file mode 100644 index 0000000..15f050e --- /dev/null +++ b/test/wpt/tests/fetch/api/response/json.any.js @@ -0,0 +1,14 @@ +// See also /xhr/json.any.js + +promise_test(async t => { + const response = await fetch(`data:,\uFEFF{ "b": 1, "a": 2, "b": 3 }`); + const json = await response.json(); + assert_array_equals(Object.keys(json), ["b", "a"]); + assert_equals(json.a, 2); + assert_equals(json.b, 3); +}, "Ensure the correct JSON parser is used"); + +promise_test(async t => { + const response = await fetch("/xhr/resources/utf16-bom.json"); + return promise_rejects_js(t, SyntaxError, response.json()); +}, "Ensure UTF-16 results in an error"); diff --git a/test/wpt/tests/fetch/api/response/many-empty-chunks-crash.html b/test/wpt/tests/fetch/api/response/many-empty-chunks-crash.html new file mode 100644 index 0000000..fe5e7d4 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/many-empty-chunks-crash.html @@ -0,0 +1,14 @@ + + + diff --git a/test/wpt/tests/fetch/api/response/multi-globals/current/current.html b/test/wpt/tests/fetch/api/response/multi-globals/current/current.html new file mode 100644 index 0000000..9bb6e0b --- /dev/null +++ b/test/wpt/tests/fetch/api/response/multi-globals/current/current.html @@ -0,0 +1,3 @@ + +Current page used as a test helper + diff --git a/test/wpt/tests/fetch/api/response/multi-globals/incumbent/incumbent.html b/test/wpt/tests/fetch/api/response/multi-globals/incumbent/incumbent.html new file mode 100644 index 0000000..f63372e --- /dev/null +++ 
b/test/wpt/tests/fetch/api/response/multi-globals/incumbent/incumbent.html @@ -0,0 +1,16 @@ + +Incumbent page used as a test helper + + + + + diff --git a/test/wpt/tests/fetch/api/response/multi-globals/relevant/relevant.html b/test/wpt/tests/fetch/api/response/multi-globals/relevant/relevant.html new file mode 100644 index 0000000..44f42ed --- /dev/null +++ b/test/wpt/tests/fetch/api/response/multi-globals/relevant/relevant.html @@ -0,0 +1,2 @@ + +Relevant page used as a test helper diff --git a/test/wpt/tests/fetch/api/response/multi-globals/url-parsing.html b/test/wpt/tests/fetch/api/response/multi-globals/url-parsing.html new file mode 100644 index 0000000..5f2f42a --- /dev/null +++ b/test/wpt/tests/fetch/api/response/multi-globals/url-parsing.html @@ -0,0 +1,27 @@ + +Response.redirect URL parsing, with multiple globals in play + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/response/response-body-read-task-handling.html b/test/wpt/tests/fetch/api/response/response-body-read-task-handling.html new file mode 100644 index 0000000..64b0755 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-body-read-task-handling.html @@ -0,0 +1,86 @@ + + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/response/response-cancel-stream.any.js b/test/wpt/tests/fetch/api/response/response-cancel-stream.any.js new file mode 100644 index 0000000..91140d1 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-cancel-stream.any.js @@ -0,0 +1,64 @@ +// META: global=window,worker +// META: title=Response consume blob and http bodies +// META: script=../resources/utils.js + +promise_test(function(test) { + return new Response(new Blob([], { "type" : "text/plain" })).body.cancel(); +}, "Cancelling a starting blob Response stream"); + +promise_test(function(test) { + var response = new Response(new Blob(["This is data"], { "type" : "text/plain" })); + var reader = response.body.getReader(); + reader.read(); + return reader.cancel(); +}, "Cancelling a loading blob Response stream"); + +promise_test(function(test) { + var response = new Response(new Blob(["T"], { "type" : "text/plain" })); + var reader = response.body.getReader(); + + var closedPromise = reader.closed.then(function() { + return reader.cancel(); + }); + reader.read().then(function readMore({done, value}) { + if (!done) return reader.read().then(readMore); + }); + return closedPromise; +}, "Cancelling a closed blob Response stream"); + +promise_test(function(test) { + return fetch(RESOURCES_DIR + "trickle.py?ms=30&count=100").then(function(response) { + return response.body.cancel(); + }); +}, "Cancelling a starting Response stream"); + +promise_test(function() { + return fetch(RESOURCES_DIR + "trickle.py?ms=30&count=100").then(function(response) { + var reader = response.body.getReader(); + return reader.read().then(function() { + return reader.cancel(); + }); + }); +}, "Cancelling a loading Response stream"); + +promise_test(function() { + async function readAll(reader) { + while (true) { + const {value, done} = await reader.read(); + if (done) + return; + } + } + + return fetch(RESOURCES_DIR + "top.txt").then(function(response) { + var reader = response.body.getReader(); + return readAll(reader).then(() => reader.cancel()); + }); +}, "Cancelling a closed Response stream"); + +promise_test(async () => { + const response = await fetch(RESOURCES_DIR + "top.txt"); + const { body } = response; + await body.cancel(); + assert_equals(body, response.body, ".body should not change after cancellation"); +}, "Accessing 
.body after canceling it"); diff --git a/test/wpt/tests/fetch/api/response/response-clone-iframe.window.js b/test/wpt/tests/fetch/api/response/response-clone-iframe.window.js new file mode 100644 index 0000000..da54616 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-clone-iframe.window.js @@ -0,0 +1,32 @@ +// Verify that calling Response clone() in a detached iframe doesn't crash. +// Regression test for https://crbug.com/1082688. + +'use strict'; + +promise_test(async () => { + // Wait for the document body to be available. + await new Promise(resolve => { + onload = resolve; + }); + + window.iframe = document.createElement('iframe'); + document.body.appendChild(iframe); + iframe.srcdoc = ` + +`; + + await new Promise(resolve => { + onmessage = evt => { + if (evt.data === 'okay') { + resolve(); + } + }; + }); + + // If it got here without crashing, the test passed. +}, 'clone within removed iframe should not crash'); diff --git a/test/wpt/tests/fetch/api/response/response-clone.any.js b/test/wpt/tests/fetch/api/response/response-clone.any.js new file mode 100644 index 0000000..f5cda75 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-clone.any.js @@ -0,0 +1,140 @@ +// META: global=window,worker +// META: title=Response clone +// META: script=../resources/utils.js + +var defaultValues = { "type" : "default", + "url" : "", + "ok" : true, + "status" : 200, + "statusText" : "" +}; + +var response = new Response(); +var clonedResponse = response.clone(); +test(function() { + for (var attributeName in defaultValues) { + var expectedValue = defaultValues[attributeName]; + assert_equals(clonedResponse[attributeName], expectedValue, + "Expect default response." + attributeName + " is " + expectedValue); + } +}, "Check Response's clone with default values, without body"); + +var body = "This is response body"; +var headersInit = { "name" : "value" }; +var responseInit = { "status" : 200, + "statusText" : "GOOD", + "headers" : headersInit +}; +var response = new Response(body, responseInit); +var clonedResponse = response.clone(); +test(function() { + assert_equals(clonedResponse.status, responseInit["status"], + "Expect response.status is " + responseInit["status"]); + assert_equals(clonedResponse.statusText, responseInit["statusText"], + "Expect response.statusText is " + responseInit["statusText"]); + assert_equals(clonedResponse.headers.get("name"), "value", + "Expect response.headers has name:value header"); +}, "Check Response's clone has the expected attribute values"); + +promise_test(function(test) { + return validateStreamFromString(response.body.getReader(), body); +}, "Check orginal response's body after cloning"); + +promise_test(function(test) { + return validateStreamFromString(clonedResponse.body.getReader(), body); +}, "Check cloned response's body"); + +promise_test(function(test) { + var disturbedResponse = new Response("data"); + return disturbedResponse.text().then(function() { + assert_true(disturbedResponse.bodyUsed, "response is disturbed"); + assert_throws_js(TypeError, function() { disturbedResponse.clone(); }, + "Expect TypeError exception"); + }); +}, "Cannot clone a disturbed response"); + +promise_test(function(t) { + var clone; + var result; + var response; + return fetch('../resources/trickle.py?count=2&delay=100').then(function(res) { + clone = res.clone(); + response = res; + return clone.text(); + }).then(function(r) { + assert_equals(r.length, 26); + result = r; + return response.text(); + }).then(function(r) { + assert_equals(r, 
result, "cloned responses should provide the same data"); + }); + }, 'Cloned responses should provide the same data'); + +promise_test(function(t) { + var clone; + return fetch('../resources/trickle.py?count=2&delay=100').then(function(res) { + clone = res.clone(); + res.body.cancel(); + assert_true(res.bodyUsed); + assert_false(clone.bodyUsed); + return clone.arrayBuffer(); + }).then(function(r) { + assert_equals(r.byteLength, 26); + assert_true(clone.bodyUsed); + }); +}, 'Cancelling stream should not affect cloned one'); + +function testReadableStreamClone(initialBuffer, bufferType) +{ + promise_test(function(test) { + var response = new Response(new ReadableStream({start : function(controller) { + controller.enqueue(initialBuffer); + controller.close(); + }})); + + var clone = response.clone(); + var stream1 = response.body; + var stream2 = clone.body; + + var buffer; + return stream1.getReader().read().then(function(data) { + assert_false(data.done); + assert_equals(data.value, initialBuffer, "Buffer of being-cloned response stream is the same as the original buffer"); + return stream2.getReader().read(); + }).then(function(data) { + assert_false(data.done); + if (initialBuffer instanceof ArrayBuffer) { + assert_true(data.value instanceof ArrayBuffer, "Cloned buffer is ArrayBufer"); + assert_equals(initialBuffer.byteLength, data.value.byteLength, "Length equal"); + assert_array_equals(new Uint8Array(data.value), new Uint8Array(initialBuffer), "Cloned buffer chunks have the same content"); + } else if (initialBuffer instanceof DataView) { + assert_true(data.value instanceof DataView, "Cloned buffer is DataView"); + assert_equals(initialBuffer.byteLength, data.value.byteLength, "Lengths equal"); + assert_equals(initialBuffer.byteOffset, data.value.byteOffset, "Offsets equal"); + for (let i = 0; i < initialBuffer.byteLength; ++i) { + assert_equals( + data.value.getUint8(i), initialBuffer.getUint8(i), "Mismatch at byte ${i}"); + } + } else { + assert_array_equals(data.value, initialBuffer, "Cloned buffer chunks have the same content"); + } + assert_equals(Object.getPrototypeOf(data.value), Object.getPrototypeOf(initialBuffer), "Cloned buffers have the same type"); + assert_not_equals(data.value, initialBuffer, "Buffer of cloned response stream is a clone of the original buffer"); + }); + }, "Check response clone use structureClone for teed ReadableStreams (" + bufferType + "chunk)"); +} + +var arrayBuffer = new ArrayBuffer(16); +testReadableStreamClone(new Int8Array(arrayBuffer, 1), "Int8Array"); +testReadableStreamClone(new Int16Array(arrayBuffer, 2, 2), "Int16Array"); +testReadableStreamClone(new Int32Array(arrayBuffer), "Int32Array"); +testReadableStreamClone(arrayBuffer, "ArrayBuffer"); +testReadableStreamClone(new Uint8Array(arrayBuffer), "Uint8Array"); +testReadableStreamClone(new Uint8ClampedArray(arrayBuffer), "Uint8ClampedArray"); +testReadableStreamClone(new Uint16Array(arrayBuffer, 2), "Uint16Array"); +testReadableStreamClone(new Uint32Array(arrayBuffer), "Uint32Array"); +testReadableStreamClone(typeof BigInt64Array === "function" ? new BigInt64Array(arrayBuffer) : undefined, "BigInt64Array"); +testReadableStreamClone(typeof BigUint64Array === "function" ? 
new BigUint64Array(arrayBuffer) : undefined, "BigUint64Array"); +testReadableStreamClone(new Float32Array(arrayBuffer), "Float32Array"); +testReadableStreamClone(new Float64Array(arrayBuffer), "Float64Array"); +testReadableStreamClone(new DataView(arrayBuffer, 2, 8), "DataView"); diff --git a/test/wpt/tests/fetch/api/response/response-consume-empty.any.js b/test/wpt/tests/fetch/api/response/response-consume-empty.any.js new file mode 100644 index 0000000..0fa85ec --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-consume-empty.any.js @@ -0,0 +1,99 @@ +// META: global=window,worker +// META: title=Response consume empty bodies + +function checkBodyText(test, response) { + return response.text().then(function(bodyAsText) { + assert_equals(bodyAsText, "", "Resolved value should be empty"); + assert_false(response.bodyUsed); + }); +} + +function checkBodyBlob(test, response) { + return response.blob().then(function(bodyAsBlob) { + var promise = new Promise(function(resolve, reject) { + var reader = new FileReader(); + reader.onload = function(evt) { + resolve(reader.result) + }; + reader.onerror = function() { + reject("Blob's reader failed"); + }; + reader.readAsText(bodyAsBlob); + }); + return promise.then(function(body) { + assert_equals(body, "", "Resolved value should be empty"); + assert_false(response.bodyUsed); + }); + }); +} + +function checkBodyArrayBuffer(test, response) { + return response.arrayBuffer().then(function(bodyAsArrayBuffer) { + assert_equals(bodyAsArrayBuffer.byteLength, 0, "Resolved value should be empty"); + assert_false(response.bodyUsed); + }); +} + +function checkBodyJSON(test, response) { + return response.json().then( + function(bodyAsJSON) { + assert_unreached("JSON parsing should fail"); + }, + function() { + assert_false(response.bodyUsed); + }); +} + +function checkBodyFormData(test, response) { + return response.formData().then(function(bodyAsFormData) { + assert_true(bodyAsFormData instanceof FormData, "Should receive a FormData"); + assert_false(response.bodyUsed); + }); +} + +function checkBodyFormDataError(test, response) { + return promise_rejects_js(test, TypeError, response.formData()).then(function() { + assert_false(response.bodyUsed); + }); +} + +function checkResponseWithNoBody(bodyType, checkFunction, headers = []) { + promise_test(function(test) { + var response = new Response(undefined, { "headers": headers }); + assert_false(response.bodyUsed); + return checkFunction(test, response); + }, "Consume response's body as " + bodyType); +} + +checkResponseWithNoBody("text", checkBodyText); +checkResponseWithNoBody("blob", checkBodyBlob); +checkResponseWithNoBody("arrayBuffer", checkBodyArrayBuffer); +checkResponseWithNoBody("json (error case)", checkBodyJSON); +checkResponseWithNoBody("formData with correct multipart type (error case)", checkBodyFormDataError, [["Content-Type", 'multipart/form-data; boundary="boundary"']]); +checkResponseWithNoBody("formData with correct urlencoded type", checkBodyFormData, [["Content-Type", "application/x-www-form-urlencoded;charset=UTF-8"]]); +checkResponseWithNoBody("formData without correct type (error case)", checkBodyFormDataError); + +function checkResponseWithEmptyBody(bodyType, body, asText) { + promise_test(function(test) { + var response = new Response(body); + assert_false(response.bodyUsed, "bodyUsed is false at init"); + if (asText) { + return response.text().then(function(bodyAsString) { + assert_equals(bodyAsString.length, 0, "Resolved value should be empty"); + 
assert_true(response.bodyUsed, "bodyUsed is true after being consumed"); + }); + } + return response.arrayBuffer().then(function(bodyAsArrayBuffer) { + assert_equals(bodyAsArrayBuffer.byteLength, 0, "Resolved value should be empty"); + assert_true(response.bodyUsed, "bodyUsed is true after being consumed"); + }); + }, "Consume empty " + bodyType + " response body as " + (asText ? "text" : "arrayBuffer")); +} + +checkResponseWithEmptyBody("blob", new Blob([], { "type" : "text/plain" }), false); +checkResponseWithEmptyBody("text", "", false); +checkResponseWithEmptyBody("blob", new Blob([], { "type" : "text/plain" }), true); +checkResponseWithEmptyBody("text", "", true); +checkResponseWithEmptyBody("URLSearchParams", new URLSearchParams(""), true); +checkResponseWithEmptyBody("FormData", new FormData(), true); +checkResponseWithEmptyBody("ArrayBuffer", new ArrayBuffer(), true); diff --git a/test/wpt/tests/fetch/api/response/response-consume-stream.any.js b/test/wpt/tests/fetch/api/response/response-consume-stream.any.js new file mode 100644 index 0000000..befce62 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-consume-stream.any.js @@ -0,0 +1,61 @@ +// META: global=window,worker +// META: title=Response consume +// META: script=../resources/utils.js + +promise_test(function(test) { + var body = ""; + var response = new Response(""); + return validateStreamFromString(response.body.getReader(), ""); +}, "Read empty text response's body as readableStream"); + +promise_test(function(test) { + var response = new Response(new Blob([], { "type" : "text/plain" })); + return validateStreamFromString(response.body.getReader(), ""); +}, "Read empty blob response's body as readableStream"); + +var formData = new FormData(); +formData.append("name", "value"); +var textData = JSON.stringify("This is response's body"); +var blob = new Blob([textData], { "type" : "text/plain" }); +var urlSearchParamsData = "name=value"; +var urlSearchParams = new URLSearchParams(urlSearchParamsData); + +for (const mode of [undefined, "byob"]) { + promise_test(function(test) { + var response = new Response(blob); + return validateStreamFromString(response.body.getReader({ mode }), textData); + }, `Read blob response's body as readableStream with mode=${mode}`); + + promise_test(function(test) { + var response = new Response(textData); + return validateStreamFromString(response.body.getReader({ mode }), textData); + }, `Read text response's body as readableStream with mode=${mode}`); + + promise_test(function(test) { + var response = new Response(urlSearchParams); + return validateStreamFromString(response.body.getReader({ mode }), urlSearchParamsData); + }, `Read URLSearchParams response's body as readableStream with mode=${mode}`); + + promise_test(function(test) { + var arrayBuffer = new ArrayBuffer(textData.length); + var int8Array = new Int8Array(arrayBuffer); + for (var cptr = 0; cptr < textData.length; cptr++) + int8Array[cptr] = textData.charCodeAt(cptr); + + return validateStreamFromString(new Response(arrayBuffer).body.getReader({ mode }), textData); + }, `Read array buffer response's body as readableStream with mode=${mode}`); + + promise_test(function(test) { + var response = new Response(formData); + return validateStreamFromPartialString(response.body.getReader({ mode }), + "Content-Disposition: form-data; name=\"name\"\r\n\r\nvalue"); + }, `Read form data response's body as readableStream with mode=${mode}`); +} + +test(function() { + assert_equals(Response.error().body, null); +}, "Getting an 
error Response stream"); + +test(function() { + assert_equals(Response.redirect("/").body, null); +}, "Getting a redirect Response stream"); diff --git a/test/wpt/tests/fetch/api/response/response-consume.html b/test/wpt/tests/fetch/api/response/response-consume.html new file mode 100644 index 0000000..89fc49f --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-consume.html @@ -0,0 +1,317 @@ + + + + + Response consume + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/api/response/response-error-from-stream.any.js b/test/wpt/tests/fetch/api/response/response-error-from-stream.any.js new file mode 100644 index 0000000..118eb7d --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-error-from-stream.any.js @@ -0,0 +1,59 @@ +// META: global=window,worker +// META: title=Response Receives Propagated Error from ReadableStream + +function newStreamWithStartError() { + var err = new Error("Start error"); + return [new ReadableStream({ + start(controller) { + controller.error(err); + } + }), + err] +} + +function newStreamWithPullError() { + var err = new Error("Pull error"); + return [new ReadableStream({ + pull(controller) { + controller.error(err); + } + }), + err] +} + +function runRequestPromiseTest([stream, err], responseReaderMethod, testDescription) { + promise_test(test => { + return promise_rejects_exactly( + test, + err, + new Response(stream)[responseReaderMethod](), + 'CustomTestError should propagate' + ) + }, testDescription) +} + + +promise_test(test => { + var [stream, err] = newStreamWithStartError(); + return promise_rejects_exactly(test, err, stream.getReader().read(), 'CustomTestError should propagate') +}, "ReadableStreamDefaultReader Promise receives ReadableStream start() Error") + +promise_test(test => { + var [stream, err] = newStreamWithPullError(); + return promise_rejects_exactly(test, err, stream.getReader().read(), 'CustomTestError should propagate') +}, "ReadableStreamDefaultReader Promise receives ReadableStream pull() Error") + + +// test start() errors for all Body reader methods +runRequestPromiseTest(newStreamWithStartError(), 'arrayBuffer', 'ReadableStream start() Error propagates to Response.arrayBuffer() Promise'); +runRequestPromiseTest(newStreamWithStartError(), 'blob', 'ReadableStream start() Error propagates to Response.blob() Promise'); +runRequestPromiseTest(newStreamWithStartError(), 'formData', 'ReadableStream start() Error propagates to Response.formData() Promise'); +runRequestPromiseTest(newStreamWithStartError(), 'json', 'ReadableStream start() Error propagates to Response.json() Promise'); +runRequestPromiseTest(newStreamWithStartError(), 'text', 'ReadableStream start() Error propagates to Response.text() Promise'); + +// test pull() errors for all Body reader methods +runRequestPromiseTest(newStreamWithPullError(), 'arrayBuffer', 'ReadableStream pull() Error propagates to Response.arrayBuffer() Promise'); +runRequestPromiseTest(newStreamWithPullError(), 'blob', 'ReadableStream pull() Error propagates to Response.blob() Promise'); +runRequestPromiseTest(newStreamWithPullError(), 'formData', 'ReadableStream pull() Error propagates to Response.formData() Promise'); +runRequestPromiseTest(newStreamWithPullError(), 'json', 'ReadableStream pull() Error propagates to Response.json() Promise'); +runRequestPromiseTest(newStreamWithPullError(), 'text', 'ReadableStream pull() Error propagates to Response.text() Promise'); diff --git a/test/wpt/tests/fetch/api/response/response-error.any.js 
b/test/wpt/tests/fetch/api/response/response-error.any.js new file mode 100644 index 0000000..a76bc43 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-error.any.js @@ -0,0 +1,27 @@ +// META: global=window,worker +// META: title=Response error + +var invalidStatus = [0, 100, 199, 600, 1000]; +invalidStatus.forEach(function(status) { + test(function() { + assert_throws_js(RangeError, function() { new Response("", { "status" : status }); }, + "Expect RangeError exception when status is " + status); + },"Throws RangeError when responseInit's status is " + status); +}); + +var invalidStatusText = ["\n", "Ä€"]; +invalidStatusText.forEach(function(statusText) { + test(function() { + assert_throws_js(TypeError, function() { new Response("", { "statusText" : statusText }); }, + "Expect TypeError exception " + statusText); + },"Throws TypeError when responseInit's statusText is " + statusText); +}); + +var nullBodyStatus = [204, 205, 304]; +nullBodyStatus.forEach(function(status) { + test(function() { + assert_throws_js(TypeError, + function() { new Response("body", {"status" : status }); }, + "Expect TypeError exception "); + },"Throws TypeError when building a response with body and a body status of " + status); +}); diff --git a/test/wpt/tests/fetch/api/response/response-from-stream.any.js b/test/wpt/tests/fetch/api/response/response-from-stream.any.js new file mode 100644 index 0000000..ea5192b --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-from-stream.any.js @@ -0,0 +1,23 @@ +// META: global=window,worker + +"use strict"; + +test(() => { + const stream = new ReadableStream(); + stream.getReader(); + assert_throws_js(TypeError, () => new Response(stream)); +}, "Constructing a Response with a stream on which getReader() is called"); + +test(() => { + const stream = new ReadableStream(); + stream.getReader().read(); + assert_throws_js(TypeError, () => new Response(stream)); +}, "Constructing a Response with a stream on which read() is called"); + +promise_test(async () => { + const stream = new ReadableStream({ pull: c => c.enqueue(new Uint8Array()) }), + reader = stream.getReader(); + await reader.read(); + reader.releaseLock(); + assert_throws_js(TypeError, () => new Response(stream)); +}, "Constructing a Response with a stream on which read() and releaseLock() are called"); diff --git a/test/wpt/tests/fetch/api/response/response-init-001.any.js b/test/wpt/tests/fetch/api/response/response-init-001.any.js new file mode 100644 index 0000000..559e49a --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-init-001.any.js @@ -0,0 +1,64 @@ +// META: global=window,worker +// META: title=Response init: simple cases + +var defaultValues = { "type" : "default", + "url" : "", + "ok" : true, + "status" : 200, + "statusText" : "", + "body" : null +}; + +var statusCodes = { "givenValues" : [200, 300, 400, 500, 599], + "expectedValues" : [200, 300, 400, 500, 599] +}; +var statusTexts = { "givenValues" : ["", "OK", "with space", String.fromCharCode(0x80)], + "expectedValues" : ["", "OK", "with space", String.fromCharCode(0x80)] +}; +var initValuesDict = { "status" : statusCodes, + "statusText" : statusTexts +}; + +function isOkStatus(status) { + return 200 <= status && 299 >= status; +} + +var response = new Response(); +for (var attributeName in defaultValues) { + test(function() { + var expectedValue = defaultValues[attributeName]; + assert_equals(response[attributeName], expectedValue, + "Expect default response." 
+ attributeName + " is " + expectedValue); + }, "Check default value for " + attributeName + " attribute"); +} + +for (var attributeName in initValuesDict) { + test(function() { + var valuesToTest = initValuesDict[attributeName]; + for (var valueIdx in valuesToTest["givenValues"]) { + var givenValue = valuesToTest["givenValues"][valueIdx]; + var expectedValue = valuesToTest["expectedValues"][valueIdx]; + var responseInit = {}; + responseInit[attributeName] = givenValue; + var response = new Response("", responseInit); + assert_equals(response[attributeName], expectedValue, + "Expect response." + attributeName + " is " + expectedValue + + " when initialized with " + givenValue); + assert_equals(response.ok, isOkStatus(response.status), + "Expect response.ok is " + isOkStatus(response.status)); + } + }, "Check " + attributeName + " init values and associated getter"); +} + +test(function() { + const response1 = new Response(""); + assert_equals(response1.headers, response1.headers); + + const response2 = new Response("", {"headers": {"X-Foo": "bar"}}); + assert_equals(response2.headers, response2.headers); + const headers = response2.headers; + response2.headers.set("X-Foo", "quux"); + assert_equals(headers, response2.headers); + headers.set("X-Other-Header", "baz"); + assert_equals(headers, response2.headers); +}, "Test that Response.headers has the [SameObject] extended attribute"); diff --git a/test/wpt/tests/fetch/api/response/response-init-002.any.js b/test/wpt/tests/fetch/api/response/response-init-002.any.js new file mode 100644 index 0000000..6c0a46e --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-init-002.any.js @@ -0,0 +1,61 @@ +// META: global=window,worker +// META: title=Response init: body and headers +// META: script=../resources/utils.js + +test(function() { + var headerDict = {"name1": "value1", + "name2": "value2", + "name3": "value3" + }; + var headers = new Headers(headerDict); + var response = new Response("", { "headers" : headers }) + for (var name in headerDict) { + assert_equals(response.headers.get(name), headerDict[name], + "response's headers has " + name + " : " + headerDict[name]); + } +}, "Initialize Response with headers values"); + +function checkResponseInit(body, bodyType, expectedTextBody) { + promise_test(function(test) { + var response = new Response(body); + var resHeaders = response.headers; + var mime = resHeaders.get("Content-Type"); + assert_true(mime && mime.search(bodyType) > -1, "Content-Type header should be \"" + bodyType + "\" "); + return response.text().then(function(bodyAsText) { + //not equals: cannot guess formData exact value + assert_true(bodyAsText.search(expectedTextBody) > -1, "Retrieve and verify response body"); + }); + }, "Initialize Response's body with " + bodyType); +} + +var blob = new Blob(["This is a blob"], {type: "application/octet-binary"}); +var formaData = new FormData(); +formaData.append("name", "value"); +var urlSearchParams = "URLSearchParams are not supported"; +//avoid test timeout if not implemented +if (self.URLSearchParams) + urlSearchParams = new URLSearchParams("name=value"); +var usvString = "This is a USVString" + +checkResponseInit(blob, "application/octet-binary", "This is a blob"); +checkResponseInit(formaData, "multipart/form-data", "name=\"name\"\r\n\r\nvalue"); +checkResponseInit(urlSearchParams, "application/x-www-form-urlencoded;charset=UTF-8", "name=value"); +checkResponseInit(usvString, "text/plain;charset=UTF-8", "This is a USVString"); + +promise_test(function(test) { + var body = 
"This is response body"; + var response = new Response(body); + return validateStreamFromString(response.body.getReader(), body); +}, "Read Response's body as readableStream"); + +promise_test(function(test) { + var response = new Response("This is my fork", {"headers" : [["Content-Type", ""]]}); + return response.blob().then(function(blob) { + assert_equals(blob.type, "", "Blob type should be the empty string"); + }); +}, "Testing empty Response Content-Type header"); + +test(function() { + var response = new Response(null, {status: 204}); + assert_equals(response.body, null); +}, "Testing null Response body"); diff --git a/test/wpt/tests/fetch/api/response/response-init-contenttype.any.js b/test/wpt/tests/fetch/api/response/response-init-contenttype.any.js new file mode 100644 index 0000000..3a7744c --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-init-contenttype.any.js @@ -0,0 +1,125 @@ +test(() => { + const response = new Response(); + assert_equals(response.headers.get("Content-Type"), null); +}, "Default Content-Type for Response with empty body"); + +test(() => { + const blob = new Blob([]); + const response = new Response(blob); + assert_equals(response.headers.get("Content-Type"), null); +}, "Default Content-Type for Response with Blob body (no type set)"); + +test(() => { + const blob = new Blob([], { type: "" }); + const response = new Response(blob); + assert_equals(response.headers.get("Content-Type"), null); +}, "Default Content-Type for Response with Blob body (empty type)"); + +test(() => { + const blob = new Blob([], { type: "a/b; c=d" }); + const response = new Response(blob); + assert_equals(response.headers.get("Content-Type"), "a/b; c=d"); +}, "Default Content-Type for Response with Blob body (set type)"); + +test(() => { + const buffer = new Uint8Array(); + const response = new Response(buffer); + assert_equals(response.headers.get("Content-Type"), null); +}, "Default Content-Type for Response with buffer source body"); + +promise_test(async () => { + const formData = new FormData(); + formData.append("a", "b"); + const response = new Response(formData); + const boundary = (await response.text()).split("\r\n")[0].slice(2); + assert_equals( + response.headers.get("Content-Type"), + `multipart/form-data; boundary=${boundary}`, + ); +}, "Default Content-Type for Response with FormData body"); + +test(() => { + const usp = new URLSearchParams(); + const response = new Response(usp); + assert_equals( + response.headers.get("Content-Type"), + "application/x-www-form-urlencoded;charset=UTF-8", + ); +}, "Default Content-Type for Response with URLSearchParams body"); + +test(() => { + const response = new Response(""); + assert_equals( + response.headers.get("Content-Type"), + "text/plain;charset=UTF-8", + ); +}, "Default Content-Type for Response with string body"); + +test(() => { + const stream = new ReadableStream(); + const response = new Response(stream); + assert_equals(response.headers.get("Content-Type"), null); +}, "Default Content-Type for Response with ReadableStream body"); + +// ----------------------------------------------------------------------------- + +const OVERRIDE_MIME = "test/only; mime=type"; + +function responseWithOverrideMime(body) { + return new Response( + body, + { headers: { "Content-Type": OVERRIDE_MIME } }, + ); +} + +test(() => { + const response = responseWithOverrideMime(undefined); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with empty body"); + +test(() 
=> { + const blob = new Blob([]); + const response = responseWithOverrideMime(blob); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with Blob body (no type set)"); + +test(() => { + const blob = new Blob([], { type: "" }); + const response = responseWithOverrideMime(blob); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with Blob body (empty type)"); + +test(() => { + const blob = new Blob([], { type: "a/b; c=d" }); + const response = responseWithOverrideMime(blob); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with Blob body (set type)"); + +test(() => { + const buffer = new Uint8Array(); + const response = responseWithOverrideMime(buffer); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with buffer source body"); + +test(() => { + const formData = new FormData(); + const response = responseWithOverrideMime(formData); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with FormData body"); + +test(() => { + const usp = new URLSearchParams(); + const response = responseWithOverrideMime(usp); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with URLSearchParams body"); + +test(() => { + const response = responseWithOverrideMime(""); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with string body"); + +test(() => { + const stream = new ReadableStream(); + const response = responseWithOverrideMime(stream); + assert_equals(response.headers.get("Content-Type"), OVERRIDE_MIME); +}, "Can override Content-Type for Response with ReadableStream body"); diff --git a/test/wpt/tests/fetch/api/response/response-static-error.any.js b/test/wpt/tests/fetch/api/response/response-static-error.any.js new file mode 100644 index 0000000..1f8c49a --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-static-error.any.js @@ -0,0 +1,34 @@ +// META: global=window,worker +// META: title=Response: error static method + +test(function() { + var responseError = Response.error(); + assert_equals(responseError.type, "error", "Network error response's type is error"); + assert_equals(responseError.status, 0, "Network error response's status is 0"); + assert_equals(responseError.statusText, "", "Network error response's statusText is empty"); + assert_equals(responseError.body, null, "Network error response's body is null"); + + assert_true(responseError.headers.entries().next().done, "Headers should be empty"); +}, "Check response returned by static method error()"); + +promise_test (async function() { + let response = await fetch("../resources/data.json"); + + try { + response.headers.append('name', 'value'); + } catch (e) { + assert_equals(e.constructor.name, "TypeError"); + } + + assert_not_equals(response.headers.get("name"), "value", "response headers should be immutable"); +}, "Ensure response headers are immutable"); + +test(function() { + const headers = Response.error().headers; + + // Avoid false positives if expected API is not available + assert_true(!!headers); + assert_equals(typeof headers.append, 'function'); + + assert_throws_js(TypeError, function () { headers.append('name', 'value'); }); +}, "the 'guard' of the Headers instance should be 
immutable"); diff --git a/test/wpt/tests/fetch/api/response/response-static-json.any.js b/test/wpt/tests/fetch/api/response/response-static-json.any.js new file mode 100644 index 0000000..5ec79e6 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-static-json.any.js @@ -0,0 +1,96 @@ +// META: global=window,worker +// META: title=Response: json static method + +const APPLICATION_JSON = "application/json"; +const FOO_BAR = "foo/bar"; + +const INIT_TESTS = [ + [undefined, 200, "", APPLICATION_JSON, {}], + [{ status: 400 }, 400, "", APPLICATION_JSON, {}], + [{ statusText: "foo" }, 200, "foo", APPLICATION_JSON, {}], + [{ headers: {} }, 200, "", APPLICATION_JSON, {}], + [{ headers: { "content-type": FOO_BAR } }, 200, "", FOO_BAR, {}], + [{ headers: { "x-foo": "bar" } }, 200, "", APPLICATION_JSON, { "x-foo": "bar" }], +]; + +for (const [init, expectedStatus, expectedStatusText, expectedContentType, expectedHeaders] of INIT_TESTS) { + promise_test(async function () { + const response = Response.json("hello world", init); + assert_equals(response.type, "default", "Response's type is default"); + assert_equals(response.status, expectedStatus, "Response's status is " + expectedStatus); + assert_equals(response.statusText, expectedStatusText, "Response's statusText is " + JSON.stringify(expectedStatusText)); + assert_equals(response.headers.get("content-type"), expectedContentType, "Response's content-type is " + expectedContentType); + for (const key in expectedHeaders) { + assert_equals(response.headers.get(key), expectedHeaders[key], "Response's header " + key + " is " + JSON.stringify(expectedHeaders[key])); + } + + const data = await response.json(); + assert_equals(data, "hello world", "Response's body is 'hello world'"); + }, `Check response returned by static json() with init ${JSON.stringify(init)}`); +} + +const nullBodyStatus = [204, 205, 304]; +for (const status of nullBodyStatus) { + test(function () { + assert_throws_js( + TypeError, + function () { + Response.json("hello world", { status: status }); + }, + ); + }, `Throws TypeError when calling static json() with a status of ${status}`); +} + +promise_test(async function () { + const response = Response.json({ foo: "bar" }); + const data = await response.json(); + assert_equals(typeof data, "object", "Response's json body is an object"); + assert_equals(data.foo, "bar", "Response's json body is { foo: 'bar' }"); +}, "Check static json() encodes JSON objects correctly"); + +test(function () { + assert_throws_js( + TypeError, + function () { + Response.json(Symbol("foo")); + }, + ); +}, "Check static json() throws when data is not encodable"); + +test(function () { + const a = { b: 1 }; + a.a = a; + assert_throws_js( + TypeError, + function () { + Response.json(a); + }, + ); +}, "Check static json() throws when data is circular"); + +promise_test(async function () { + class CustomError extends Error { + name = "CustomError"; + } + assert_throws_js( + CustomError, + function () { + Response.json({ get foo() { throw new CustomError("bar") }}); + } + ) +}, "Check static json() propagates JSON serializer errors"); + +const encodingChecks = [ + ["ðŒ†", [34, 240, 157, 140, 134, 34]], + ["\uDF06\uD834", [34, 92, 117, 100, 102, 48, 54, 92, 117, 100, 56, 51, 52, 34]], + ["\uDEAD", [34, 92, 117, 100, 101, 97, 100, 34]], +]; + +for (const [input, expected] of encodingChecks) { + promise_test(async function () { + const response = Response.json(input); + const buffer = await response.arrayBuffer(); + const data = new Uint8Array(buffer); + 
assert_array_equals(data, expected); + }, `Check response returned by static json() with input ${input}`); +} diff --git a/test/wpt/tests/fetch/api/response/response-static-redirect.any.js b/test/wpt/tests/fetch/api/response/response-static-redirect.any.js new file mode 100644 index 0000000..b16c56d --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-static-redirect.any.js @@ -0,0 +1,40 @@ +// META: global=window,worker +// META: title=Response: redirect static method + +var url = "http://test.url:1234/"; +test(function() { + const redirectResponse = Response.redirect(url); + assert_equals(redirectResponse.type, "default"); + assert_false(redirectResponse.redirected); + assert_false(redirectResponse.ok); + assert_equals(redirectResponse.status, 302, "Default redirect status is 302"); + assert_equals(redirectResponse.headers.get("Location"), url, + "redirected response has Location header with the correct url"); + assert_equals(redirectResponse.statusText, ""); +}, "Check default redirect response"); + +[301, 302, 303, 307, 308].forEach(function(status) { + test(function() { + const redirectResponse = Response.redirect(url, status); + assert_equals(redirectResponse.type, "default"); + assert_false(redirectResponse.redirected); + assert_false(redirectResponse.ok); + assert_equals(redirectResponse.status, status, "Redirect status is " + status); + assert_equals(redirectResponse.headers.get("Location"), url); + assert_equals(redirectResponse.statusText, ""); + }, "Check response returned by static method redirect(), status = " + status); +}); + +test(function() { + var invalidUrl = "http://:This is not an url"; + assert_throws_js(TypeError, function() { Response.redirect(invalidUrl); }, + "Expect TypeError exception"); +}, "Check error returned when giving invalid url to redirect()"); + +var invalidRedirectStatus = [200, 309, 400, 500]; +invalidRedirectStatus.forEach(function(invalidStatus) { + test(function() { + assert_throws_js(RangeError, function() { Response.redirect(url, invalidStatus); }, + "Expect RangeError exception"); + }, "Check error returned when giving invalid status to redirect(), status = " + invalidStatus); +}); diff --git a/test/wpt/tests/fetch/api/response/response-stream-bad-chunk.any.js b/test/wpt/tests/fetch/api/response/response-stream-bad-chunk.any.js new file mode 100644 index 0000000..d3d92e1 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-bad-chunk.any.js @@ -0,0 +1,24 @@ +// META: global=window,worker +// META: title=Response causes TypeError from bad chunk type + +function runChunkTest(responseReaderMethod, testDescription) { + promise_test(test => { + let stream = new ReadableStream({ + start(controller) { + controller.enqueue("not Uint8Array"); + controller.close(); + } + }); + + return promise_rejects_js(test, TypeError, + new Response(stream)[responseReaderMethod](), + 'TypeError should propagate' + ) + }, testDescription) +} + +runChunkTest('arrayBuffer', 'ReadableStream with non-Uint8Array chunk passed to Response.arrayBuffer() causes TypeError'); +runChunkTest('blob', 'ReadableStream with non-Uint8Array chunk passed to Response.blob() causes TypeError'); +runChunkTest('formData', 'ReadableStream with non-Uint8Array chunk passed to Response.formData() causes TypeError'); +runChunkTest('json', 'ReadableStream with non-Uint8Array chunk passed to Response.json() causes TypeError'); +runChunkTest('text', 'ReadableStream with non-Uint8Array chunk passed to Response.text() causes TypeError'); diff --git 
a/test/wpt/tests/fetch/api/response/response-stream-disturbed-1.any.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-1.any.js new file mode 100644 index 0000000..64f65f1 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-1.any.js @@ -0,0 +1,44 @@ +// META: global=window,worker +// META: title=Consuming Response body after getting a ReadableStream +// META: script=./response-stream-disturbed-util.js + +async function createResponseWithReadableStream(bodySource, callback) { + const response = await responseFromBodySource(bodySource); + const reader = response.body.getReader(); + reader.releaseLock(); + return callback(response); +} + +for (const bodySource of ["fetch", "stream", "string"]) { + promise_test(function() { + return createResponseWithReadableStream(bodySource, function(response) { + return response.blob().then(function(blob) { + assert_true(blob instanceof Blob); + }); + }); + }, `Getting blob after getting the Response body - not disturbed, not locked (body source: ${bodySource})`); + + promise_test(function() { + return createResponseWithReadableStream(bodySource, function(response) { + return response.text().then(function(text) { + assert_true(text.length > 0); + }); + }); + }, `Getting text after getting the Response body - not disturbed, not locked (body source: ${bodySource})`); + + promise_test(function() { + return createResponseWithReadableStream(bodySource, function(response) { + return response.json().then(function(json) { + assert_equals(typeof json, "object"); + }); + }); + }, `Getting json after getting the Response body - not disturbed, not locked (body source: ${bodySource})`); + + promise_test(function() { + return createResponseWithReadableStream(bodySource, function(response) { + return response.arrayBuffer().then(function(arrayBuffer) { + assert_true(arrayBuffer.byteLength > 0); + }); + }); + }, `Getting arrayBuffer after getting the Response body - not disturbed, not locked (body source: ${bodySource})`); +} diff --git a/test/wpt/tests/fetch/api/response/response-stream-disturbed-2.any.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-2.any.js new file mode 100644 index 0000000..c46a180 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-2.any.js @@ -0,0 +1,35 @@ +// META: global=window,worker +// META: title=Consuming Response body after getting a ReadableStream +// META: script=./response-stream-disturbed-util.js + +async function createResponseWithLockedReadableStream(bodySource, callback) { + const response = await responseFromBodySource(bodySource); + response.body.getReader(); + return callback(response); +} + +for (const bodySource of ["fetch", "stream", "string"]) { + promise_test(function(test) { + return createResponseWithLockedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.blob()); + }); + }, `Getting blob after getting a locked Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithLockedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.text()); + }); + }, `Getting text after getting a locked Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithLockedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.json()); + }); + }, `Getting json after getting a locked Response body (body source: ${bodySource})`); 
+ + promise_test(function(test) { + return createResponseWithLockedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.arrayBuffer()); + }); + }, `Getting arrayBuffer after getting a locked Response body (body source: ${bodySource})`); +} diff --git a/test/wpt/tests/fetch/api/response/response-stream-disturbed-3.any.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-3.any.js new file mode 100644 index 0000000..35fb086 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-3.any.js @@ -0,0 +1,36 @@ +// META: global=window,worker +// META: title=Consuming Response body after getting a ReadableStream +// META: script=./response-stream-disturbed-util.js + +async function createResponseWithDisturbedReadableStream(bodySource, callback) { + const response = await responseFromBodySource(bodySource); + const reader = response.body.getReader(); + reader.read(); + return callback(response); +} + +for (const bodySource of ["fetch", "stream", "string"]) { + promise_test(function(test) { + return createResponseWithDisturbedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.blob()); + }); + }, `Getting blob after reading the Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithDisturbedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.text()); + }); + }, `Getting text after reading the Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithDisturbedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.json()); + }); + }, `Getting json after reading the Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithDisturbedReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.arrayBuffer()); + }); + }, `Getting arrayBuffer after reading the Response body (body source: ${bodySource})`); +} diff --git a/test/wpt/tests/fetch/api/response/response-stream-disturbed-4.any.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-4.any.js new file mode 100644 index 0000000..490672f --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-4.any.js @@ -0,0 +1,35 @@ +// META: global=window,worker +// META: title=Consuming Response body after getting a ReadableStream +// META: script=./response-stream-disturbed-util.js + +async function createResponseWithCancelledReadableStream(bodySource, callback) { + const response = await responseFromBodySource(bodySource); + response.body.cancel(); + return callback(response); +} + +for (const bodySource of ["fetch", "stream", "string"]) { + promise_test(function(test) { + return createResponseWithCancelledReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.blob()); + }); + }, `Getting blob after cancelling the Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithCancelledReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.text()); + }); + }, `Getting text after cancelling the Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithCancelledReadableStream(bodySource, function(response) { + return 
promise_rejects_js(test, TypeError, response.json()); + }); + }, `Getting json after cancelling the Response body (body source: ${bodySource})`); + + promise_test(function(test) { + return createResponseWithCancelledReadableStream(bodySource, function(response) { + return promise_rejects_js(test, TypeError, response.arrayBuffer()); + }); + }, `Getting arrayBuffer after cancelling the Response body (body source: ${bodySource})`); +} diff --git a/test/wpt/tests/fetch/api/response/response-stream-disturbed-5.any.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-5.any.js new file mode 100644 index 0000000..348fc39 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-5.any.js @@ -0,0 +1,19 @@ +// META: global=window,worker +// META: title=Consuming Response body after getting a ReadableStream +// META: script=./response-stream-disturbed-util.js + +for (const bodySource of ["fetch", "stream", "string"]) { + for (const consumeAs of ["blob", "text", "json", "arrayBuffer"]) { + promise_test( + async () => { + const response = await responseFromBodySource(bodySource); + response[consumeAs](); + assert_not_equals(response.body, null); + assert_throws_js(TypeError, function () { + response.body.getReader(); + }); + }, + `Getting a body reader after consuming as ${consumeAs} (body source: ${bodySource})`, + ); + } +} diff --git a/test/wpt/tests/fetch/api/response/response-stream-disturbed-6.any.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-6.any.js new file mode 100644 index 0000000..61d8544 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-6.any.js @@ -0,0 +1,76 @@ +// META: global=window,worker +// META: title=ReadableStream disturbed tests, via Response's bodyUsed property + +"use strict"; + +test(() => { + const stream = new ReadableStream(); + const response = new Response(stream); + assert_false(response.bodyUsed, "On construction"); + + const reader = stream.getReader(); + assert_false(response.bodyUsed, "After getting a reader"); + + reader.read(); + assert_true(response.bodyUsed, "After calling stream.read()"); +}, "A non-closed stream on which read() has been called"); + +test(() => { + const stream = new ReadableStream(); + const response = new Response(stream); + assert_false(response.bodyUsed, "On construction"); + + const reader = stream.getReader(); + assert_false(response.bodyUsed, "After getting a reader"); + + reader.cancel(); + assert_true(response.bodyUsed, "After calling stream.cancel()"); +}, "A non-closed stream on which cancel() has been called"); + +test(() => { + const stream = new ReadableStream({ + start(c) { + c.close(); + } + }); + const response = new Response(stream); + assert_false(response.bodyUsed, "On construction"); + + const reader = stream.getReader(); + assert_false(response.bodyUsed, "After getting a reader"); + + reader.read(); + assert_true(response.bodyUsed, "After calling stream.read()"); +}, "A closed stream on which read() has been called"); + +test(() => { + const stream = new ReadableStream({ + start(c) { + c.error(new Error("some error")); + } + }); + const response = new Response(stream); + assert_false(response.bodyUsed, "On construction"); + + const reader = stream.getReader(); + assert_false(response.bodyUsed, "After getting a reader"); + + reader.read().then(() => { }, () => { }); + assert_true(response.bodyUsed, "After calling stream.read()"); +}, "An errored stream on which read() has been called"); + +test(() => { + const stream = new ReadableStream({ + 
start(c) { + c.error(new Error("some error")); + } + }); + const response = new Response(stream); + assert_false(response.bodyUsed, "On construction"); + + const reader = stream.getReader(); + assert_false(response.bodyUsed, "After getting a reader"); + + reader.cancel().then(() => { }, () => { }); + assert_true(response.bodyUsed, "After calling stream.cancel()"); +}, "An errored stream on which cancel() has been called"); diff --git a/test/wpt/tests/fetch/api/response/response-stream-disturbed-by-pipe.any.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-by-pipe.any.js new file mode 100644 index 0000000..5341b75 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-by-pipe.any.js @@ -0,0 +1,17 @@ +// META: global=window,worker + +test(() => { + const r = new Response(new ReadableStream()); + // highWaterMark: 0 means that nothing will actually be read from the body. + r.body.pipeTo(new WritableStream({}, {highWaterMark: 0})); + assert_true(r.bodyUsed, 'bodyUsed should be true'); +}, 'using pipeTo on Response body should disturb it synchronously'); + +test(() => { + const r = new Response(new ReadableStream()); + r.body.pipeThrough({ + writable: new WritableStream({}, {highWaterMark: 0}), + readable: new ReadableStream() + }); + assert_true(r.bodyUsed, 'bodyUsed should be true'); +}, 'using pipeThrough on Response body should disturb it synchronously'); diff --git a/test/wpt/tests/fetch/api/response/response-stream-disturbed-util.js b/test/wpt/tests/fetch/api/response/response-stream-disturbed-util.js new file mode 100644 index 0000000..50bb586 --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-disturbed-util.js @@ -0,0 +1,17 @@ +const BODY = '{"key": "value"}'; + +function responseFromBodySource(bodySource) { + if (bodySource === "fetch") { + return fetch("../resources/data.json"); + } else if (bodySource === "stream") { + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode(BODY)); + controller.close(); + }, + }); + return new Response(stream); + } else { + return new Response(BODY); + } +} diff --git a/test/wpt/tests/fetch/api/response/response-stream-with-broken-then.any.js b/test/wpt/tests/fetch/api/response/response-stream-with-broken-then.any.js new file mode 100644 index 0000000..8fef66c --- /dev/null +++ b/test/wpt/tests/fetch/api/response/response-stream-with-broken-then.any.js @@ -0,0 +1,117 @@ +// META: global=window,worker +// META: script=../resources/utils.js + +promise_test(async () => { + // t.add_cleanup doesn't work when Object.prototype.then is overwritten, so + // these tests use add_completion_callback for cleanup instead. 
+ add_completion_callback(() => delete Object.prototype.then); + const hello = new TextEncoder().encode('hello'); + const bye = new TextEncoder().encode('bye'); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(hello); + controller.close(); + } + }); + const resp = new Response(rs); + Object.prototype.then = (onFulfilled) => { + delete Object.prototype.then; + onFulfilled({done: false, value: bye}); + }; + const text = await resp.text(); + delete Object.prototype.then; + assert_equals(text, 'hello', 'The value should be "hello".'); +}, 'Attempt to inject {done: false, value: bye} via Object.prototype.then.'); + +promise_test(async (t) => { + add_completion_callback(() => delete Object.prototype.then); + const hello = new TextEncoder().encode('hello'); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(hello); + controller.close(); + } + }); + const resp = new Response(rs); + Object.prototype.then = (onFulfilled) => { + delete Object.prototype.then; + onFulfilled({done: false, value: undefined}); + }; + const text = await resp.text(); + delete Object.prototype.then; + assert_equals(text, 'hello', 'The value should be "hello".'); +}, 'Attempt to inject value: undefined via Object.prototype.then.'); + +promise_test(async (t) => { + add_completion_callback(() => delete Object.prototype.then); + const hello = new TextEncoder().encode('hello'); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(hello); + controller.close(); + } + }); + const resp = new Response(rs); + Object.prototype.then = (onFulfilled) => { + delete Object.prototype.then; + onFulfilled(undefined); + }; + const text = await resp.text(); + delete Object.prototype.then; + assert_equals(text, 'hello', 'The value should be "hello".'); +}, 'Attempt to inject undefined via Object.prototype.then.'); + +promise_test(async (t) => { + add_completion_callback(() => delete Object.prototype.then); + const hello = new TextEncoder().encode('hello'); + const rs = new ReadableStream({ + start(controller) { + controller.enqueue(hello); + controller.close(); + } + }); + const resp = new Response(rs); + Object.prototype.then = (onFulfilled) => { + delete Object.prototype.then; + onFulfilled(8.2); + }; + const text = await resp.text(); + delete Object.prototype.then; + assert_equals(text, 'hello', 'The value should be "hello".'); +}, 'Attempt to inject 8.2 via Object.prototype.then.'); + +promise_test(async () => { + add_completion_callback(() => delete Object.prototype.then); + const hello = new TextEncoder().encode('hello'); + const bye = new TextEncoder().encode('bye'); + const resp = new Response(hello); + Object.prototype.then = (onFulfilled) => { + delete Object.prototype.then; + onFulfilled({done: false, value: bye}); + }; + const text = await resp.text(); + delete Object.prototype.then; + assert_equals(text, 'hello', 'The value should be "hello".'); +}, 'intercepting arraybuffer to text conversion via Object.prototype.then ' + + 'should not be possible'); + +promise_test(async () => { + add_completion_callback(() => delete Object.prototype.then); + const u8a123 = new Uint8Array([1, 2, 3]); + const u8a456 = new Uint8Array([4, 5, 6]); + const resp = new Response(u8a123); + const writtenBytes = []; + const ws = new WritableStream({ + write(chunk) { + writtenBytes.push(...Array.from(chunk)); + } + }); + Object.prototype.then = (onFulfilled) => { + delete Object.prototype.then; + onFulfilled({done: false, value: u8a456}); + }; + await resp.body.pipeTo(ws); + delete 
Object.prototype.then; + assert_array_equals(writtenBytes, u8a123, 'The value should be [1, 2, 3]'); +}, 'intercepting arraybuffer to body readable stream conversion via ' + + 'Object.prototype.then should not be possible'); diff --git a/test/wpt/tests/fetch/connection-pool/network-partition-key.html b/test/wpt/tests/fetch/connection-pool/network-partition-key.html new file mode 100644 index 0000000..60a784c --- /dev/null +++ b/test/wpt/tests/fetch/connection-pool/network-partition-key.html @@ -0,0 +1,264 @@ + + + + + Connection partitioning by site + + + + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/connection-pool/resources/network-partition-about-blank-checker.html b/test/wpt/tests/fetch/connection-pool/resources/network-partition-about-blank-checker.html new file mode 100644 index 0000000..7a8b613 --- /dev/null +++ b/test/wpt/tests/fetch/connection-pool/resources/network-partition-about-blank-checker.html @@ -0,0 +1,35 @@ + + + + + about:blank Network Partition Checker + + + + + + + diff --git a/test/wpt/tests/fetch/connection-pool/resources/network-partition-checker.html b/test/wpt/tests/fetch/connection-pool/resources/network-partition-checker.html new file mode 100644 index 0000000..b058f61 --- /dev/null +++ b/test/wpt/tests/fetch/connection-pool/resources/network-partition-checker.html @@ -0,0 +1,30 @@ + + + + + Network Partition Checker + + + + + + + + + + diff --git a/test/wpt/tests/fetch/connection-pool/resources/network-partition-iframe-checker.html b/test/wpt/tests/fetch/connection-pool/resources/network-partition-iframe-checker.html new file mode 100644 index 0000000..f76ed18 --- /dev/null +++ b/test/wpt/tests/fetch/connection-pool/resources/network-partition-iframe-checker.html @@ -0,0 +1,22 @@ + + + + + Iframe Network Partition Checker + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/connection-pool/resources/network-partition-key.js b/test/wpt/tests/fetch/connection-pool/resources/network-partition-key.js new file mode 100644 index 0000000..bd66109 --- /dev/null +++ b/test/wpt/tests/fetch/connection-pool/resources/network-partition-key.js @@ -0,0 +1,47 @@ +// Runs multiple fetches that validate connections see only a single partition_id. +// Requests are run in parallel so that they use multiple connections to maximize the +// chance of exercising all matching connections in the connection pool. Only returns +// once all requests have completed to make cleaning up server state non-racy. +function check_partition_ids(location) { + const NUM_FETCHES = 20; + + var base_url = 'SUBRESOURCE_PREFIX:&dispatch=check_partition'; + + // Not a perfect parse of the query string, but good enough for this test. + var include_credentials = base_url.search('include_credentials=true') != -1; + var exclude_credentials = base_url.search('include_credentials=false') != -1; + if (include_credentials != !exclude_credentials) + throw new Exception('Credentials mode not specified'); + + + // Run NUM_FETCHES in parallel. + var fetches = []; + for (i = 0; i < NUM_FETCHES; ++i) { + var fetch_params = { + credentials: 'omit', + mode: 'cors', + headers: { + 'Header-To-Force-CORS': 'cors' + }, + }; + + // Use a unique URL for each request, in case the caching layer serializes multiple + // requests for the same URL. + var url = `${base_url}&${token()}`; + + fetches.push(fetch(url, fetch_params).then( + function (response) { + return response.text().then(function(text) { + assert_equals(text, 'ok', `Socket unexpectedly reused`); + }); + })); + } + + // Wait for all promises to complete. 
+  return Promise.allSettled(fetches).then(function (results) {
+    results.forEach(function (result) {
+      if (result.status != 'fulfilled')
+        throw result.reason;
+    });
+  });
+}
diff --git a/test/wpt/tests/fetch/connection-pool/resources/network-partition-key.py b/test/wpt/tests/fetch/connection-pool/resources/network-partition-key.py
new file mode 100644
index 0000000..32fe499
--- /dev/null
+++ b/test/wpt/tests/fetch/connection-pool/resources/network-partition-key.py
@@ -0,0 +1,130 @@
+import mimetypes
+import os
+
+from wptserve.utils import isomorphic_decode, isomorphic_encode
+
+# Test server that tracks the last partition_id that was used with each connection for each uuid,
+# and lets consumers query if multiple different partition_ids have been used for any socket.
+#
+# Server assumes that ports aren't reused, so a client address and a server port uniquely identify
+# a connection. If that constraint is ever violated, the test will be flaky. No sockets being
+# closed for the duration of the test is sufficient to ensure that, though even if sockets are
+# closed, the OS should generally prefer to use new ports for new connections, if any are
+# available.
+def main(request, response):
+    response.headers.set(b"Cache-Control", b"no-store")
+    dispatch = request.GET.first(b"dispatch", None)
+    uuid = request.GET.first(b"uuid", None)
+    partition_id = request.GET.first(b"partition_id", None)
+
+    if not uuid or not dispatch or not partition_id:
+        return simple_response(request, response, 404, b"Not found", b"Invalid query parameters")
+
+    # Unless nocheck_partition is true, check partition_id against server_state, and update server_state.
+    stash = request.server.stash
+    test_failed = False
+    request_count = 0
+    connection_count = 0
+    if request.GET.first(b"nocheck_partition", None) != b"True":
+        # Need to grab the lock to access the Stash, since requests are made in parallel.
+        with stash.lock:
+            # Don't use server hostname here, since H2 allows multiple hosts to reuse a connection.
+            # Server IP is not currently available, unfortunately.
+            address_key = isomorphic_encode(str(request.client_address) + u"|" + str(request.url_parts.port))
+            server_state = stash.take(uuid) or {b"test_failed": False,
+                                                b"request_count": 0, b"connection_count": 0}
+            request_count = server_state[b"request_count"]
+            request_count += 1
+            server_state[b"request_count"] = request_count
+            if address_key in server_state:
+                if server_state[address_key] != partition_id:
+                    server_state[b"test_failed"] = True
+            else:
+                connection_count = server_state[b"connection_count"]
+                connection_count += 1
+                server_state[b"connection_count"] = connection_count
+            server_state[address_key] = partition_id
+            test_failed = server_state[b"test_failed"]
+            stash.put(uuid, server_state)
+
+    origin = request.headers.get(b"Origin")
+    if origin:
+        response.headers.set(b"Access-Control-Allow-Origin", origin)
+        response.headers.set(b"Access-Control-Allow-Credentials", b"true")
+
+    if request.method == u"OPTIONS":
+        return handle_preflight(request, response)
+
+    if dispatch == b"fetch_file":
+        return handle_fetch_file(request, response, partition_id, uuid)
+
+    if dispatch == b"check_partition":
+        status = request.GET.first(b"status", 200)
+        if test_failed:
+            return simple_response(request, response, status, b"OK", b"Multiple partition IDs used on a socket")
+        body = b"ok"
+        if request.GET.first(b"addcounter", False):
+            body += (". Request was sent " + str(request_count) + " times. 
" + + str(connection_count) + " connections were created.").encode('utf-8') + return simple_response(request, response, status, b"OK", body) + + if dispatch == b"clean_up": + stash.take(uuid) + if test_failed: + return simple_response(request, response, 200, b"OK", b"Test failed, but cleanup completed.") + return simple_response(request, response, 200, b"OK", b"cleanup complete") + + return simple_response(request, response, 404, b"Not Found", b"Unrecognized dispatch parameter: " + dispatch) + +def handle_preflight(request, response): + response.status = (200, b"OK") + response.headers.set(b"Access-Control-Allow-Methods", b"GET") + response.headers.set(b"Access-Control-Allow-Headers", b"header-to-force-cors") + response.headers.set(b"Access-Control-Max-Age", b"86400") + return b"Preflight request" + +def simple_response(request, response, status_code, status_message, body, content_type=b"text/plain"): + response.status = (status_code, status_message) + response.headers.set(b"Content-Type", content_type) + return body + +def handle_fetch_file(request, response, partition_id, uuid): + subresource_origin = request.GET.first(b"subresource_origin", None) + rel_path = request.GET.first(b"path", None) + + # This needs to be passed on to subresources so they all have access to it. + include_credentials = request.GET.first(b"include_credentials", None) + if not subresource_origin or not rel_path or not include_credentials: + return simple_response(request, response, 404, b"Not found", b"Invalid query parameters") + + cur_path = os.path.realpath(isomorphic_decode(__file__)) + base_path = os.path.abspath(os.path.join(os.path.dirname(cur_path), os.pardir, os.pardir, os.pardir)) + path = os.path.abspath(os.path.join(base_path, isomorphic_decode(rel_path))) + + # Basic security check. 
+ if not path.startswith(base_path): + return simple_response(request, response, 404, b"Not found", b"Invalid path") + + sandbox = request.GET.first(b"sandbox", None) + if sandbox == b"true": + response.headers.set(b"Content-Security-Policy", b"sandbox allow-scripts") + + file = open(path, mode="rb") + body = file.read() + file.close() + + subresource_path = b"/" + isomorphic_encode(os.path.relpath(isomorphic_decode(__file__), base_path)).replace(b'\\', b'/') + subresource_params = b"?partition_id=" + partition_id + b"&uuid=" + uuid + b"&subresource_origin=" + subresource_origin + b"&include_credentials=" + include_credentials + body = body.replace(b"SUBRESOURCE_PREFIX:", subresource_origin + subresource_path + subresource_params) + + other_origin = request.GET.first(b"other_origin", None) + if other_origin: + body = body.replace(b"OTHER_PREFIX:", other_origin + subresource_path + subresource_params) + + mimetypes.init() + mimetype_pair = mimetypes.guess_type(path) + mimetype = mimetype_pair[0] + + if mimetype == None or mimetype_pair[1] != None: + return simple_response(request, response, 500, b"Server Error", b"Unknown MIME type") + return simple_response(request, response, 200, b"OK", body, mimetype) diff --git a/test/wpt/tests/fetch/connection-pool/resources/network-partition-worker-checker.html b/test/wpt/tests/fetch/connection-pool/resources/network-partition-worker-checker.html new file mode 100644 index 0000000..e6b7ea7 --- /dev/null +++ b/test/wpt/tests/fetch/connection-pool/resources/network-partition-worker-checker.html @@ -0,0 +1,24 @@ + + + + + Worker Network Partition Checker + + + + + + + + + + diff --git a/test/wpt/tests/fetch/connection-pool/resources/network-partition-worker.js b/test/wpt/tests/fetch/connection-pool/resources/network-partition-worker.js new file mode 100644 index 0000000..1745edf --- /dev/null +++ b/test/wpt/tests/fetch/connection-pool/resources/network-partition-worker.js @@ -0,0 +1,15 @@ +// This tests the partition key of fetches to subresouce_origin made by the worker and +// imported scripts from subresource_origin. 
+importScripts('SUBRESOURCE_PREFIX:&dispatch=fetch_file&path=common/utils.js'); +importScripts('SUBRESOURCE_PREFIX:&dispatch=fetch_file&path=resources/testharness.js'); +importScripts('SUBRESOURCE_PREFIX:&dispatch=fetch_file&path=fetch/connection-pool/resources/network-partition-key.js'); + +async function fetch_and_reply() { + try { + await check_partition_ids(); + self.postMessage({result: 'success'}); + } catch (e) { + self.postMessage({result: 'error', details: e.message}); + } +} +fetch_and_reply(); diff --git a/test/wpt/tests/fetch/content-encoding/bad-gzip-body.any.js b/test/wpt/tests/fetch/content-encoding/bad-gzip-body.any.js new file mode 100644 index 0000000..17bc126 --- /dev/null +++ b/test/wpt/tests/fetch/content-encoding/bad-gzip-body.any.js @@ -0,0 +1,22 @@ +// META: global=window,worker + +promise_test((test) => { + return fetch("resources/bad-gzip-body.py").then(res => { + assert_equals(res.status, 200); + }); +}, "Fetching a resource with bad gzip content should still resolve"); + +[ + "arrayBuffer", + "blob", + "formData", + "json", + "text" +].forEach(method => { + promise_test(t => { + return fetch("resources/bad-gzip-body.py").then(res => { + assert_equals(res.status, 200); + return promise_rejects_js(t, TypeError, res[method]()); + }); + }, "Consuming the body of a resource with bad gzip content with " + method + "() should reject"); +}); diff --git a/test/wpt/tests/fetch/content-encoding/gzip-body.any.js b/test/wpt/tests/fetch/content-encoding/gzip-body.any.js new file mode 100644 index 0000000..37758b7 --- /dev/null +++ b/test/wpt/tests/fetch/content-encoding/gzip-body.any.js @@ -0,0 +1,16 @@ +// META: global=window,worker + +const expectedDecompressedSize = 10500; +[ + "text", + "octetstream" +].forEach(contentType => { + promise_test(async t => { + let response = await fetch(`resources/foo.${contentType}.gz`); + assert_true(response.ok); + let arrayBuffer = await response.arrayBuffer() + let u8 = new Uint8Array(arrayBuffer); + assert_equals(u8.length, expectedDecompressedSize); + }, `fetched gzip data with content type ${contentType} should be decompressed.`); +}); + diff --git a/test/wpt/tests/fetch/content-encoding/resources/bad-gzip-body.py b/test/wpt/tests/fetch/content-encoding/resources/bad-gzip-body.py new file mode 100644 index 0000000..a79b94e --- /dev/null +++ b/test/wpt/tests/fetch/content-encoding/resources/bad-gzip-body.py @@ -0,0 +1,3 @@ +def main(request, response): + headers = [(b"Content-Encoding", b"gzip")] + return headers, b"not actually gzip" diff --git a/test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz b/test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz new file mode 100644 index 0000000..f3df4cb Binary files /dev/null and b/test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz differ diff --git a/test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz.headers b/test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz.headers new file mode 100644 index 0000000..27d4f40 --- /dev/null +++ b/test/wpt/tests/fetch/content-encoding/resources/foo.octetstream.gz.headers @@ -0,0 +1,2 @@ +Content-type: application/octet-stream +Content-Encoding: gzip diff --git a/test/wpt/tests/fetch/content-encoding/resources/foo.text.gz b/test/wpt/tests/fetch/content-encoding/resources/foo.text.gz new file mode 100644 index 0000000..05a5cce Binary files /dev/null and b/test/wpt/tests/fetch/content-encoding/resources/foo.text.gz differ diff --git 
a/test/wpt/tests/fetch/content-encoding/resources/foo.text.gz.headers b/test/wpt/tests/fetch/content-encoding/resources/foo.text.gz.headers new file mode 100644 index 0000000..7def3dd --- /dev/null +++ b/test/wpt/tests/fetch/content-encoding/resources/foo.text.gz.headers @@ -0,0 +1,2 @@ +Content-type: text/plain +Content-Encoding: gzip diff --git a/test/wpt/tests/fetch/content-length/api-and-duplicate-headers.any.js b/test/wpt/tests/fetch/content-length/api-and-duplicate-headers.any.js new file mode 100644 index 0000000..8015289 --- /dev/null +++ b/test/wpt/tests/fetch/content-length/api-and-duplicate-headers.any.js @@ -0,0 +1,23 @@ +promise_test(async t => { + const response = await fetch("resources/identical-duplicates.asis"); + assert_equals(response.statusText, "BLAH"); + assert_equals(response.headers.get("test"), "x, x"); + assert_equals(response.headers.get("content-type"), "text/plain, text/plain"); + assert_equals(response.headers.get("content-length"), "6, 6"); + const text = await response.text(); + assert_equals(text, "Test.\n"); +}, "fetch() and duplicate Content-Length/Content-Type headers"); + +async_test(t => { + const xhr = new XMLHttpRequest(); + xhr.open("GET", "resources/identical-duplicates.asis"); + xhr.send(); + xhr.onload = t.step_func_done(() => { + assert_equals(xhr.statusText, "BLAH"); + assert_equals(xhr.getResponseHeader("test"), "x, x"); + assert_equals(xhr.getResponseHeader("content-type"), "text/plain, text/plain"); + assert_equals(xhr.getResponseHeader("content-length"), "6, 6"); + assert_equals(xhr.getAllResponseHeaders(), "content-length: 6, 6\r\ncontent-type: text/plain, text/plain\r\ntest: x, x\r\n"); + assert_equals(xhr.responseText, "Test.\n"); + }); +}, "XMLHttpRequest and duplicate Content-Length/Content-Type headers"); diff --git a/test/wpt/tests/fetch/content-length/content-length.html b/test/wpt/tests/fetch/content-length/content-length.html new file mode 100644 index 0000000..cda9b5b --- /dev/null +++ b/test/wpt/tests/fetch/content-length/content-length.html @@ -0,0 +1,14 @@ + + +Content-Length Test + + + +PASS +but FAIL if this is in the body. \ No newline at end of file diff --git a/test/wpt/tests/fetch/content-length/content-length.html.headers b/test/wpt/tests/fetch/content-length/content-length.html.headers new file mode 100644 index 0000000..25389b7 --- /dev/null +++ b/test/wpt/tests/fetch/content-length/content-length.html.headers @@ -0,0 +1 @@ +Content-Length: 403 diff --git a/test/wpt/tests/fetch/content-length/parsing.window.js b/test/wpt/tests/fetch/content-length/parsing.window.js new file mode 100644 index 0000000..5028ad9 --- /dev/null +++ b/test/wpt/tests/fetch/content-length/parsing.window.js @@ -0,0 +1,18 @@ +promise_test(() => { + return fetch("resources/content-lengths.json").then(res => res.json()).then(runTests); +}, "Loading JSON…"); + +function runTests(testUnits) { + testUnits.forEach(({ input, output }) => { + promise_test(t => { + const result = fetch(`resources/content-length.py?length=${encodeURIComponent(input)}`); + if (output === null) { + return promise_rejects_js(t, TypeError, result); + } else { + return result.then(res => res.text()).then(text => { + assert_equals(text.length, output); + }); + } + }, `Input: ${format_value(input)}. Expected: ${output === null ? 
"network error" : output}.`); + }); +} diff --git a/test/wpt/tests/fetch/content-length/resources/content-length.py b/test/wpt/tests/fetch/content-length/resources/content-length.py new file mode 100644 index 0000000..92cfade --- /dev/null +++ b/test/wpt/tests/fetch/content-length/resources/content-length.py @@ -0,0 +1,10 @@ +def main(request, response): + response.add_required_headers = False + output = b"HTTP/1.1 200 OK\r\n" + output += b"Content-Type: text/plain;charset=UTF-8\r\n" + output += b"Connection: close\r\n" + output += request.GET.first(b"length") + b"\r\n" + output += b"\r\n" + output += b"Fact: this is really forty-two bytes long." + response.writer.write(output) + response.close_connection = True diff --git a/test/wpt/tests/fetch/content-length/resources/content-lengths.json b/test/wpt/tests/fetch/content-length/resources/content-lengths.json new file mode 100644 index 0000000..ac6f1a2 --- /dev/null +++ b/test/wpt/tests/fetch/content-length/resources/content-lengths.json @@ -0,0 +1,142 @@ +[ + { + "input": "Content-Length: 42", + "output": 42 + }, + { + "input": "Content-Length: 42,42", + "output": 42 + }, + { + "input": "Content-Length: 42\r\nContent-Length: 42", + "output": 42 + }, + { + "input": "Content-Length: 42\r\nContent-Length: 42,42", + "output": 42 + }, + { + "input": "Content-Length: 30", + "output": 30 + }, + { + "input": "Content-Length: 30,30", + "output": 30 + }, + { + "input": "Content-Length: 30\r\nContent-Length: 30", + "output": 30 + }, + { + "input": "Content-Length: 30\r\nContent-Length: 30,30", + "output": 30 + }, + { + "input": "Content-Length: 30,30\r\nContent-Length: 30,30", + "output": 30 + }, + { + "input": "Content-Length: 30,30, 30 \r\nContent-Length: 30 ", + "output": 30 + }, + { + "input": "Content-Length: 30,42\r\nContent-Length: 30", + "output": null + }, + { + "input": "Content-Length: 30,42\r\nContent-Length: 30,42", + "output": null + }, + { + "input": "Content-Length: 42,30", + "output": null + }, + { + "input": "Content-Length: 30,42", + "output": null + }, + { + "input": "Content-Length: 42\r\nContent-Length: 30", + "output": null + }, + { + "input": "Content-Length: 30\r\nContent-Length: 42", + "output": null + }, + { + "input": "Content-Length: 30,", + "output": null + }, + { + "input": "Content-Length: ,30", + "output": null + }, + { + "input": "Content-Length: 30\r\nContent-Length: \t", + "output": null + }, + { + "input": "Content-Length: \r\nContent-Length: 30", + "output": null + }, + { + "input": "Content-Length: aaaah\r\nContent-Length: nah", + "output": null + }, + { + "input": "Content-Length: aaaah, nah", + "output": null + }, + { + "input": "Content-Length: aaaah\r\nContent-Length: aaaah", + "output": 42 + }, + { + "input": "Content-Length: aaaah, aaaah", + "output": 42 + }, + { + "input": "Content-Length: aaaah", + "output": 42 + }, + { + "input": "Content-Length: 42s", + "output": 42 + }, + { + "input": "Content-Length: 30s", + "output": 42 + }, + { + "input": "Content-Length: -1", + "output": 42 + }, + { + "input": "Content-Length: 0x20", + "output": 42 + }, + { + "input": "Content-Length: 030", + "output": 30 + }, + { + "input": "Content-Length: 030\r\nContent-Length: 30", + "output": null + }, + { + "input": "Content-Length: 030, 30", + "output": null + }, + { + "input": "Content-Length: \"30\"", + "output": 42 + }, + { + "input": "Content-Length:30\r\nContent-Length:,\r\nContent-Length:30", + "output": null + }, + { + "input": "Content-Length: ", + "output": 42 + } +] diff --git 
a/test/wpt/tests/fetch/content-length/resources/identical-duplicates.asis b/test/wpt/tests/fetch/content-length/resources/identical-duplicates.asis new file mode 100644 index 0000000..f38c9a4 --- /dev/null +++ b/test/wpt/tests/fetch/content-length/resources/identical-duplicates.asis @@ -0,0 +1,9 @@ +HTTP/1.1 200 BLAH +Test: x +Test: x +Content-Type: text/plain +Content-Type: text/plain +Content-Length: 6 +Content-Length: 6 + +Test. diff --git a/test/wpt/tests/fetch/content-length/too-long.window.js b/test/wpt/tests/fetch/content-length/too-long.window.js new file mode 100644 index 0000000..f8cefaa --- /dev/null +++ b/test/wpt/tests/fetch/content-length/too-long.window.js @@ -0,0 +1,4 @@ +promise_test(async t => { + const result = await fetch(`resources/content-length.py?length=${encodeURIComponent("Content-Length: 50")}`); + await promise_rejects_js(t, TypeError, result.text()); +}, "Content-Length header value of network response exceeds response body"); diff --git a/test/wpt/tests/fetch/content-type/README.md b/test/wpt/tests/fetch/content-type/README.md new file mode 100644 index 0000000..f553b7e --- /dev/null +++ b/test/wpt/tests/fetch/content-type/README.md @@ -0,0 +1,20 @@ +# `resources/content-types.json` + +An array of tests. Each test has these fields: + +* `contentType`: an array of values for the `Content-Type` header. A harness needs to run the test twice if there are multiple values. One time with the values concatenated with `,` followed by a space and one time with multiple `Content-Type` declarations, each on their own line with one of the values, in order. +* `encoding`: the expected encoding, null for the default. +* `mimeType`: the result of extracting a MIME type and serializing it. +* `documentContentType`: the MIME type expected to be exposed in DOM documents. + +(These tests are currently somewhat geared towards browser use, but could be generalized easily enough if someone wanted to contribute tests for MIME types that would cause downloads in the browser or some such.) + +# `resources/script-content-types.json` + +An array of tests, surprise. Each test has these fields: + +* `contentType`: see above. +* `executes`: whether the script is expected to execute. +* `encoding`: how the script is expected to be decoded. + +These tests are expected to be loaded through ` + +
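The entry below is an editor's sketch, not taken from the `content-types.json` that ships in this patch; it only illustrates the four fields listed in the README above, and every value in it is an invented assumption.

```json
{
  "contentType": ["text/html;charset=windows-1252"],
  "encoding": "windows-1252",
  "mimeType": "text/html;charset=windows-1252",
  "documentContentType": "text/html"
}
```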
+ diff --git a/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub-ref.html b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub-ref.html new file mode 100644 index 0000000..a771ed6 --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub-ref.html @@ -0,0 +1,4 @@ + + + + diff --git a/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub.html b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub.html new file mode 100644 index 0000000..82adc47 --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html-nosniff.tentative.sub.html @@ -0,0 +1,11 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub-ref.html b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub-ref.html new file mode 100644 index 0000000..ebb337d --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub-ref.html @@ -0,0 +1,4 @@ + + + + diff --git a/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub.html b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub.html new file mode 100644 index 0000000..1ae4cfc --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-png-mislabeled-as-html.sub.html @@ -0,0 +1,10 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-empty.sub.html b/test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-empty.sub.html new file mode 100644 index 0000000..3219fed --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-empty.sub.html @@ -0,0 +1,7 @@ + + + + + diff --git a/test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-svg.sub.html b/test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-svg.sub.html new file mode 100644 index 0000000..efcfaa2 --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-svg-doctype-html-mimetype-svg.sub.html @@ -0,0 +1,11 @@ + + + + + diff --git a/test/wpt/tests/fetch/corb/img-svg-invalid.sub-ref.html b/test/wpt/tests/fetch/corb/img-svg-invalid.sub-ref.html new file mode 100644 index 0000000..484cd0a --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-svg-invalid.sub-ref.html @@ -0,0 +1,5 @@ + + + + diff --git a/test/wpt/tests/fetch/corb/img-svg-labeled-as-dash.sub.html b/test/wpt/tests/fetch/corb/img-svg-labeled-as-dash.sub.html new file mode 100644 index 0000000..0578b83 --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-svg-labeled-as-dash.sub.html @@ -0,0 +1,6 @@ + + + + + diff --git a/test/wpt/tests/fetch/corb/img-svg-labeled-as-svg-xml.sub.html b/test/wpt/tests/fetch/corb/img-svg-labeled-as-svg-xml.sub.html new file mode 100644 index 0000000..30a2eb3 --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-svg-labeled-as-svg-xml.sub.html @@ -0,0 +1,6 @@ + + + + + diff --git a/test/wpt/tests/fetch/corb/img-svg-xml-decl.sub.html b/test/wpt/tests/fetch/corb/img-svg-xml-decl.sub.html new file mode 100644 index 0000000..0d3aeaf --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-svg-xml-decl.sub.html @@ -0,0 +1,6 @@ + + + + + diff --git a/test/wpt/tests/fetch/corb/img-svg.sub-ref.html b/test/wpt/tests/fetch/corb/img-svg.sub-ref.html new file mode 100644 index 0000000..5462f68 --- /dev/null +++ b/test/wpt/tests/fetch/corb/img-svg.sub-ref.html @@ -0,0 +1,5 @@ + + + + diff --git a/test/wpt/tests/fetch/corb/preload-image-png-mislabeled-as-html-nosniff.tentative.sub.html b/test/wpt/tests/fetch/corb/preload-image-png-mislabeled-as-html-nosniff.tentative.sub.html new file mode 100644 index 0000000..cea80f2 --- /dev/null +++ 
b/test/wpt/tests/fetch/corb/preload-image-png-mislabeled-as-html-nosniff.tentative.sub.html @@ -0,0 +1,24 @@ + + + + + +
+ + + + + diff --git a/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css new file mode 100644 index 0000000..afd2b92 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css @@ -0,0 +1 @@ +#header { color: red; } diff --git a/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css.headers b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css.headers new file mode 100644 index 0000000..0f228f9 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html-nosniff.css.headers @@ -0,0 +1,2 @@ +Content-Type: text/html +X-Content-Type-Options: nosniff diff --git a/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css new file mode 100644 index 0000000..afd2b92 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css @@ -0,0 +1 @@ +#header { color: red; } diff --git a/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css.headers b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css.headers new file mode 100644 index 0000000..156209f --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/css-mislabeled-as-html.css.headers @@ -0,0 +1 @@ +Content-Type: text/html diff --git a/test/wpt/tests/fetch/corb/resources/css-with-json-parser-breaker.css b/test/wpt/tests/fetch/corb/resources/css-with-json-parser-breaker.css new file mode 100644 index 0000000..7db6f5c --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/css-with-json-parser-breaker.css @@ -0,0 +1,3 @@ +)]}' +{} +#header { color: red; } diff --git a/test/wpt/tests/fetch/corb/resources/empty-labeled-as-png.png b/test/wpt/tests/fetch/corb/resources/empty-labeled-as-png.png new file mode 100644 index 0000000..e69de29 diff --git a/test/wpt/tests/fetch/corb/resources/empty-labeled-as-png.png.headers b/test/wpt/tests/fetch/corb/resources/empty-labeled-as-png.png.headers new file mode 100644 index 0000000..e7be84a --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/empty-labeled-as-png.png.headers @@ -0,0 +1 @@ +Content-Type: image/png diff --git a/test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html b/test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html new file mode 100644 index 0000000..7bad71b --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html @@ -0,0 +1,10 @@ + + + + + Page Title + + +

Page body

+ + diff --git a/test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html.headers b/test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html.headers new file mode 100644 index 0000000..156209f --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/html-correctly-labeled.html.headers @@ -0,0 +1 @@ +Content-Type: text/html diff --git a/test/wpt/tests/fetch/corb/resources/html-js-polyglot.js b/test/wpt/tests/fetch/corb/resources/html-js-polyglot.js new file mode 100644 index 0000000..db45bb4 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/html-js-polyglot.js @@ -0,0 +1,9 @@ + diff --git a/test/wpt/tests/fetch/corb/resources/html-js-polyglot.js.headers b/test/wpt/tests/fetch/corb/resources/html-js-polyglot.js.headers new file mode 100644 index 0000000..156209f --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/html-js-polyglot.js.headers @@ -0,0 +1 @@ +Content-Type: text/html diff --git a/test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js b/test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js new file mode 100644 index 0000000..faae1b7 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js @@ -0,0 +1,10 @@ + diff --git a/test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js.headers b/test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js.headers new file mode 100644 index 0000000..156209f --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/html-js-polyglot2.js.headers @@ -0,0 +1 @@ +Content-Type: text/html diff --git a/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js new file mode 100644 index 0000000..a880a5b --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js @@ -0,0 +1 @@ +window.has_executed_script = true; diff --git a/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js.headers b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js.headers new file mode 100644 index 0000000..0f228f9 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html-nosniff.js.headers @@ -0,0 +1,2 @@ +Content-Type: text/html +X-Content-Type-Options: nosniff diff --git a/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js new file mode 100644 index 0000000..a880a5b --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js @@ -0,0 +1 @@ +window.has_executed_script = true; diff --git a/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js.headers b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js.headers new file mode 100644 index 0000000..156209f --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/js-mislabeled-as-html.js.headers @@ -0,0 +1 @@ +Content-Type: text/html diff --git a/test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png b/test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png new file mode 100644 index 0000000..820f8ca Binary files /dev/null and b/test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png differ diff --git a/test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png.headers b/test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png.headers new file mode 100644 index 0000000..e7be84a --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/png-correctly-labeled.png.headers @@ -0,0 +1 @@ +Content-Type: image/png diff --git a/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png 
b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png new file mode 100644 index 0000000..820f8ca Binary files /dev/null and b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png differ diff --git a/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png.headers b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png.headers new file mode 100644 index 0000000..0f228f9 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html-nosniff.png.headers @@ -0,0 +1,2 @@ +Content-Type: text/html +X-Content-Type-Options: nosniff diff --git a/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png new file mode 100644 index 0000000..820f8ca Binary files /dev/null and b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png differ diff --git a/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png.headers b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png.headers new file mode 100644 index 0000000..156209f --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/png-mislabeled-as-html.png.headers @@ -0,0 +1 @@ +Content-Type: text/html diff --git a/test/wpt/tests/fetch/corb/resources/response_block_probe.js b/test/wpt/tests/fetch/corb/resources/response_block_probe.js new file mode 100644 index 0000000..9c3b87b --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/response_block_probe.js @@ -0,0 +1 @@ +alert(1); // Arbitrary JavaScript. Details don't matter for the test. diff --git a/test/wpt/tests/fetch/corb/resources/response_block_probe.js.headers b/test/wpt/tests/fetch/corb/resources/response_block_probe.js.headers new file mode 100644 index 0000000..0d848b0 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/response_block_probe.js.headers @@ -0,0 +1 @@ +Content-Type: text/csv diff --git a/test/wpt/tests/fetch/corb/resources/sniffable-resource.py b/test/wpt/tests/fetch/corb/resources/sniffable-resource.py new file mode 100644 index 0000000..f815093 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/sniffable-resource.py @@ -0,0 +1,11 @@ +def main(request, response): + body = request.GET.first(b"body", None) + type = request.GET.first(b"type", None) + + response.add_required_headers = False + response.writer.write_status(200) + response.writer.write_header(b"content-length", len(body)) + response.writer.write_header(b"content-type", type) + response.writer.end_headers() + + response.writer.write(body) diff --git a/test/wpt/tests/fetch/corb/resources/subframe-that-posts-html-containing-blob-url-to-parent.html b/test/wpt/tests/fetch/corb/resources/subframe-that-posts-html-containing-blob-url-to-parent.html new file mode 100644 index 0000000..67b3ad5 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/subframe-that-posts-html-containing-blob-url-to-parent.html @@ -0,0 +1,16 @@ + + + diff --git a/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg new file mode 100644 index 0000000..fa2d29b --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg @@ -0,0 +1,4 @@ + + + + diff --git a/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg.headers b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg.headers new file mode 100644 index 0000000..29515ee --- /dev/null +++ 
b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-empty.svg.headers @@ -0,0 +1 @@ +Content-Type: diff --git a/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg new file mode 100644 index 0000000..fa2d29b --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg @@ -0,0 +1,4 @@ + + + + diff --git a/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg.headers b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg.headers new file mode 100644 index 0000000..070de35 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-doctype-html-mimetype-svg.svg.headers @@ -0,0 +1 @@ +Content-Type: image/svg+xml diff --git a/test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg new file mode 100644 index 0000000..2b7d101 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg @@ -0,0 +1,3 @@ + + + diff --git a/test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg.headers b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg.headers new file mode 100644 index 0000000..43ce612 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-dash.svg.headers @@ -0,0 +1 @@ +Content-Type: application/dash+xml diff --git a/test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg new file mode 100644 index 0000000..2b7d101 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg @@ -0,0 +1,3 @@ + + + diff --git a/test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg.headers b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg.headers new file mode 100644 index 0000000..070de35 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-labeled-as-svg-xml.svg.headers @@ -0,0 +1 @@ +Content-Type: image/svg+xml diff --git a/test/wpt/tests/fetch/corb/resources/svg-xml-decl.svg b/test/wpt/tests/fetch/corb/resources/svg-xml-decl.svg new file mode 100644 index 0000000..3b39aff --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg-xml-decl.svg @@ -0,0 +1,4 @@ + + + + diff --git a/test/wpt/tests/fetch/corb/resources/svg.svg b/test/wpt/tests/fetch/corb/resources/svg.svg new file mode 100644 index 0000000..2b7d101 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg.svg @@ -0,0 +1,3 @@ + + + diff --git a/test/wpt/tests/fetch/corb/resources/svg.svg.headers b/test/wpt/tests/fetch/corb/resources/svg.svg.headers new file mode 100644 index 0000000..070de35 --- /dev/null +++ b/test/wpt/tests/fetch/corb/resources/svg.svg.headers @@ -0,0 +1 @@ +Content-Type: image/svg+xml diff --git a/test/wpt/tests/fetch/corb/response_block.tentative.https.html b/test/wpt/tests/fetch/corb/response_block.tentative.https.html new file mode 100644 index 0000000..6b11600 --- /dev/null +++ b/test/wpt/tests/fetch/corb/response_block.tentative.https.html @@ -0,0 +1,50 @@ + + + + + + diff --git a/test/wpt/tests/fetch/corb/script-html-correctly-labeled.tentative.sub.html b/test/wpt/tests/fetch/corb/script-html-correctly-labeled.tentative.sub.html new file mode 100644 index 0000000..6d1947c --- /dev/null +++ b/test/wpt/tests/fetch/corb/script-html-correctly-labeled.tentative.sub.html @@ -0,0 +1,32 @@ + + + + + +
+ diff --git a/test/wpt/tests/fetch/corb/script-html-js-polyglot.sub.html b/test/wpt/tests/fetch/corb/script-html-js-polyglot.sub.html new file mode 100644 index 0000000..9a272d6 --- /dev/null +++ b/test/wpt/tests/fetch/corb/script-html-js-polyglot.sub.html @@ -0,0 +1,32 @@ + + + + + +
+ diff --git a/test/wpt/tests/fetch/corb/script-html-via-cross-origin-blob-url.sub.html b/test/wpt/tests/fetch/corb/script-html-via-cross-origin-blob-url.sub.html new file mode 100644 index 0000000..c8a90c7 --- /dev/null +++ b/test/wpt/tests/fetch/corb/script-html-via-cross-origin-blob-url.sub.html @@ -0,0 +1,38 @@ + + + + + +
+ diff --git a/test/wpt/tests/fetch/corb/script-js-mislabeled-as-html-nosniff.sub.html b/test/wpt/tests/fetch/corb/script-js-mislabeled-as-html-nosniff.sub.html new file mode 100644 index 0000000..b6bc909 --- /dev/null +++ b/test/wpt/tests/fetch/corb/script-js-mislabeled-as-html-nosniff.sub.html @@ -0,0 +1,33 @@ + + + + + +
+ + + + + + + diff --git a/test/wpt/tests/fetch/corb/script-js-mislabeled-as-html.sub.html b/test/wpt/tests/fetch/corb/script-js-mislabeled-as-html.sub.html new file mode 100644 index 0000000..44cb1f8 --- /dev/null +++ b/test/wpt/tests/fetch/corb/script-js-mislabeled-as-html.sub.html @@ -0,0 +1,25 @@ + + + + + +
+ + + + + + + diff --git a/test/wpt/tests/fetch/corb/script-resource-with-json-parser-breaker.tentative.sub.html b/test/wpt/tests/fetch/corb/script-resource-with-json-parser-breaker.tentative.sub.html new file mode 100644 index 0000000..f0eb1f0 --- /dev/null +++ b/test/wpt/tests/fetch/corb/script-resource-with-json-parser-breaker.tentative.sub.html @@ -0,0 +1,85 @@ + + + + + +
+ diff --git a/test/wpt/tests/fetch/corb/script-resource-with-nonsniffable-types.tentative.sub.html b/test/wpt/tests/fetch/corb/script-resource-with-nonsniffable-types.tentative.sub.html new file mode 100644 index 0000000..6d490d5 --- /dev/null +++ b/test/wpt/tests/fetch/corb/script-resource-with-nonsniffable-types.tentative.sub.html @@ -0,0 +1,84 @@ + + + + + + +
+ diff --git a/test/wpt/tests/fetch/corb/style-css-mislabeled-as-html-nosniff.sub.html b/test/wpt/tests/fetch/corb/style-css-mislabeled-as-html-nosniff.sub.html new file mode 100644 index 0000000..8fef0dc --- /dev/null +++ b/test/wpt/tests/fetch/corb/style-css-mislabeled-as-html-nosniff.sub.html @@ -0,0 +1,42 @@ + + + +CSS is not applied (because of nosniff + non-text/css headers) + + + + + + + + + + + +

Header example

+

Paragraph body

+ + + diff --git a/test/wpt/tests/fetch/corb/style-css-mislabeled-as-html.sub.html b/test/wpt/tests/fetch/corb/style-css-mislabeled-as-html.sub.html new file mode 100644 index 0000000..4f0b4c2 --- /dev/null +++ b/test/wpt/tests/fetch/corb/style-css-mislabeled-as-html.sub.html @@ -0,0 +1,36 @@ + + + +CSS is not applied (because of strict content-type enforcement for cross-origin stylesheets) + + + + + + + + + + + +

Header example

+

Paragraph body

+ + + diff --git a/test/wpt/tests/fetch/corb/style-css-with-json-parser-breaker.sub.html b/test/wpt/tests/fetch/corb/style-css-with-json-parser-breaker.sub.html new file mode 100644 index 0000000..29ed586 --- /dev/null +++ b/test/wpt/tests/fetch/corb/style-css-with-json-parser-breaker.sub.html @@ -0,0 +1,38 @@ + + + +CORB doesn't block a stylesheet that has a proper Content-Type and begins with a JSON parser breaker + + + + + + + + + + + +

Header example

+

Paragraph body

+ + + diff --git a/test/wpt/tests/fetch/corb/style-html-correctly-labeled.sub.html b/test/wpt/tests/fetch/corb/style-html-correctly-labeled.sub.html new file mode 100644 index 0000000..cdefcd2 --- /dev/null +++ b/test/wpt/tests/fetch/corb/style-html-correctly-labeled.sub.html @@ -0,0 +1,41 @@ + + + +CSS is not applied (because of mismatched Content-Type header) + + + + + + + + + + + +

Header example

+

Paragraph body

+ + + diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/fetch-in-iframe.html b/test/wpt/tests/fetch/cross-origin-resource-policy/fetch-in-iframe.html new file mode 100644 index 0000000..cc6a3a8 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/fetch-in-iframe.html @@ -0,0 +1,67 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/fetch.any.js b/test/wpt/tests/fetch/cross-origin-resource-policy/fetch.any.js new file mode 100644 index 0000000..64a7bfe --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/fetch.any.js @@ -0,0 +1,76 @@ +// META: timeout=long +// META: global=window,dedicatedworker,sharedworker +// META: script=/common/get-host-info.sub.js + +const host = get_host_info(); +const path = "/fetch/cross-origin-resource-policy/"; +const localBaseURL = host.HTTP_ORIGIN + path; +const sameSiteBaseURL = "http://" + host.ORIGINAL_HOST + ":" + host.HTTP_PORT2 + path; +const notSameSiteBaseURL = host.HTTP_NOTSAMESITE_ORIGIN + path; +const httpsBaseURL = host.HTTPS_ORIGIN + path; + +promise_test(async () => { + const response = await fetch("./resources/hello.py?corp=same-origin"); + assert_equals(await response.text(), "hello"); +}, "Same-origin fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header."); + +promise_test(async () => { + const response = await fetch("./resources/hello.py?corp=same-site"); + assert_equals(await response.text(), "hello"); +}, "Same-origin fetch with a 'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test(async (test) => { + const response = await fetch(notSameSiteBaseURL + "resources/hello.py?corp=same-origin"); + assert_equals(await response.text(), "hello"); +}, "Cross-origin cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header."); + +promise_test(async (test) => { + const response = await fetch(notSameSiteBaseURL + "resources/hello.py?corp=same-site"); + assert_equals(await response.text(), "hello"); +}, "Cross-origin cors fetch with a 'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test((test) => { + const remoteURL = notSameSiteBaseURL + "resources/hello.py?corp=same-origin"; + return promise_rejects_js(test, TypeError, fetch(remoteURL, { mode : "no-cors" })); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header."); + +promise_test((test) => { + const remoteURL = notSameSiteBaseURL + "resources/hello.py?corp=same-site"; + return promise_rejects_js(test, TypeError, fetch(remoteURL, { mode: "no-cors" })); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test((test) => { + const remoteURL = httpsBaseURL + "resources/hello.py?corp=same-site"; + return promise_rejects_js(test, TypeError, fetch(remoteURL, { mode: "no-cors" })); +}, "Cross-scheme (HTTP to HTTPS) no-cors fetch to a same-site URL with a 'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test((test) => { + const remoteURL = httpsBaseURL + "resources/hello.py?corp=same-origin"; + return promise_rejects_js(test, TypeError, fetch(remoteURL, { mode : "no-cors" })); +}, "Cross-origin no-cors fetch to a same-site URL with a 'Cross-Origin-Resource-Policy: same-origin' response header."); + +promise_test(async (test) => { + const remoteSameSiteURL = sameSiteBaseURL + "resources/hello.py?corp=same-site"; + + await fetch(remoteSameSiteURL, { mode: "no-cors" }); + + return promise_rejects_js(test, TypeError, 
fetch(sameSiteBaseURL + "resources/hello.py?corp=same-origin", { mode: "no-cors" })); +}, "Valid cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test((test) => { + const finalURL = notSameSiteBaseURL + "resources/hello.py?corp=same-origin"; + return promise_rejects_js(test, TypeError, fetch("resources/redirect.py?redirectTo=" + encodeURIComponent(finalURL), { mode: "no-cors" })); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header after a redirection."); + +promise_test((test) => { + const finalURL = localBaseURL + "resources/hello.py?corp=same-origin"; + return fetch(notSameSiteBaseURL + "resources/redirect.py?redirectTo=" + encodeURIComponent(finalURL), { mode: "no-cors" }); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header after a cross-origin redirection."); + +promise_test(async (test) => { + const finalURL = localBaseURL + "resources/hello.py?corp=same-origin"; + + await fetch(finalURL, { mode: "no-cors" }); + + return promise_rejects_js(test, TypeError, fetch(notSameSiteBaseURL + "resources/redirect.py?corp=same-origin&redirectTo=" + encodeURIComponent(finalURL), { mode: "no-cors" })); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' redirect response header."); diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/fetch.https.any.js b/test/wpt/tests/fetch/cross-origin-resource-policy/fetch.https.any.js new file mode 100644 index 0000000..c9b5b75 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/fetch.https.any.js @@ -0,0 +1,56 @@ +// META: timeout=long +// META: global=window,worker +// META: script=/common/get-host-info.sub.js + +const host = get_host_info(); +const path = "/fetch/cross-origin-resource-policy/"; +const localBaseURL = host.HTTPS_ORIGIN + path; +const notSameSiteBaseURL = host.HTTPS_NOTSAMESITE_ORIGIN + path; + +promise_test(async () => { + const response = await fetch("./resources/hello.py?corp=same-origin"); + assert_equals(await response.text(), "hello"); +}, "Same-origin fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header."); + +promise_test(async () => { + const response = await fetch("./resources/hello.py?corp=same-site"); + assert_equals(await response.text(), "hello"); +}, "Same-origin fetch with a 'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test(async (test) => { + const response = await fetch(notSameSiteBaseURL + "resources/hello.py?corp=same-origin"); + assert_equals(await response.text(), "hello"); +}, "Cross-origin cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header."); + +promise_test(async (test) => { + const response = await fetch(notSameSiteBaseURL + "resources/hello.py?corp=same-site"); + assert_equals(await response.text(), "hello"); +}, "Cross-origin cors fetch with a 'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test((test) => { + const remoteURL = notSameSiteBaseURL + "resources/hello.py?corp=same-origin"; + return promise_rejects_js(test, TypeError, fetch(remoteURL, { mode : "no-cors" })); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header."); + +promise_test((test) => { + const remoteURL = notSameSiteBaseURL + "resources/hello.py?corp=same-site"; + return promise_rejects_js(test, TypeError, fetch(remoteURL, { mode: "no-cors" })); +}, "Cross-origin no-cors fetch with a 
'Cross-Origin-Resource-Policy: same-site' response header."); + +promise_test((test) => { + const finalURL = notSameSiteBaseURL + "resources/hello.py?corp=same-origin"; + return promise_rejects_js(test, TypeError, fetch("resources/redirect.py?redirectTo=" + encodeURIComponent(finalURL), { mode: "no-cors" })); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header after a redirection."); + +promise_test((test) => { + const finalURL = localBaseURL + "resources/hello.py?corp=same-origin"; + return fetch(notSameSiteBaseURL + "resources/redirect.py?redirectTo=" + encodeURIComponent(finalURL), { mode: "no-cors" }); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' response header after a cross-origin redirection."); + +promise_test(async (test) => { + const finalURL = localBaseURL + "resources/hello.py?corp=same-origin"; + + await fetch(finalURL, { mode: "no-cors" }); + + return promise_rejects_js(test, TypeError, fetch(notSameSiteBaseURL + "resources/redirect.py?corp=same-origin&redirectTo=" + encodeURIComponent(finalURL), { mode: "no-cors" })); +}, "Cross-origin no-cors fetch with a 'Cross-Origin-Resource-Policy: same-origin' redirect response header."); diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/iframe-loads.html b/test/wpt/tests/fetch/cross-origin-resource-policy/iframe-loads.html new file mode 100644 index 0000000..63902c3 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/iframe-loads.html @@ -0,0 +1,46 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/image-loads.html b/test/wpt/tests/fetch/cross-origin-resource-policy/image-loads.html new file mode 100644 index 0000000..060b755 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/image-loads.html @@ -0,0 +1,54 @@ + + + + + + + + +
+ + + diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/resources/green.png b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/green.png new file mode 100644 index 0000000..28a1faa Binary files /dev/null and b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/green.png differ diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/resources/hello.py b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/hello.py new file mode 100644 index 0000000..2b1cb84 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/hello.py @@ -0,0 +1,6 @@ +def main(request, response): + headers = [(b"Cross-Origin-Resource-Policy", request.GET[b'corp'])] + if b'origin' in request.headers: + headers.append((b'Access-Control-Allow-Origin', request.headers[b'origin'])) + + return 200, headers, b"hello" diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframe.py b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframe.py new file mode 100644 index 0000000..815ecf5 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframe.py @@ -0,0 +1,5 @@ +def main(request, response): + headers = [(b"Content-Type", b"text/html"), + (b"Cross-Origin-Resource-Policy", request.GET[b'corp'])] + return 200, headers, b"

The iframe

" + diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframeFetch.html b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframeFetch.html new file mode 100644 index 0000000..2571858 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/iframeFetch.html @@ -0,0 +1,19 @@ + + + + + + +

The iframe making a same origin fetch call.

+ + diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/resources/image.py b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/image.py new file mode 100644 index 0000000..2a779cf --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/image.py @@ -0,0 +1,22 @@ +import os.path + +from wptserve.utils import isomorphic_decode + +def main(request, response): + type = request.GET.first(b"type", None) + + body = open(os.path.join(os.path.dirname(isomorphic_decode(__file__)), u"green.png"), u"rb").read() + + response.add_required_headers = False + response.writer.write_status(200) + + if b'corp' in request.GET: + response.writer.write_header(b"cross-origin-resource-policy", request.GET[b'corp']) + if b'acao' in request.GET: + response.writer.write_header(b"access-control-allow-origin", request.GET[b'acao']) + response.writer.write_header(b"content-length", len(body)) + if(type != None): + response.writer.write_header(b"content-type", type) + response.writer.end_headers() + + response.writer.write(body) diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/resources/redirect.py b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/redirect.py new file mode 100644 index 0000000..0dad4dd --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/redirect.py @@ -0,0 +1,6 @@ +def main(request, response): + headers = [(b"Location", request.GET[b'redirectTo'])] + if b'corp' in request.GET: + headers.append((b'Cross-Origin-Resource-Policy', request.GET[b'corp'])) + + return 302, headers, b"" diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/resources/script.py b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/script.py new file mode 100644 index 0000000..58f8d34 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/resources/script.py @@ -0,0 +1,6 @@ +def main(request, response): + headers = [(b"Cross-Origin-Resource-Policy", request.GET[b'corp'])] + if b'origin' in request.headers: + headers.append((b'Access-Control-Allow-Origin', request.headers[b'origin'])) + + return 200, headers, b"" diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.any.js b/test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.any.js new file mode 100644 index 0000000..8f63381 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.any.js @@ -0,0 +1,7 @@ +// META: script=/common/get-host-info.sub.js + +promise_test(t => { + return promise_rejects_js(t, + TypeError, + fetch(get_host_info().HTTPS_REMOTE_ORIGIN + "/fetch/cross-origin-resource-policy/resources/hello.py?corp=same-site", { mode: "no-cors" })); +}, "Cross-Origin-Resource-Policy: same-site blocks retrieving HTTPS from HTTP"); diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.https.window.js b/test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.https.window.js new file mode 100644 index 0000000..4c74571 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/scheme-restriction.https.window.js @@ -0,0 +1,13 @@ +// META: script=/common/get-host-info.sub.js + +promise_test(t => { + const img = new Image(); + img.src = get_host_info().HTTP_REMOTE_ORIGIN + "/fetch/cross-origin-resource-policy/resources/image.py?corp=same-site"; + return new Promise((resolve, reject) => { + img.onload = resolve; + img.onerror = reject; + document.body.appendChild(img); + }).finally(() => { + img.remove(); + }); +}, 
"Cross-Origin-Resource-Policy does not block Mixed Content "); diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/script-loads.html b/test/wpt/tests/fetch/cross-origin-resource-policy/script-loads.html new file mode 100644 index 0000000..a9690fc --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/script-loads.html @@ -0,0 +1,52 @@ + + + + + + + + +
+ + + diff --git a/test/wpt/tests/fetch/cross-origin-resource-policy/syntax.any.js b/test/wpt/tests/fetch/cross-origin-resource-policy/syntax.any.js new file mode 100644 index 0000000..dc87497 --- /dev/null +++ b/test/wpt/tests/fetch/cross-origin-resource-policy/syntax.any.js @@ -0,0 +1,19 @@ +// META: script=/common/get-host-info.sub.js + +const crossOriginURL = get_host_info().HTTP_REMOTE_ORIGIN + "/fetch/cross-origin-resource-policy/resources/hello.py?corp="; + +[ + "same", + "same, same-origin", + "SAME-ORIGIN", + "Same-Origin", + "same-origin, <>", + "same-origin, same-origin", + "https://www.example.com", // See https://github.com/whatwg/fetch/issues/760 +].forEach(incorrectHeaderValue => { + // Note: an incorrect value results in a successful load, so this test is only meaningful in + // implementations with support for the header. + promise_test(t => { + return fetch(crossOriginURL + encodeURIComponent(incorrectHeaderValue), { mode: "no-cors" }); + }, "Parsing Cross-Origin-Resource-Policy: " + incorrectHeaderValue); +}); diff --git a/test/wpt/tests/fetch/data-urls/README.md b/test/wpt/tests/fetch/data-urls/README.md new file mode 100644 index 0000000..1ce5b18 --- /dev/null +++ b/test/wpt/tests/fetch/data-urls/README.md @@ -0,0 +1,11 @@ +## data: URLs + +`resources/data-urls.json` contains `data:` URL tests. The tests are encoded as a JSON array. Each value in the array is an array of two or three values. The first value describes the input, the second value describes the expected MIME type, null if the input is expected to fail somehow, or the empty string if the expected value is `text/plain;charset=US-ASCII`. The third value, if present, describes the expected body as an array of integers representing bytes. + +These tests are used for `data:` URLs in this directory (see `processing.any.js`). + +## Forgiving-base64 decode + +`resources/base64.json` contains [forgiving-base64 decode](https://infra.spec.whatwg.org/#forgiving-base64-decode) tests. The tests are encoded as a JSON array. Each value in the array is an array of two values. The first value describes the input, the second value describes the output as an array of integers representing bytes or null if the input cannot be decoded. + +These tests are used for `data:` URLs in this directory (see `base64.any.js`) and `window.atob()` in `../../html/webappapis/atob/base64.html`. 
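For orientation, a hypothetical entry for each of the two fixture files, matching the formats described above, could look like the following sketch. The concrete values are illustrative only and are not taken from the actual `data-urls.json` or `base64.json` files.

```js
// Illustrative sketch of the fixture entry shapes described in this README.
// data-urls.json entry:
//   [input, expected MIME type (null = expected failure, "" = text/plain;charset=US-ASCII), optional body bytes]
const dataUrlEntry = ["data:text/plain;base64,aGk=", "text/plain", [104, 105]]; // body decodes to "hi"

// base64.json entry: [input, decoded bytes, or null when forgiving-base64 decode must fail]
const base64Entry = ["aGk=", [104, 105]];

// A harness iterating such entries might branch on the null case like this:
const [input, expected] = base64Entry;
if (expected === null) {
  console.log(`${input} must fail to decode`);
} else {
  console.log(`${input} decodes to ${expected.length} byte(s)`);
}
```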
diff --git a/test/wpt/tests/fetch/data-urls/base64.any.js b/test/wpt/tests/fetch/data-urls/base64.any.js new file mode 100644 index 0000000..83f34db --- /dev/null +++ b/test/wpt/tests/fetch/data-urls/base64.any.js @@ -0,0 +1,18 @@ +// META: global=window,worker + +promise_test(() => fetch("resources/base64.json").then(res => res.json()).then(runBase64Tests), "Setup."); +function runBase64Tests(tests) { + for(let i = 0; i < tests.length; i++) { + const input = tests[i][0], + output = tests[i][1], + dataURL = "data:;base64," + input; + promise_test(t => { + if(output === null) { + return promise_rejects_js(t, TypeError, fetch(dataURL)); + } + return fetch(dataURL).then(res => res.arrayBuffer()).then(body => { + assert_array_equals(new Uint8Array(body), output); + }); + }, "data: URL base64 handling: " + format_value(input)); + } +} diff --git a/test/wpt/tests/fetch/data-urls/navigate.window.js b/test/wpt/tests/fetch/data-urls/navigate.window.js new file mode 100644 index 0000000..b532a00 --- /dev/null +++ b/test/wpt/tests/fetch/data-urls/navigate.window.js @@ -0,0 +1,75 @@ +// META: timeout=long +// +// Test some edge cases around navigation to data: URLs to ensure they use the same code path + +[ + { + input: "data:text/html,", + result: 1, + name: "Nothing fancy", + }, + { + input: "data:text/html;base64,PHNjcmlwdD5wYXJlbnQucG9zdE1lc3NhZ2UoMiwgJyonKTwvc2NyaXB0Pg==", + result: 2, + name: "base64", + }, + { + input: "data:text/html;base64,PHNjcmlwdD5wYXJlbnQucG9zdE1lc3NhZ2UoNCwgJyonKTwvc2NyaXB0Pr+/", + result: 4, + name: "base64 with code points that differ from base64url" + }, + { + input: "data:text/html;base64,PHNjcml%09%20%20%0A%0C%0DwdD5wYXJlbnQucG9zdE1lc3NhZ2UoNiwgJyonKTwvc2NyaXB0Pg==", + result: 6, + name: "ASCII whitespace in the input is removed" + } +].forEach(({ input, result, name }) => { + // Use promise_test so they go sequentially + promise_test(async t => { + const event = await new Promise((resolve, reject) => { + self.addEventListener("message", t.step_func(resolve), { once: true }); + const frame = document.body.appendChild(document.createElement("iframe")); + t.add_cleanup(() => frame.remove()); + + // The assumption is that postMessage() is quicker + t.step_timeout(reject, 500); + frame.src = input; + }); + assert_equals(event.data, result); + }, name); +}); + +// Failure cases +[ + { + input: "data:text/html;base64,PHNjcmlwdD5wYXJlbnQucG9zdE1lc3NhZ2UoMywgJyonKTwvc2NyaXB0Pg=", + name: "base64 with incorrect padding", + }, + { + input: "data:text/html;base64,PHNjcmlwdD5wYXJlbnQucG9zdE1lc3NhZ2UoNSwgJyonKTwvc2NyaXB0Pr-_", + name: "base64url is not supported" + }, + { + input: "data:text/html;base64,%0BPHNjcmlwdD5wYXJlbnQucG9zdE1lc3NhZ2UoNywgJyonKTwvc2NyaXB0Pg==", + name: "Vertical tab in the input leads to an error" + } +].forEach(({ input, name }) => { + // Continue to use promise_test so they go sequentially + promise_test(async t => { + const event = await new Promise((resolve, reject) => { + self.addEventListener("message", t.step_func(reject), { once: true }); + const frame = document.body.appendChild(document.createElement("iframe")); + t.add_cleanup(() => frame.remove()); + + // The assumption is that postMessage() is quicker + t.step_timeout(resolve, 500); + frame.src = input; + }); + }, name); +}); + +// I found some of the interesting code point cases above through brute force: +// +// for (i = 0; i < 256; i++) { +// w(btoa("This is a document." 
diff --git a/test/wpt/tests/fetch/h1-parsing/resources/message.py b/test/wpt/tests/fetch/h1-parsing/resources/message.py new file mode 100644 index 0000000..640080c --- /dev/null +++ b/test/wpt/tests/fetch/h1-parsing/resources/message.py @@ -0,0 +1,3 @@ +def main(request, response): + response.writer.write(request.GET.first(b"message")) + response.close_connection = True diff --git a/test/wpt/tests/fetch/h1-parsing/resources/script-with-0x00-in-header.py b/test/wpt/tests/fetch/h1-parsing/resources/script-with-0x00-in-header.py new file mode 100644 index 0000000..39f58d8 --- /dev/null +++ b/test/wpt/tests/fetch/h1-parsing/resources/script-with-0x00-in-header.py @@ -0,0 +1,4 @@ +def main(request, response): + response.headers.set(b"Content-Type", b"text/javascript") + response.headers.set(b"Custom", b"\0") + return b"var thisIsJavaScript = 0" diff --git a/test/wpt/tests/fetch/h1-parsing/resources/status-code.py b/test/wpt/tests/fetch/h1-parsing/resources/status-code.py new file mode 100644 index 0000000..5421893 --- /dev/null +++ b/test/wpt/tests/fetch/h1-parsing/resources/status-code.py @@ -0,0 +1,6 @@ +def main(request, response): + output = b"HTTP/1.1 " + output += request.GET.first(b"input") + output += b"\nheader-parsing: is sad\n" + response.writer.write(output) + response.close_connection = True diff --git a/test/wpt/tests/fetch/h1-parsing/status-code.window.js b/test/wpt/tests/fetch/h1-parsing/status-code.window.js new file mode 100644 index 0000000..5776cf4 --- /dev/null +++ b/test/wpt/tests/fetch/h1-parsing/status-code.window.js @@ -0,0 +1,98 @@ +[ + { + input: "", + expected: null + }, + { + input: "BLAH", + expected: null + }, + { + input: "0 OK", + expected: { + status: 0, + statusText: "OK" + } + }, + { + input: "1 OK", + expected: { + status: 1, + statusText: "OK" + } + }, + { + input: "99 NOT OK", + expected: { + status: 99, + statusText: "NOT OK" + } + }, + { + input: "077 77", + expected: { + status: 77, + statusText: "77" + } + }, + { + input: "099 HELLO", + expected: { + status: 99, + statusText: "HELLO" + } + }, + { + input: "200", + expected: { + status: 200, + statusText: "" + } + }, + { + input: "999 DOES IT MATTER", + expected: { + status: 999, + statusText: "DOES IT MATTER" + } + }, + { + input: "1000 BOO", + expected: null + }, + { + input: "0200 BOO", + expected: null + }, + { + input: "65736 NOT 200 OR SOME SUCH", + expected: null + }, + { + input: "131072 HI", + expected: null + }, + { + input: "-200 TEST", + expected: null + }, + { + input: "0xA", + expected: null + }, + { + input: "C8", + expected: null + } +].forEach(({ description, input, expected }) => { + promise_test(async t => { + if (expected !== null) { + const response = await fetch("resources/status-code.py?input=" + input); + assert_equals(response.status, expected.status); + assert_equals(response.statusText, expected.statusText); + assert_equals(response.headers.get("header-parsing"), "is sad"); + } else { + await promise_rejects_js(t, TypeError, fetch("resources/status-code.py?input=" + input)); + } + }, `HTTP/1.1 ${input} ${expected === null ? 
"(network error)" : ""}`); +}); diff --git a/test/wpt/tests/fetch/http-cache/304-update.any.js b/test/wpt/tests/fetch/http-cache/304-update.any.js new file mode 100644 index 0000000..15484f0 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/304-update.any.js @@ -0,0 +1,146 @@ +// META: global=window,worker +// META: title=HTTP Cache - 304 Updates +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: "HTTP cache updates returned headers from a Last-Modified 304", + requests: [ + { + response_headers: [ + ["Expires", -5000], + ["Last-Modified", -3000], + ["Test-Header", "A"] + ] + }, + { + response_headers: [ + ["Expires", -3000], + ["Last-Modified", -3000], + ["Test-Header", "B"] + ], + expected_type: "lm_validated", + expected_response_headers: [ + ["Test-Header", "B"] + ] + } + ] + }, + { + name: "HTTP cache updates stored headers from a Last-Modified 304", + requests: [ + { + response_headers: [ + ["Expires", -5000], + ["Last-Modified", -3000], + ["Test-Header", "A"] + ] + }, + { + response_headers: [ + ["Expires", 3000], + ["Last-Modified", -3000], + ["Test-Header", "B"] + ], + expected_type: "lm_validated", + expected_response_headers: [ + ["Test-Header", "B"] + ], + pause_after: true + }, + { + expected_type: "cached", + expected_response_headers: [ + ["Test-Header", "B"] + ] + } + ] + }, + { + name: "HTTP cache updates returned headers from a ETag 304", + requests: [ + { + response_headers: [ + ["Expires", -5000], + ["ETag", "ABC"], + ["Test-Header", "A"] + ] + }, + { + response_headers: [ + ["Expires", -3000], + ["ETag", "ABC"], + ["Test-Header", "B"] + ], + expected_type: "etag_validated", + expected_response_headers: [ + ["Test-Header", "B"] + ] + } + ] + }, + { + name: "HTTP cache updates stored headers from a ETag 304", + requests: [ + { + response_headers: [ + ["Expires", -5000], + ["ETag", "DEF"], + ["Test-Header", "A"] + ] + }, + { + response_headers: [ + ["Expires", 3000], + ["ETag", "DEF"], + ["Test-Header", "B"] + ], + expected_type: "etag_validated", + expected_response_headers: [ + ["Test-Header", "B"] + ], + pause_after: true + }, + { + expected_type: "cached", + expected_response_headers: [ + ["Test-Header", "B"] + ] + } + ] + }, + { + name: "Content-* header", + requests: [ + { + response_headers: [ + ["Expires", -5000], + ["ETag", "GHI"], + ["Content-Test-Header", "A"] + ] + }, + { + response_headers: [ + ["Expires", 3000], + ["ETag", "GHI"], + ["Content-Test-Header", "B"] + ], + expected_type: "etag_validated", + expected_response_headers: [ + ["Content-Test-Header", "B"] + ], + pause_after: true + }, + { + expected_type: "cached", + expected_response_headers: [ + ["Content-Test-Header", "B"] + ] + } + ] + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/README.md b/test/wpt/tests/fetch/http-cache/README.md new file mode 100644 index 0000000..512c422 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/README.md @@ -0,0 +1,72 @@ +## HTTP Caching Tests + +These tests cover HTTP-specified behaviours for caches, primarily from +[RFC9111](https://www.rfc-editor.org/rfc/rfc9111.html), but as seen through the +lens of Fetch. + +A few notes: + +* By its nature, [caching is entirely optional]( + https://www.rfc-editor.org/rfc/rfc9111.html#section-2-2); + some tests expecting a response to be + cached might fail because the client chose not to cache it, or chose to + race the cache with a network request. 
+ +* Likewise, some tests might fail because there is a separate document-level + cache that's not well defined; see [this + issue](https://github.com/whatwg/fetch/issues/354). + +* [Partial content tests](partial.any.js) (a.k.a. Range requests) are not specified + in Fetch; tests are included here for interest only. + +* Some browser caches will behave differently when reloading / + shift-reloading, despite the `cache mode` staying the same. + +* [cache-tests.fyi](https://cache-tests.fyi/) is another test suite of HTTP caching + which also caters to server/CDN implementations. + +## Test Format + +Each test run gets its own URL and randomized content and operates independently. + +Each test is an array of objects, with the following members: + +- `name` - The name of the test. +- `requests` - a list of request objects (see below). + +Possible members of a request object: + +- template - A template object for the request, by name. +- request_method - A string containing the HTTP method to be used. +- request_headers - An array of `[header_name_string, header_value_string]` arrays to + emit in the request. +- request_body - A string to use as the request body. +- mode - The mode string to pass to `fetch()`. +- credentials - The credentials string to pass to `fetch()`. +- cache - The cache string to pass to `fetch()`. +- pause_after - Boolean controlling a 3-second pause after the request completes. +- response_status - A `[number, string]` array containing the HTTP status code + and phrase to return. +- response_headers - An array of `[header_name_string, header_value_string]` arrays to + emit in the response. These values will also be checked like + expected_response_headers, unless there is a third value that is + `false`. See below for special handling considerations. +- response_body - String to send as the response body. If not set, it will contain + the test identifier. +- expected_type - One of `["cached", "not_cached", "lm_validate", "etag_validate", "error"]` +- expected_status - A number representing an HTTP status code to check the response for. + If not set, the value of `response_status[0]` will be used; if that + is not set, 200 will be used. +- expected_request_headers - An array of `[header_name_string, header_value_string]` representing + headers to check the request for. +- expected_response_headers - An array of `[header_name_string, header_value_string]` representing + headers to check the response for. See also response_headers. +- expected_response_text - A string to check the response body against. If not present, `response_body` will be checked if present and non-null; otherwise the response body will be checked for the test uuid (unless the status code disallows a body). Set to `null` to disable all response body checking. + +Some headers in `response_headers` are treated specially: + +* For date-carrying headers, if the value is a number, it will be interpreted as a delta to the time of the first request at the server. +* For URL-carrying headers, the value will be appended as a query parameter for `target`. + +See the source for exact details.
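As a concrete illustration of the format above, a hypothetical test definition could look like the sketch below. The name and header values are made up for illustration; the real definitions live in the `*.any.js` files in this directory and are executed via `run_tests()` from `http-cache.js`.

```js
// Hypothetical test definition following the documented format (illustrative only).
var exampleTests = [
  {
    name: "HTTP cache reuses a fresh response (illustrative)",
    requests: [
      {
        // Numeric values for date-carrying headers are deltas, in seconds,
        // relative to the time of the first request at the server (see above).
        response_headers: [
          ["Expires", 100000],
          ["Last-Modified", 0]
        ]
      },
      {
        expected_type: "cached"
      }
    ]
  }
];

// With /common/utils.js, /common/get-host-info.sub.js and http-cache.js loaded
// (as the surrounding tests do via their META lines), this would be run as:
// run_tests(exampleTests);
```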
+ diff --git a/test/wpt/tests/fetch/http-cache/basic-auth-cache-test-ref.html b/test/wpt/tests/fetch/http-cache/basic-auth-cache-test-ref.html new file mode 100644 index 0000000..905facd --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/basic-auth-cache-test-ref.html @@ -0,0 +1,6 @@ + + + + + + diff --git a/test/wpt/tests/fetch/http-cache/basic-auth-cache-test.html b/test/wpt/tests/fetch/http-cache/basic-auth-cache-test.html new file mode 100644 index 0000000..a8979ba --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/basic-auth-cache-test.html @@ -0,0 +1,27 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/http-cache/cache-mode.any.js b/test/wpt/tests/fetch/http-cache/cache-mode.any.js new file mode 100644 index 0000000..8f406d5 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/cache-mode.any.js @@ -0,0 +1,61 @@ +// META: global=window,worker +// META: title=Fetch - Cache Mode +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: "Fetch sends Cache-Control: max-age=0 when cache mode is no-cache", + requests: [ + { + cache: "no-cache", + expected_request_headers: [['cache-control', 'max-age=0']] + } + ] + }, + { + name: "Fetch doesn't touch Cache-Control when cache mode is no-cache and Cache-Control is already present", + requests: [ + { + cache: "no-cache", + request_headers: [['cache-control', 'foo']], + expected_request_headers: [['cache-control', 'foo']] + } + ] + }, + { + name: "Fetch sends Cache-Control: no-cache and Pragma: no-cache when cache mode is no-store", + requests: [ + { + cache: "no-store", + expected_request_headers: [ + ['cache-control', 'no-cache'], + ['pragma', 'no-cache'] + ] + } + ] + }, + { + name: "Fetch doesn't touch Cache-Control when cache mode is no-store and Cache-Control is already present", + requests: [ + { + cache: "no-store", + request_headers: [['cache-control', 'foo']], + expected_request_headers: [['cache-control', 'foo']] + } + ] + }, + { + name: "Fetch doesn't touch Pragma when cache mode is no-store and Pragma is already present", + requests: [ + { + cache: "no-store", + request_headers: [['pragma', 'foo']], + expected_request_headers: [['pragma', 'foo']] + } + ] + } +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/cc-request.any.js b/test/wpt/tests/fetch/http-cache/cc-request.any.js new file mode 100644 index 0000000..d556566 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/cc-request.any.js @@ -0,0 +1,202 @@ +// META: global=window,worker +// META: title=HTTP Cache - Cache-Control Request Directives +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: "HTTP cache doesn't use aged but fresh response when request contains Cache-Control: max-age=0", + requests: [ + { + template: "fresh", + pause_after: true + }, + { + request_headers: [ + ["Cache-Control", "max-age=0"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't use aged but fresh response when request contains Cache-Control: max-age=1", + requests: [ + { + template: "fresh", + pause_after: true + }, + { + request_headers: [ + ["Cache-Control", "max-age=1"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't use fresh response with Age header when request contains Cache-Control: max-age that is greater than remaining freshness", + requests: [ + { + response_headers: [ + ["Cache-Control", 
"max-age=3600"], + ["Age", "1800"] + ] + }, + { + request_headers: [ + ["Cache-Control", "max-age=600"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache does use aged stale response when request contains Cache-Control: max-stale that permits its use", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=1"] + ], + pause_after: true + }, + { + request_headers: [ + ["Cache-Control", "max-stale=1000"] + ], + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache does reuse stale response with Age header when request contains Cache-Control: max-stale that permits its use", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=1500"], + ["Age", "2000"] + ] + }, + { + request_headers: [ + ["Cache-Control", "max-stale=1000"] + ], + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache doesn't reuse fresh response when request contains Cache-Control: min-fresh that wants it fresher", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=1500"] + ] + }, + { + request_headers: [ + ["Cache-Control", "min-fresh=2000"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't reuse fresh response with Age header when request contains Cache-Control: min-fresh that wants it fresher", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=1500"], + ["Age", "1000"] + ] + }, + { + request_headers: [ + ["Cache-Control", "min-fresh=1000"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't reuse fresh response when request contains Cache-Control: no-cache", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"] + ] + }, + { + request_headers: [ + ["Cache-Control", "no-cache"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache validates fresh response with Last-Modified when request contains Cache-Control: no-cache", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Last-Modified", -10000] + ] + }, + { + request_headers: [ + ["Cache-Control", "no-cache"] + ], + expected_type: "lm_validate" + } + ] + }, + { + name: "HTTP cache validates fresh response with ETag when request contains Cache-Control: no-cache", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["ETag", http_content("abc")] + ] + }, + { + request_headers: [ + ["Cache-Control", "no-cache"] + ], + expected_type: "etag_validate" + } + ] + }, + { + name: "HTTP cache doesn't reuse fresh response when request contains Cache-Control: no-store", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"] + ] + }, + { + request_headers: [ + ["Cache-Control", "no-store"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache generates 504 status code when nothing is in cache and request contains Cache-Control: only-if-cached", + requests: [ + { + request_headers: [ + ["Cache-Control", "only-if-cached"] + ], + expected_status: 504, + expected_response_text: null + } + ] + } +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/credentials.tentative.any.js b/test/wpt/tests/fetch/http-cache/credentials.tentative.any.js new file mode 100644 index 0000000..3177092 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/credentials.tentative.any.js @@ -0,0 +1,62 @@ +// META: global=window,worker +// META: title=HTTP Cache - Content +// META: timeout=long +// META: script=/common/utils.js +// META: script=http-cache.js + +// This is a tentative test. 
+// Firefox behavior is used as expectations. +// +// whatwg/fetch issue: +// https://github.com/whatwg/fetch/issues/1253 +// +// Chrome design doc: +// https://docs.google.com/document/d/1lvbiy4n-GM5I56Ncw304sgvY5Td32R6KHitjRXvkZ6U/edit# + +const request_cacheable = { + request_headers: [], + response_headers: [ + ['Cache-Control', 'max-age=3600'], + ], + // TODO(arthursonzogni): The behavior is tested only for same-origin requests. + // It must behave similarly for cross-site and cross-origin requests. The + // problems is the http-cache.js infrastructure returns the + // "Server-Request-Count" as HTTP response headers, which aren't readable for + // CORS requests. + base_url: location.href.replace(/\/[^\/]*$/, '/'), +}; + +const request_credentialled = { ...request_cacheable, credentials: 'include', }; +const request_anonymous = { ...request_cacheable, credentials: 'omit', }; + +const responseIndex = count => { + return { + expected_response_headers: [ + ['Server-Request-Count', count.toString()], + ], + } +}; + +var tests = [ + { + name: 'same-origin: 2xAnonymous, 2xCredentialled, 1xAnonymous', + requests: [ + { ...request_anonymous , ...responseIndex(1)} , + { ...request_anonymous , ...responseIndex(1)} , + { ...request_credentialled , ...responseIndex(2)} , + { ...request_credentialled , ...responseIndex(2)} , + { ...request_anonymous , ...responseIndex(1)} , + ] + }, + { + name: 'same-origin: 2xCredentialled, 2xAnonymous, 1xCredentialled', + requests: [ + { ...request_credentialled , ...responseIndex(1)} , + { ...request_credentialled , ...responseIndex(1)} , + { ...request_anonymous , ...responseIndex(2)} , + { ...request_anonymous , ...responseIndex(2)} , + { ...request_credentialled , ...responseIndex(1)} , + ] + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/freshness.any.js b/test/wpt/tests/fetch/http-cache/freshness.any.js new file mode 100644 index 0000000..6b97c82 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/freshness.any.js @@ -0,0 +1,215 @@ +// META: global=window,worker +// META: title=HTTP Cache - Freshness +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + // response directives + { + name: "HTTP cache reuses a response with a future Expires", + requests: [ + { + response_headers: [ + ["Expires", (30 * 24 * 60 * 60)] + ] + }, + { + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache does not reuse a response with a past Expires", + requests: [ + { + response_headers: [ + ["Expires", (-30 * 24 * 60 * 60)] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache does not reuse a response with a present Expires", + requests: [ + { + response_headers: [ + ["Expires", 0] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache does not reuse a response with an invalid Expires", + requests: [ + { + response_headers: [ + ["Expires", "0"] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache reuses a response with positive Cache-Control: max-age", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"] + ] + }, + { + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache does not reuse a response with Cache-Control: max-age=0", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=0"] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache reuses a response with positive Cache-Control: max-age and a past 
Expires", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Expires", -10000] + ] + }, + { + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache reuses a response with positive Cache-Control: max-age and an invalid Expires", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Expires", "0"] + ] + }, + { + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache does not reuse a response with Cache-Control: max-age=0 and a future Expires", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=0"], + ["Expires", 10000] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache does not prefer Cache-Control: s-maxage over Cache-Control: max-age", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=1, s-maxage=3600"] + ], + pause_after: true, + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache does not reuse a response when the Age header is greater than its freshness lifetime", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Age", "12000"] + ], + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache does not store a response with Cache-Control: no-store", + requests: [ + { + response_headers: [ + ["Cache-Control", "no-store"] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache does not store a response with Cache-Control: no-store, even with max-age and Expires", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=10000, no-store"], + ["Expires", 10000] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache stores a response with Cache-Control: no-cache, but revalidates upon use", + requests: [ + { + response_headers: [ + ["Cache-Control", "no-cache"], + ["ETag", "abcd"] + ] + }, + { + expected_type: "etag_validated" + } + ] + }, + { + name: "HTTP cache stores a response with Cache-Control: no-cache, but revalidates upon use, even with max-age and Expires", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=10000, no-cache"], + ["Expires", 10000], + ["ETag", "abcd"] + ] + }, + { + expected_type: "etag_validated" + } + ] + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/heuristic.any.js b/test/wpt/tests/fetch/http-cache/heuristic.any.js new file mode 100644 index 0000000..d846131 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/heuristic.any.js @@ -0,0 +1,93 @@ +// META: global=window,worker +// META: title=HTTP Cache - Heuristic Freshness +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: "HTTP cache reuses an unknown response with Last-Modified based upon heuristic freshness when Cache-Control: public is present", + requests: [ + { + response_status: [299, "Whatever"], + response_headers: [ + ["Last-Modified", (-3 * 100)], + ["Cache-Control", "public"] + ], + }, + { + expected_type: "cached", + response_status: [299, "Whatever"] + } + ] + }, + { + name: "HTTP cache does not reuse an unknown response with Last-Modified based upon heuristic freshness when Cache-Control: public is not present", + requests: [ + { + response_status: [299, "Whatever"], + response_headers: [ + ["Last-Modified", (-3 * 100)] + ], + }, + { + expected_type: "not_cached" + } + ] + } +]; + +function check_status(status) { + var succeed = status[0]; + var code = status[1]; + var phrase = status[2]; + 
var body = status[3]; + if (body === undefined) { + body = http_content(code); + } + var expected_type = "not_cached"; + var desired = "does not use" + if (succeed === true) { + expected_type = "cached"; + desired = "reuses"; + } + tests.push( + { + name: "HTTP cache " + desired + " a " + code + " " + phrase + " response with Last-Modified based upon heuristic freshness", + requests: [ + { + response_status: [code, phrase], + response_headers: [ + ["Last-Modified", (-3 * 100)] + ], + response_body: body + }, + { + expected_type: expected_type, + response_status: [code, phrase], + response_body: body + } + ] + } + ) +} +[ + [true, 200, "OK"], + [true, 203, "Non-Authoritative Information"], + [true, 204, "No Content", ""], + [true, 404, "Not Found"], + [true, 405, "Method Not Allowed"], + [true, 410, "Gone"], + [true, 414, "URI Too Long"], + [true, 501, "Not Implemented"] +].forEach(check_status); +[ + [false, 201, "Created"], + [false, 202, "Accepted"], + [false, 403, "Forbidden"], + [false, 502, "Bad Gateway"], + [false, 503, "Service Unavailable"], + [false, 504, "Gateway Timeout"], +].forEach(check_status); +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/http-cache.js b/test/wpt/tests/fetch/http-cache/http-cache.js new file mode 100644 index 0000000..19f1ca9 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/http-cache.js @@ -0,0 +1,274 @@ +/* global btoa fetch token promise_test step_timeout */ +/* global assert_equals assert_true assert_own_property assert_throws_js assert_less_than */ + +const templates = { + 'fresh': { + 'response_headers': [ + ['Expires', 100000], + ['Last-Modified', 0] + ] + }, + 'stale': { + 'response_headers': [ + ['Expires', -5000], + ['Last-Modified', -100000] + ] + }, + 'lcl_response': { + 'response_headers': [ + ['Location', 'location_target'], + ['Content-Location', 'content_location_target'] + ] + }, + 'location': { + 'query_arg': 'location_target', + 'response_headers': [ + ['Expires', 100000], + ['Last-Modified', 0] + ] + }, + 'content_location': { + 'query_arg': 'content_location_target', + 'response_headers': [ + ['Expires', 100000], + ['Last-Modified', 0] + ] + } +} + +const noBodyStatus = new Set([204, 304]) + +function makeTest (test) { + return function () { + var uuid = token() + var requests = expandTemplates(test) + var fetchFunctions = makeFetchFunctions(requests, uuid) + return runTest(fetchFunctions, requests, uuid) + } +} + +function makeFetchFunctions(requests, uuid) { + var fetchFunctions = [] + for (let i = 0; i < requests.length; ++i) { + fetchFunctions.push({ + code: function (idx) { + var config = requests[idx] + var url = makeTestUrl(uuid, config) + var init = fetchInit(requests, config) + return fetch(url, init) + .then(makeCheckResponse(idx, config)) + .then(makeCheckResponseBody(config, uuid), function (reason) { + if ('expected_type' in config && config.expected_type === 'error') { + assert_throws_js(TypeError, function () { throw reason }) + } else { + throw reason + } + }) + }, + pauseAfter: 'pause_after' in requests[i] + }) + } + return fetchFunctions +} + +function runTest(fetchFunctions, requests, uuid) { + var idx = 0 + function runNextStep () { + if (fetchFunctions.length) { + var nextFetchFunction = fetchFunctions.shift() + if (nextFetchFunction.pauseAfter === true) { + return nextFetchFunction.code(idx++) + .then(pause) + .then(runNextStep) + } else { + return nextFetchFunction.code(idx++) + .then(runNextStep) + } + } else { + return Promise.resolve() + } + } + + return runNextStep() + .then(function () { 
+ return getServerState(uuid) + }).then(function (testState) { + checkRequests(requests, testState) + return Promise.resolve() + }) +} + +function expandTemplates (test) { + var rawRequests = test.requests + var requests = [] + for (let i = 0; i < rawRequests.length; i++) { + var request = rawRequests[i] + request.name = test.name + if ('template' in request) { + var template = templates[request['template']] + for (let member in template) { + if (!request.hasOwnProperty(member)) { + request[member] = template[member] + } + } + } + requests.push(request) + } + return requests +} + +function fetchInit (requests, config) { + var init = { + 'headers': [] + } + if ('request_method' in config) init.method = config['request_method'] + // Note: init.headers must be a copy of config['request_headers'] array, + // because new elements are added later. + if ('request_headers' in config) init.headers = [...config['request_headers']]; + if ('name' in config) init.headers.push(['Test-Name', config.name]) + if ('request_body' in config) init.body = config['request_body'] + if ('mode' in config) init.mode = config['mode'] + if ('credentials' in config) init.credentials = config['credentials'] + if ('cache' in config) init.cache = config['cache'] + init.headers.push(['Test-Requests', btoa(JSON.stringify(requests))]) + return init +} + +function makeCheckResponse (idx, config) { + return function checkResponse (response) { + var reqNum = idx + 1 + var resNum = parseInt(response.headers.get('Server-Request-Count')) + if ('expected_type' in config) { + if (config.expected_type === 'error') { + assert_true(false, `Request ${reqNum} doesn't throw an error`) + return response.text() + } + if (config.expected_type === 'cached') { + assert_less_than(resNum, reqNum, `Response ${reqNum} does not come from cache`) + } + if (config.expected_type === 'not_cached') { + assert_equals(resNum, reqNum, `Response ${reqNum} comes from cache`) + } + } + if ('expected_status' in config) { + assert_equals(response.status, config.expected_status, + `Response ${reqNum} status is ${response.status}, not ${config.expected_status}`) + } else if ('response_status' in config) { + assert_equals(response.status, config.response_status[0], + `Response ${reqNum} status is ${response.status}, not ${config.response_status[0]}`) + } else { + assert_equals(response.status, 200, `Response ${reqNum} status is ${response.status}, not 200`) + } + if ('response_headers' in config) { + config.response_headers.forEach(function (header) { + if (header.len < 3 || header[2] === true) { + assert_equals(response.headers.get(header[0]), header[1], + `Response ${reqNum} header ${header[0]} is "${response.headers.get(header[0])}", not "${header[1]}"`) + } + }) + } + if ('expected_response_headers' in config) { + config.expected_response_headers.forEach(function (header) { + assert_equals(response.headers.get(header[0]), header[1], + `Response ${reqNum} header ${header[0]} is "${response.headers.get(header[0])}", not "${header[1]}"`) + }) + } + return response.text() + } +} + +function makeCheckResponseBody (config, uuid) { + return function checkResponseBody (resBody) { + var statusCode = 200 + if ('response_status' in config) { + statusCode = config.response_status[0] + } + if ('expected_response_text' in config) { + if (config.expected_response_text !== null) { + assert_equals(resBody, config.expected_response_text, + `Response body is "${resBody}", not expected "${config.expected_response_text}"`) + } + } else if ('response_body' in config && 
config.response_body !== null) { + assert_equals(resBody, config.response_body, + `Response body is "${resBody}", not sent "${config.response_body}"`) + } else if (!noBodyStatus.has(statusCode)) { + assert_equals(resBody, uuid, `Response body is "${resBody}", not default "${uuid}"`) + } + } +} + +function checkRequests (requests, testState) { + var testIdx = 0 + for (let i = 0; i < requests.length; ++i) { + var expectedValidatingHeaders = [] + var config = requests[i] + var serverRequest = testState[testIdx] + var reqNum = i + 1 + if ('expected_type' in config) { + if (config.expected_type === 'cached') continue // the server will not see the request + if (config.expected_type === 'etag_validated') { + expectedValidatingHeaders.push('if-none-match') + } + if (config.expected_type === 'lm_validated') { + expectedValidatingHeaders.push('if-modified-since') + } + } + testIdx++ + expectedValidatingHeaders.forEach(vhdr => { + assert_own_property(serverRequest.request_headers, vhdr, + `request ${reqNum} doesn't have ${vhdr} header`) + }) + if ('expected_request_headers' in config) { + config.expected_request_headers.forEach(expectedHdr => { + assert_equals(serverRequest.request_headers[expectedHdr[0].toLowerCase()], expectedHdr[1], + `request ${reqNum} header ${expectedHdr[0]} value is "${serverRequest.request_headers[expectedHdr[0].toLowerCase()]}", not "${expectedHdr[1]}"`) + }) + } + } +} + +function pause () { + return new Promise(function (resolve, reject) { + step_timeout(function () { + return resolve() + }, 3000) + }) +} + +function makeTestUrl (uuid, config) { + var arg = '' + var base_url = '' + if ('base_url' in config) { + base_url = config.base_url + } + if ('query_arg' in config) { + arg = `&target=${config.query_arg}` + } + return `${base_url}resources/http-cache.py?dispatch=test&uuid=${uuid}${arg}` +} + +function getServerState (uuid) { + return fetch(`resources/http-cache.py?dispatch=state&uuid=${uuid}`) + .then(function (response) { + return response.text() + }).then(function (text) { + return JSON.parse(text) || [] + }) +} + +function run_tests (tests) { + tests.forEach(function (test) { + promise_test(makeTest(test), test.name) + }) +} + +var contentStore = {} +function http_content (csKey) { + if (csKey in contentStore) { + return contentStore[csKey] + } else { + var content = btoa(Math.random() * Date.now()) + contentStore[csKey] = content + return content + } +} diff --git a/test/wpt/tests/fetch/http-cache/invalidate.any.js b/test/wpt/tests/fetch/http-cache/invalidate.any.js new file mode 100644 index 0000000..9f8090a --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/invalidate.any.js @@ -0,0 +1,235 @@ +// META: global=window,worker +// META: title=HTTP Cache - Invalidation +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: 'HTTP cache invalidates after a successful response from a POST', + requests: [ + { + template: "fresh" + }, { + request_method: "POST", + request_body: "abc" + }, { + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache does not invalidate after a failed response from an unsafe request', + requests: [ + { + template: "fresh" + }, { + request_method: "POST", + request_body: "abc", + response_status: [500, "Internal Server Error"] + }, { + expected_type: "cached" + } + ] + }, + { + name: 'HTTP cache invalidates after a successful response from a PUT', + requests: [ + { + template: "fresh" + }, { + template: "fresh", + 
request_method: "PUT", + request_body: "abc" + }, { + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache invalidates after a successful response from a DELETE', + requests: [ + { + template: "fresh" + }, { + request_method: "DELETE", + request_body: "abc" + }, { + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache invalidates after a successful response from an unknown method', + requests: [ + { + template: "fresh" + }, { + request_method: "FOO", + request_body: "abc" + }, { + expected_type: "not_cached" + } + ] + }, + + + { + name: 'HTTP cache invalidates Location URL after a successful response from a POST', + requests: [ + { + template: "location" + }, { + request_method: "POST", + request_body: "abc", + template: "lcl_response" + }, { + template: "location", + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache does not invalidate Location URL after a failed response from an unsafe request', + requests: [ + { + template: "location" + }, { + template: "lcl_response", + request_method: "POST", + request_body: "abc", + response_status: [500, "Internal Server Error"] + }, { + template: "location", + expected_type: "cached" + } + ] + }, + { + name: 'HTTP cache invalidates Location URL after a successful response from a PUT', + requests: [ + { + template: "location" + }, { + template: "lcl_response", + request_method: "PUT", + request_body: "abc" + }, { + template: "location", + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache invalidates Location URL after a successful response from a DELETE', + requests: [ + { + template: "location" + }, { + template: "lcl_response", + request_method: "DELETE", + request_body: "abc" + }, { + template: "location", + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache invalidates Location URL after a successful response from an unknown method', + requests: [ + { + template: "location" + }, { + template: "lcl_response", + request_method: "FOO", + request_body: "abc" + }, { + template: "location", + expected_type: "not_cached" + } + ] + }, + + + + { + name: 'HTTP cache invalidates Content-Location URL after a successful response from a POST', + requests: [ + { + template: "content_location" + }, { + request_method: "POST", + request_body: "abc", + template: "lcl_response" + }, { + template: "content_location", + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache does not invalidate Content-Location URL after a failed response from an unsafe request', + requests: [ + { + template: "content_location" + }, { + template: "lcl_response", + request_method: "POST", + request_body: "abc", + response_status: [500, "Internal Server Error"] + }, { + template: "content_location", + expected_type: "cached" + } + ] + }, + { + name: 'HTTP cache invalidates Content-Location URL after a successful response from a PUT', + requests: [ + { + template: "content_location" + }, { + template: "lcl_response", + request_method: "PUT", + request_body: "abc" + }, { + template: "content_location", + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache invalidates Content-Location URL after a successful response from a DELETE', + requests: [ + { + template: "content_location" + }, { + template: "lcl_response", + request_method: "DELETE", + request_body: "abc" + }, { + template: "content_location", + expected_type: "not_cached" + } + ] + }, + { + name: 'HTTP cache invalidates Content-Location URL after a successful response from an unknown method', + requests: [ + { + template: "content_location" 
+ }, { + template: "lcl_response", + request_method: "FOO", + request_body: "abc" + }, { + template: "content_location", + expected_type: "not_cached" + } + ] + } + +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/partial.any.js b/test/wpt/tests/fetch/http-cache/partial.any.js new file mode 100644 index 0000000..3f23b59 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/partial.any.js @@ -0,0 +1,208 @@ +// META: global=window,worker +// META: title=HTTP Cache - Partial Content +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: "HTTP cache stores partial content and reuses it", + requests: [ + { + request_headers: [ + ['Range', "bytes=-5"] + ], + response_status: [206, "Partial Content"], + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Content-Range", "bytes 4-9/10"] + ], + response_body: "01234", + expected_request_headers: [ + ["Range", "bytes=-5"] + ] + }, + { + request_headers: [ + ["Range", "bytes=-5"] + ], + expected_type: "cached", + expected_status: 206, + expected_response_text: "01234" + } + ] + }, + { + name: "HTTP cache stores complete response and serves smaller ranges from it (byte-range-spec)", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"] + ], + response_body: "01234567890" + }, + { + request_headers: [ + ['Range', "bytes=0-1"] + ], + expected_type: "cached", + expected_status: 206, + expected_response_text: "01" + }, + ] + }, + { + name: "HTTP cache stores complete response and serves smaller ranges from it (absent last-byte-pos)", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"], + ], + response_body: "01234567890" + }, + { + request_headers: [ + ['Range', "bytes=1-"] + ], + expected_type: "cached", + expected_status: 206, + expected_response_text: "1234567890" + } + ] + }, + { + name: "HTTP cache stores complete response and serves smaller ranges from it (suffix-byte-range-spec)", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"], + ], + response_body: "0123456789A" + }, + { + request_headers: [ + ['Range', "bytes=-1"] + ], + expected_type: "cached", + expected_status: 206, + expected_response_text: "A" + } + ] + }, + { + name: "HTTP cache stores complete response and serves smaller ranges from it with only-if-cached", + requests: [ + { + response_headers: [ + ["Cache-Control", "max-age=3600"] + ], + response_body: "01234567890" + }, + { + request_headers: [ + ['Range', "bytes=0-1"] + ], + mode: "same-origin", + cache: "only-if-cached", + expected_type: "cached", + expected_status: 206, + expected_response_text: "01" + }, + ] + }, + { + name: "HTTP cache stores partial response and serves smaller ranges from it (byte-range-spec)", + requests: [ + { + request_headers: [ + ['Range', "bytes=-5"] + ], + response_status: [206, "Partial Content"], + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Content-Range", "bytes 4-9/10"] + ], + response_body: "01234" + }, + { + request_headers: [ + ['Range', "bytes=6-8"] + ], + expected_type: "cached", + expected_status: 206, + expected_response_text: "234" + } + ] + }, + { + name: "HTTP cache stores partial response and serves smaller ranges from it (absent last-byte-pos)", + requests: [ + { + request_headers: [ + ['Range', "bytes=-5"] + ], + response_status: [206, "Partial Content"], + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Content-Range", "bytes 4-9/10"] + ], + 
response_body: "01234" + }, + { + request_headers: [ + ["Range", "bytes=6-"] + ], + expected_type: "cached", + expected_status: 206, + expected_response_text: "234" + } + ] + }, + { + name: "HTTP cache stores partial response and serves smaller ranges from it (suffix-byte-range-spec)", + requests: [ + { + request_headers: [ + ['Range', "bytes=-5"] + ], + response_status: [206, "Partial Content"], + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Content-Range", "bytes 4-9/10"] + ], + response_body: "01234" + }, + { + request_headers: [ + ['Range', "bytes=-1"] + ], + expected_type: "cached", + expected_status: 206, + expected_response_text: "4" + } + ] + }, + { + name: "HTTP cache stores partial content and completes it", + requests: [ + { + request_headers: [ + ['Range', "bytes=-5"] + ], + response_status: [206, "Partial Content"], + response_headers: [ + ["Cache-Control", "max-age=3600"], + ["Content-Range", "bytes 0-4/10"] + ], + response_body: "01234" + }, + { + expected_request_headers: [ + ["range", "bytes=5-"] + ] + } + ] + }, +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/post-patch.any.js b/test/wpt/tests/fetch/http-cache/post-patch.any.js new file mode 100644 index 0000000..0a69baa --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/post-patch.any.js @@ -0,0 +1,46 @@ +// META: global=window,worker +// META: title=HTTP Cache - Caching POST and PATCH responses +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: "HTTP cache uses content after PATCH request with response containing Content-Location and cache-allowing header", + requests: [ + { + request_method: "PATCH", + request_body: "abc", + response_status: [200, "OK"], + response_headers: [ + ['Cache-Control', "private, max-age=1000"], + ['Content-Location', ""] + ], + response_body: "abc" + }, + { + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache uses content after POST request with response containing Content-Location and cache-allowing header", + requests: [ + { + request_method: "POST", + request_body: "abc", + response_status: [200, "OK"], + response_headers: [ + ['Cache-Control', "private, max-age=1000"], + ['Content-Location', ""] + ], + response_body: "abc" + }, + { + expected_type: "cached" + } + ] + } +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/resources/http-cache.py b/test/wpt/tests/fetch/http-cache/resources/http-cache.py new file mode 100644 index 0000000..3ab610d --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/resources/http-cache.py @@ -0,0 +1,124 @@ +import datetime +import json +import time +from base64 import b64decode + +from wptserve.utils import isomorphic_decode, isomorphic_encode + +NOTEHDRS = set([u'content-type', u'access-control-allow-origin', u'last-modified', u'etag']) +NOBODYSTATUS = set([204, 304]) +LOCATIONHDRS = set([u'location', u'content-location']) +DATEHDRS = set([u'date', u'expires', u'last-modified']) + +def main(request, response): + dispatch = request.GET.first(b"dispatch", None) + uuid = request.GET.first(b"uuid", None) + response.headers.set(b"Access-Control-Allow-Credentials", b"true") + + if request.method == u"OPTIONS": + return handle_preflight(uuid, request, response) + if not uuid: + response.status = (404, b"Not Found") + response.headers.set(b"Content-Type", b"text/plain") + return b"UUID not found" + if dispatch == b'test': + return handle_test(uuid, request, response) + elif dispatch == 
b'state': + return handle_state(uuid, request, response) + response.status = (404, b"Not Found") + response.headers.set(b"Content-Type", b"text/plain") + return b"Fallthrough" + +def handle_preflight(uuid, request, response): + response.status = (200, b"OK") + response.headers.set(b"Access-Control-Allow-Origin", request.headers.get(b"origin") or '*') + response.headers.set(b"Access-Control-Allow-Methods", b"GET") + response.headers.set(b"Access-Control-Allow-Headers", request.headers.get(b"Access-Control-Request-Headers") or "*") + response.headers.set(b"Access-Control-Max-Age", b"86400") + return b"Preflight request" + +def handle_state(uuid, request, response): + response.headers.set(b"Content-Type", b"text/plain") + return json.dumps(request.server.stash.take(uuid)) + +def handle_test(uuid, request, response): + server_state = request.server.stash.take(uuid) or [] + try: + requests = json.loads(b64decode(request.headers.get(b'Test-Requests', b""))) + except: + response.status = (400, b"Bad Request") + response.headers.set(b"Content-Type", b"text/plain") + return b"No or bad Test-Requests request header" + config = requests[len(server_state)] + if not config: + response.status = (404, b"Not Found") + response.headers.set(b"Content-Type", b"text/plain") + return b"Config not found" + noted_headers = {} + now = time.time() + for header in config.get(u'response_headers', []): + if header[0].lower() in LOCATIONHDRS: # magic locations + if (len(header[1]) > 0): + header[1] = u"%s&target=%s" % (request.url, header[1]) + else: + header[1] = request.url + if header[0].lower() in DATEHDRS and isinstance(header[1], int): # magic dates + header[1] = http_date(now, header[1]) + response.headers.set(isomorphic_encode(header[0]), isomorphic_encode(header[1])) + if header[0].lower() in NOTEHDRS: + noted_headers[header[0].lower()] = header[1] + state = { + u'now': now, + u'request_method': request.method, + u'request_headers': dict([[isomorphic_decode(h.lower()), isomorphic_decode(request.headers[h])] for h in request.headers]), + u'response_headers': noted_headers + } + server_state.append(state) + request.server.stash.put(uuid, server_state) + + if u"access-control-allow-origin" not in noted_headers: + response.headers.set(b"Access-Control-Allow-Origin", b"*") + if u"content-type" not in noted_headers: + response.headers.set(b"Content-Type", b"text/plain") + response.headers.set(b"Server-Request-Count", len(server_state)) + + code, phrase = config.get(u"response_status", [200, b"OK"]) + if config.get(u"expected_type", u"").endswith(u'validated'): + ref_hdrs = server_state[0][u'response_headers'] + previous_lm = ref_hdrs.get(u'last-modified', False) + if previous_lm and request.headers.get(b"If-Modified-Since", False) == isomorphic_encode(previous_lm): + code, phrase = [304, b"Not Modified"] + previous_etag = ref_hdrs.get(u'etag', False) + if previous_etag and request.headers.get(b"If-None-Match", False) == isomorphic_encode(previous_etag): + code, phrase = [304, b"Not Modified"] + if code != 304: + code, phrase = [999, b'304 Not Generated'] + response.status = (code, phrase) + + content = config.get(u"response_body", uuid) + if code in NOBODYSTATUS: + return b"" + return content + + +def get_header(headers, header_name): + result = None + for header in headers: + if header[0].lower() == header_name.lower(): + result = header[1] + return result + +WEEKDAYS = [u'Mon', u'Tue', u'Wed', u'Thu', u'Fri', u'Sat', u'Sun'] +MONTHS = [None, u'Jan', u'Feb', u'Mar', u'Apr', u'May', u'Jun', u'Jul', + u'Aug', 
u'Sep', u'Oct', u'Nov', u'Dec'] + +def http_date(now, delta_secs=0): + date = datetime.datetime.utcfromtimestamp(now + delta_secs) + return u"%s, %.2d %s %.4d %.2d:%.2d:%.2d GMT" % ( + WEEKDAYS[date.weekday()], + date.day, + MONTHS[date.month], + date.year, + date.hour, + date.minute, + date.second) diff --git a/test/wpt/tests/fetch/http-cache/resources/securedimage.py b/test/wpt/tests/fetch/http-cache/resources/securedimage.py new file mode 100644 index 0000000..cac9cfe --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/resources/securedimage.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- + +from wptserve.utils import isomorphic_decode, isomorphic_encode + +def main(request, response): + image_url = str.replace(request.url, u"fetch/http-cache/resources/securedimage.py", u"images/green.png") + + if b"authorization" not in request.headers: + response.status = 401 + response.headers.set(b"WWW-Authenticate", b"Basic") + return + else: + auth = request.headers.get(b"Authorization") + if auth != b"Basic dGVzdHVzZXI6dGVzdHBhc3M=": + response.set_error(403, u"Invalid username or password - " + isomorphic_decode(auth)) + return + + response.status = 301 + response.headers.set(b"Location", isomorphic_encode(image_url)) diff --git a/test/wpt/tests/fetch/http-cache/resources/split-cache-popup-with-iframe.html b/test/wpt/tests/fetch/http-cache/resources/split-cache-popup-with-iframe.html new file mode 100644 index 0000000..48b1618 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/resources/split-cache-popup-with-iframe.html @@ -0,0 +1,34 @@ + + + + + HTTP Cache - helper + + + + + + + + + diff --git a/test/wpt/tests/fetch/http-cache/resources/split-cache-popup.html b/test/wpt/tests/fetch/http-cache/resources/split-cache-popup.html new file mode 100644 index 0000000..edb5794 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/resources/split-cache-popup.html @@ -0,0 +1,28 @@ + + + + + HTTP Cache - helper + + + + + + + + + diff --git a/test/wpt/tests/fetch/http-cache/split-cache.html b/test/wpt/tests/fetch/http-cache/split-cache.html new file mode 100644 index 0000000..fe93d2e --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/split-cache.html @@ -0,0 +1,158 @@ + + + + + HTTP Cache - Partitioning by site + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/http-cache/status.any.js b/test/wpt/tests/fetch/http-cache/status.any.js new file mode 100644 index 0000000..10c83a2 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/status.any.js @@ -0,0 +1,60 @@ +// META: global=window,worker +// META: title=HTTP Cache - Status Codes +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = []; +function check_status(status) { + var code = status[0]; + var phrase = status[1]; + var body = status[2]; + if (body === undefined) { + body = http_content(code); + } + tests.push({ + name: "HTTP cache goes to the network if it has a stale " + code + " response", + requests: [ + { + template: "stale", + response_status: [code, phrase], + response_body: body + }, { + expected_type: "not_cached", + response_status: [code, phrase], + response_body: body + } + ] + }) + tests.push({ + name: "HTTP cache avoids going to the network if it has a fresh " + code + " response", + requests: [ + { + template: "fresh", + response_status: [code, phrase], + response_body: body + }, { + expected_type: "cached", + response_status: [code, phrase], + response_body: body + } + ] + }) +} +[ + [200, "OK"], + [203, "Non-Authoritative Information"], + [204,
"No Content", null], + [299, "Whatever"], + [400, "Bad Request"], + [404, "Not Found"], + [410, "Gone"], + [499, "Whatever"], + [500, "Internal Server Error"], + [502, "Bad Gateway"], + [503, "Service Unavailable"], + [504, "Gateway Timeout"], + [599, "Whatever"] +].forEach(check_status); +run_tests(tests); diff --git a/test/wpt/tests/fetch/http-cache/vary.any.js b/test/wpt/tests/fetch/http-cache/vary.any.js new file mode 100644 index 0000000..2cfd226 --- /dev/null +++ b/test/wpt/tests/fetch/http-cache/vary.any.js @@ -0,0 +1,313 @@ +// META: global=window,worker +// META: title=HTTP Cache - Vary +// META: timeout=long +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=http-cache.js + +var tests = [ + { + name: "HTTP cache reuses Vary response when request matches", + requests: [ + { + request_headers: [ + ["Foo", "1"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo"] + ] + }, + { + request_headers: [ + ["Foo", "1"] + ], + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache doesn't use Vary response when request doesn't match", + requests: [ + { + request_headers: [ + ["Foo", "1"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo"] + ] + }, + { + request_headers: [ + ["Foo", "2"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't use Vary response when request omits variant header", + requests: [ + { + request_headers: [ + ["Foo", "1"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo"] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't invalidate existing Vary response", + requests: [ + { + request_headers: [ + ["Foo", "1"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo"] + ], + response_body: http_content('foo_1') + }, + { + request_headers: [ + ["Foo", "2"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo"] + ], + expected_type: "not_cached", + response_body: http_content('foo_2'), + }, + { + request_headers: [ + ["Foo", "1"] + ], + response_body: http_content('foo_1'), + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache doesn't pay attention to headers not listed in Vary", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Other", "2"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo"] + ], + }, + { + request_headers: [ + ["Foo", "1"], + ["Other", "3"] + ], + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache reuses two-way Vary response when request matches", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo, Bar"] + ] + }, + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc"] + ], + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache doesn't use two-way Vary response when request doesn't match", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo, Bar"] + ] + }, + { + request_headers: [ + ["Foo", "2"], + ["Bar", "abc"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't use two-way Vary response when request omits variant header", + requests: [ + { + request_headers: [ + ["Foo", "1"] + ], + response_headers: [ + 
["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo, Bar"] + ] + }, + { + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache reuses three-way Vary response when request matches", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc"], + ["Baz", "789"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo, Bar, Baz"] + ] + }, + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc"], + ["Baz", "789"] + ], + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache doesn't use three-way Vary response when request doesn't match", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc"], + ["Baz", "789"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo, Bar, Baz"] + ] + }, + { + request_headers: [ + ["Foo", "2"], + ["Bar", "abc"], + ["Baz", "789"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache doesn't use three-way Vary response when request doesn't match, regardless of header order", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc4"], + ["Baz", "789"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo, Bar, Baz"] + ] + }, + { + request_headers: [ + ["Foo", "1"], + ["Bar", "abc"], + ["Baz", "789"] + ], + expected_type: "not_cached" + } + ] + }, + { + name: "HTTP cache uses three-way Vary response when both request and the original request omited a variant header", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Baz", "789"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "Foo, Bar, Baz"] + ] + }, + { + request_headers: [ + ["Foo", "1"], + ["Baz", "789"] + ], + expected_type: "cached" + } + ] + }, + { + name: "HTTP cache doesn't use Vary response with a field value of '*'", + requests: [ + { + request_headers: [ + ["Foo", "1"], + ["Baz", "789"] + ], + response_headers: [ + ["Expires", 5000], + ["Last-Modified", -3000], + ["Vary", "*"] + ] + }, + { + request_headers: [ + ["*", "1"], + ["Baz", "789"] + ], + expected_type: "not_cached" + } + ] + } +]; +run_tests(tests); diff --git a/test/wpt/tests/fetch/images/canvas-remote-read-remote-image-redirect.html b/test/wpt/tests/fetch/images/canvas-remote-read-remote-image-redirect.html new file mode 100644 index 0000000..4a887f3 --- /dev/null +++ b/test/wpt/tests/fetch/images/canvas-remote-read-remote-image-redirect.html @@ -0,0 +1,28 @@ + + +Load a no-cors image from a same-origin URL that redirects to a cross-origin URL that redirects to the initial origin + + + + diff --git a/test/wpt/tests/fetch/metadata/META.yml b/test/wpt/tests/fetch/metadata/META.yml new file mode 100644 index 0000000..85f0a7d --- /dev/null +++ b/test/wpt/tests/fetch/metadata/META.yml @@ -0,0 +1,4 @@ +spec: https://w3c.github.io/webappsec-fetch-metadata/ +suggested_reviewers: + - mikewest + - iVanlIsh diff --git a/test/wpt/tests/fetch/metadata/README.md b/test/wpt/tests/fetch/metadata/README.md new file mode 100644 index 0000000..34864d4 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/README.md @@ -0,0 +1,9 @@ +Fetch Metadata Tests +==================== + +This directory contains tests related to the Fetch Metadata proposal: + +: Explainer +:: +: "Spec" +:: diff --git a/test/wpt/tests/fetch/metadata/audio-worklet.https.html b/test/wpt/tests/fetch/metadata/audio-worklet.https.html new file mode 100644 index 0000000..3b768ef --- /dev/null +++ b/test/wpt/tests/fetch/metadata/audio-worklet.https.html @@ -0,0 
+1,20 @@ + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/embed.https.sub.tentative.html b/test/wpt/tests/fetch/metadata/embed.https.sub.tentative.html new file mode 100644 index 0000000..1900dbd --- /dev/null +++ b/test/wpt/tests/fetch/metadata/embed.https.sub.tentative.html @@ -0,0 +1,63 @@ + + + + + + + + + +

Relevant issue: +<embed> should support loading random HTML documents, like <object> +

+ + diff --git a/test/wpt/tests/fetch/metadata/fetch-preflight.https.sub.any.js b/test/wpt/tests/fetch/metadata/fetch-preflight.https.sub.any.js new file mode 100644 index 0000000..d524743 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/fetch-preflight.https.sub.any.js @@ -0,0 +1,29 @@ +// META: global=window,worker +// META: script=/fetch/metadata/resources/helper.js + +// Site +promise_test(t => { + return validate_expectations_custom_url("https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", + { + mode: "cors", + headers: { 'x-test': 'testing' } + }, { + "site": "same-site", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Same-site fetch with preflight"); +}, "Same-site fetch with preflight"); + +promise_test(t => { + return validate_expectations_custom_url("https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", + { + mode: "cors", + headers: { 'x-test': 'testing' } + }, { + "site": "cross-site", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Cross-site fetch with preflight"); +}, "Cross-site fetch with preflight"); diff --git a/test/wpt/tests/fetch/metadata/fetch.https.sub.any.js b/test/wpt/tests/fetch/metadata/fetch.https.sub.any.js new file mode 100644 index 0000000..aeec5cd --- /dev/null +++ b/test/wpt/tests/fetch/metadata/fetch.https.sub.any.js @@ -0,0 +1,58 @@ +// META: global=window,worker +// META: script=/fetch/metadata/resources/helper.js + +// Site +promise_test(t => { + return validate_expectations_custom_url("https://{{host}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {}, { + "site": "same-origin", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Same-origin fetch"); +}, "Same-origin fetch"); + +promise_test(t => { + return validate_expectations_custom_url("https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {}, { + "site": "same-site", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Same-site fetch"); +}, "Same-site fetch"); + +promise_test(t => { + return validate_expectations_custom_url("https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {}, { + "site": "cross-site", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Cross-site fetch"); +}, "Cross-site fetch"); + +// Mode +promise_test(t => { + return validate_expectations_custom_url("https://{{host}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {mode: "same-origin"}, { + "site": "same-origin", + "user": "", + "mode": "same-origin", + "dest": "empty" + }, "Same-origin mode"); +}, "Same-origin mode"); + +promise_test(t => { + return validate_expectations_custom_url("https://{{host}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {mode: "cors"}, { + "site": "same-origin", + "user": "", + "mode": "cors", + "dest": "empty" + }, "CORS mode"); +}, "CORS mode"); + +promise_test(t => { + return validate_expectations_custom_url("https://{{host}}:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {mode: "no-cors"}, { + "site": "same-origin", + "user": "", + "mode": "no-cors", + "dest": "empty" + }, "no-CORS mode"); +}, "no-CORS mode"); diff --git a/test/wpt/tests/fetch/metadata/generated/appcache-manifest.https.sub.html b/test/wpt/tests/fetch/metadata/generated/appcache-manifest.https.sub.html new file mode 100644 index 0000000..cf322fd --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/appcache-manifest.https.sub.html @@ -0,0 +1,341 @@ + + + + + HTTP headers on request for 
Appcache manifest + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/audioworklet.https.sub.html b/test/wpt/tests/fetch/metadata/generated/audioworklet.https.sub.html new file mode 100644 index 0000000..64fb760 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/audioworklet.https.sub.html @@ -0,0 +1,271 @@ + + + + + HTTP headers on request for AudioWorklet module + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/css-font-face.https.sub.tentative.html b/test/wpt/tests/fetch/metadata/generated/css-font-face.https.sub.tentative.html new file mode 100644 index 0000000..332effe --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/css-font-face.https.sub.tentative.html @@ -0,0 +1,230 @@ + + + + + HTTP headers on request for CSS font-face + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/css-font-face.sub.tentative.html b/test/wpt/tests/fetch/metadata/generated/css-font-face.sub.tentative.html new file mode 100644 index 0000000..8a0b90c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/css-font-face.sub.tentative.html @@ -0,0 +1,196 @@ + + + + + HTTP headers on request for CSS font-face + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/css-images.https.sub.tentative.html b/test/wpt/tests/fetch/metadata/generated/css-images.https.sub.tentative.html new file mode 100644 index 0000000..3fa2401 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/css-images.https.sub.tentative.html @@ -0,0 +1,1384 @@ + + + + + + HTTP headers on request for CSS image-accepting properties + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/css-images.sub.tentative.html b/test/wpt/tests/fetch/metadata/generated/css-images.sub.tentative.html new file mode 100644 index 0000000..f1ef27c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/css-images.sub.tentative.html @@ -0,0 +1,1099 @@ + + + + + + HTTP headers on request for CSS image-accepting properties + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-a.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-a.https.sub.html new file mode 100644 index 0000000..dffd36c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-a.https.sub.html @@ -0,0 +1,482 @@ + + + + + + HTTP headers on request for HTML "a" element navigation + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-a.sub.html b/test/wpt/tests/fetch/metadata/generated/element-a.sub.html new file mode 100644 index 0000000..0661de3 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-a.sub.html @@ -0,0 +1,342 @@ + + + + + + HTTP headers on request for HTML "a" element navigation + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-area.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-area.https.sub.html new file mode 100644 index 0000000..be3f5f9 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-area.https.sub.html @@ -0,0 +1,482 @@ + + + + + + HTTP headers on request for HTML "area" element navigation + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-area.sub.html b/test/wpt/tests/fetch/metadata/generated/element-area.sub.html new file mode 100644 index 0000000..5f5c338 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-area.sub.html @@ -0,0 +1,342 @@ + + + + + + HTTP headers on request for HTML "area" element navigation + + + + + + + diff --git 
a/test/wpt/tests/fetch/metadata/generated/element-audio.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-audio.https.sub.html new file mode 100644 index 0000000..a9d9512 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-audio.https.sub.html @@ -0,0 +1,325 @@ + + + + + HTTP headers on request for HTML "audio" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-audio.sub.html b/test/wpt/tests/fetch/metadata/generated/element-audio.sub.html new file mode 100644 index 0000000..2b62632 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-audio.sub.html @@ -0,0 +1,229 @@ + + + + + HTTP headers on request for HTML "audio" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-embed.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-embed.https.sub.html new file mode 100644 index 0000000..819bed8 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-embed.https.sub.html @@ -0,0 +1,224 @@ + + + + + HTTP headers on request for HTML "embed" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-embed.sub.html b/test/wpt/tests/fetch/metadata/generated/element-embed.sub.html new file mode 100644 index 0000000..b6e14a5 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-embed.sub.html @@ -0,0 +1,190 @@ + + + + + HTTP headers on request for HTML "embed" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-frame.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-frame.https.sub.html new file mode 100644 index 0000000..17504ff --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-frame.https.sub.html @@ -0,0 +1,309 @@ + + + + + HTTP headers on request for HTML "frame" element source + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-frame.sub.html b/test/wpt/tests/fetch/metadata/generated/element-frame.sub.html new file mode 100644 index 0000000..2d9a7ec --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-frame.sub.html @@ -0,0 +1,250 @@ + + + + + HTTP headers on request for HTML "frame" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-iframe.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-iframe.https.sub.html new file mode 100644 index 0000000..fba1c8b --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-iframe.https.sub.html @@ -0,0 +1,309 @@ + + + + + HTTP headers on request for HTML "frame" element source + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-iframe.sub.html b/test/wpt/tests/fetch/metadata/generated/element-iframe.sub.html new file mode 100644 index 0000000..6f71cc0 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-iframe.sub.html @@ -0,0 +1,250 @@ + + + + + HTTP headers on request for HTML "frame" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-img-environment-change.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-img-environment-change.https.sub.html new file mode 100644 index 0000000..a19aa11 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-img-environment-change.https.sub.html @@ -0,0 +1,357 @@ + + + + + HTTP headers on image request triggered by change to environment + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-img-environment-change.sub.html 
b/test/wpt/tests/fetch/metadata/generated/element-img-environment-change.sub.html new file mode 100644 index 0000000..9665872 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-img-environment-change.sub.html @@ -0,0 +1,270 @@ + + + + + HTTP headers on image request triggered by change to environment + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-img.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-img.https.sub.html new file mode 100644 index 0000000..51d6e08 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-img.https.sub.html @@ -0,0 +1,645 @@ + + + + + HTTP headers on request for HTML "img" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-img.sub.html b/test/wpt/tests/fetch/metadata/generated/element-img.sub.html new file mode 100644 index 0000000..5a4b152 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-img.sub.html @@ -0,0 +1,456 @@ + + + + + HTTP headers on request for HTML "img" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-input-image.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-input-image.https.sub.html new file mode 100644 index 0000000..7fa6740 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-input-image.https.sub.html @@ -0,0 +1,229 @@ + + + + + HTTP headers on request for HTML "input" element with type="button" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-input-image.sub.html b/test/wpt/tests/fetch/metadata/generated/element-input-image.sub.html new file mode 100644 index 0000000..fb2a146 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-input-image.sub.html @@ -0,0 +1,184 @@ + + + + + HTTP headers on request for HTML "input" element with type="button" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-link-icon.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-link-icon.https.sub.html new file mode 100644 index 0000000..b244960 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-link-icon.https.sub.html @@ -0,0 +1,371 @@ + + + + + + HTTP headers on request for HTML "link" element with rel="icon" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-link-icon.sub.html b/test/wpt/tests/fetch/metadata/generated/element-link-icon.sub.html new file mode 100644 index 0000000..e9226c1 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-link-icon.sub.html @@ -0,0 +1,279 @@ + + + + + + HTTP headers on request for HTML "link" element with rel="icon" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-link-prefetch.https.optional.sub.html b/test/wpt/tests/fetch/metadata/generated/element-link-prefetch.https.optional.sub.html new file mode 100644 index 0000000..bdd684a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-link-prefetch.https.optional.sub.html @@ -0,0 +1,559 @@ + + + + + + HTTP headers on request for HTML "link" element with rel="prefetch" + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-link-prefetch.optional.sub.html b/test/wpt/tests/fetch/metadata/generated/element-link-prefetch.optional.sub.html new file mode 100644 index 0000000..c224488 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-link-prefetch.optional.sub.html @@ -0,0 +1,275 @@ + + + + + + HTTP headers on request for HTML "link" element with rel="prefetch" + + + + + + + + 
diff --git a/test/wpt/tests/fetch/metadata/generated/element-meta-refresh.https.optional.sub.html b/test/wpt/tests/fetch/metadata/generated/element-meta-refresh.https.optional.sub.html new file mode 100644 index 0000000..3a1a8eb --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-meta-refresh.https.optional.sub.html @@ -0,0 +1,276 @@ + + + + + HTTP headers on request for HTML "meta" element with http-equiv="refresh" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-meta-refresh.optional.sub.html b/test/wpt/tests/fetch/metadata/generated/element-meta-refresh.optional.sub.html new file mode 100644 index 0000000..df3e92e --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-meta-refresh.optional.sub.html @@ -0,0 +1,225 @@ + + + + + HTTP headers on request for HTML "meta" element with http-equiv="refresh" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-picture.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-picture.https.sub.html new file mode 100644 index 0000000..ba6636a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-picture.https.sub.html @@ -0,0 +1,997 @@ + + + + + HTTP headers on request for HTML "picture" element source + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-picture.sub.html b/test/wpt/tests/fetch/metadata/generated/element-picture.sub.html new file mode 100644 index 0000000..64f851c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-picture.sub.html @@ -0,0 +1,721 @@ + + + + + HTTP headers on request for HTML "picture" element source + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-script.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-script.https.sub.html new file mode 100644 index 0000000..dcdcba2 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-script.https.sub.html @@ -0,0 +1,593 @@ + + + + + HTTP headers on request for HTML "script" element source + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-script.sub.html b/test/wpt/tests/fetch/metadata/generated/element-script.sub.html new file mode 100644 index 0000000..a252669 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-script.sub.html @@ -0,0 +1,488 @@ + + + + + HTTP headers on request for HTML "script" element source + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-video-poster.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-video-poster.https.sub.html new file mode 100644 index 0000000..5805b46 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-video-poster.https.sub.html @@ -0,0 +1,243 @@ + + + + + HTTP headers on request for HTML "video" element "poster" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-video-poster.sub.html b/test/wpt/tests/fetch/metadata/generated/element-video-poster.sub.html new file mode 100644 index 0000000..e6cc5ee --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-video-poster.sub.html @@ -0,0 +1,198 @@ + + + + + HTTP headers on request for HTML "video" element "poster" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-video.https.sub.html b/test/wpt/tests/fetch/metadata/generated/element-video.https.sub.html new file mode 100644 index 0000000..971360d --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-video.https.sub.html @@ -0,0 +1,325 @@ + + + + + HTTP headers on request for HTML 
"video" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/element-video.sub.html b/test/wpt/tests/fetch/metadata/generated/element-video.sub.html new file mode 100644 index 0000000..9707413 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/element-video.sub.html @@ -0,0 +1,229 @@ + + + + + HTTP headers on request for HTML "video" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/fetch-via-serviceworker.https.sub.html b/test/wpt/tests/fetch/metadata/generated/fetch-via-serviceworker.https.sub.html new file mode 100644 index 0000000..22f9309 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/fetch-via-serviceworker.https.sub.html @@ -0,0 +1,683 @@ + + + + + + HTTP headers on request using the "fetch" API and passing through a Serive Worker + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/fetch.https.sub.html b/test/wpt/tests/fetch/metadata/generated/fetch.https.sub.html new file mode 100644 index 0000000..dde1dae --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/fetch.https.sub.html @@ -0,0 +1,302 @@ + + + + + HTTP headers on request using the "fetch" API + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/fetch.sub.html b/test/wpt/tests/fetch/metadata/generated/fetch.sub.html new file mode 100644 index 0000000..d28ea9b --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/fetch.sub.html @@ -0,0 +1,220 @@ + + + + + HTTP headers on request using the "fetch" API + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/form-submission.https.sub.html b/test/wpt/tests/fetch/metadata/generated/form-submission.https.sub.html new file mode 100644 index 0000000..988b07c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/form-submission.https.sub.html @@ -0,0 +1,522 @@ + + + + + + HTTP headers on request for HTML form navigation + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/form-submission.sub.html b/test/wpt/tests/fetch/metadata/generated/form-submission.sub.html new file mode 100644 index 0000000..f862062 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/form-submission.sub.html @@ -0,0 +1,400 @@ + + + + + + HTTP headers on request for HTML form navigation + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/header-link.https.sub.html b/test/wpt/tests/fetch/metadata/generated/header-link.https.sub.html new file mode 100644 index 0000000..09f0113 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/header-link.https.sub.html @@ -0,0 +1,529 @@ + + + + + HTTP headers on request for HTTP "Link" header + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/header-link.https.sub.tentative.html b/test/wpt/tests/fetch/metadata/generated/header-link.https.sub.tentative.html new file mode 100644 index 0000000..307c37f --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/header-link.https.sub.tentative.html @@ -0,0 +1,51 @@ + + + + + HTTP headers on request for HTTP "Link" header + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/header-link.sub.html b/test/wpt/tests/fetch/metadata/generated/header-link.sub.html new file mode 100644 index 0000000..8b6cdae --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/header-link.sub.html @@ -0,0 +1,460 @@ + + + + + HTTP headers on request for HTTP "Link" header + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/header-refresh.https.optional.sub.html 
b/test/wpt/tests/fetch/metadata/generated/header-refresh.https.optional.sub.html new file mode 100644 index 0000000..e63ee42 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/header-refresh.https.optional.sub.html @@ -0,0 +1,273 @@ + + + + + + HTTP headers on request for HTTP "Refresh" header + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/header-refresh.optional.sub.html b/test/wpt/tests/fetch/metadata/generated/header-refresh.optional.sub.html new file mode 100644 index 0000000..4674ada --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/header-refresh.optional.sub.html @@ -0,0 +1,222 @@ + + + + + + HTTP headers on request for HTTP "Refresh" header + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.https.sub.html b/test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.https.sub.html new file mode 100644 index 0000000..72d60fc --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.https.sub.html @@ -0,0 +1,254 @@ + + + + + HTTP headers on request for dynamic ECMAScript module import + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.sub.html b/test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.sub.html new file mode 100644 index 0000000..088720c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/script-module-import-dynamic.sub.html @@ -0,0 +1,214 @@ + + + + + HTTP headers on request for dynamic ECMAScript module import + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/script-module-import-static.https.sub.html b/test/wpt/tests/fetch/metadata/generated/script-module-import-static.https.sub.html new file mode 100644 index 0000000..cea3464 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/script-module-import-static.https.sub.html @@ -0,0 +1,288 @@ + + + + + HTTP headers on request for static ECMAScript module import + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/script-module-import-static.sub.html b/test/wpt/tests/fetch/metadata/generated/script-module-import-static.sub.html new file mode 100644 index 0000000..0f94f71 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/script-module-import-static.sub.html @@ -0,0 +1,246 @@ + + + + + HTTP headers on request for static ECMAScript module import + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/serviceworker.https.sub.html b/test/wpt/tests/fetch/metadata/generated/serviceworker.https.sub.html new file mode 100644 index 0000000..12e3736 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/serviceworker.https.sub.html @@ -0,0 +1,170 @@ + + + + + + HTTP headers on request for Service Workers + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/svg-image.https.sub.html b/test/wpt/tests/fetch/metadata/generated/svg-image.https.sub.html new file mode 100644 index 0000000..b059eb3 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/svg-image.https.sub.html @@ -0,0 +1,367 @@ + + + + + + HTTP headers on request for SVG "image" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/svg-image.sub.html b/test/wpt/tests/fetch/metadata/generated/svg-image.sub.html new file mode 100644 index 0000000..a28bbb1 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/svg-image.sub.html @@ -0,0 +1,265 @@ + + + + + + HTTP headers on request for SVG "image" element source + + + + + + + diff --git 
a/test/wpt/tests/fetch/metadata/generated/window-history.https.sub.html b/test/wpt/tests/fetch/metadata/generated/window-history.https.sub.html new file mode 100644 index 0000000..c2b3079 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/window-history.https.sub.html @@ -0,0 +1,237 @@ + + + + + HTTP headers on request for navigation via the HTML History API + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/window-history.sub.html b/test/wpt/tests/fetch/metadata/generated/window-history.sub.html new file mode 100644 index 0000000..333d90c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/window-history.sub.html @@ -0,0 +1,360 @@ + + + + + + HTTP headers on request for navigation via the HTML History API + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/window-location.https.sub.html b/test/wpt/tests/fetch/metadata/generated/window-location.https.sub.html new file mode 100644 index 0000000..4a0d2fd --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/window-location.https.sub.html @@ -0,0 +1,1184 @@ + + + + + + HTTP headers on request for navigation via the HTML Location API + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/window-location.sub.html b/test/wpt/tests/fetch/metadata/generated/window-location.sub.html new file mode 100644 index 0000000..bb3e680 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/window-location.sub.html @@ -0,0 +1,894 @@ + + + + + + HTTP headers on request for navigation via the HTML Location API + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.https.sub.html b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.https.sub.html new file mode 100644 index 0000000..86f1760 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.https.sub.html @@ -0,0 +1,118 @@ + + + + + HTTP headers on request for dedicated worker via the "Worker" constructor + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.sub.html b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.sub.html new file mode 100644 index 0000000..69ac768 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-constructor.sub.html @@ -0,0 +1,204 @@ + + + + + HTTP headers on request for dedicated worker via the "Worker" constructor + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.https.sub.html b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.https.sub.html new file mode 100644 index 0000000..0cd9f35 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.https.sub.html @@ -0,0 +1,268 @@ + + + + + HTTP headers on request for dedicated worker via the "importScripts" API + + + + + diff --git a/test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.sub.html b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.sub.html new file mode 100644 index 0000000..0555bba --- /dev/null +++ b/test/wpt/tests/fetch/metadata/generated/worker-dedicated-importscripts.sub.html @@ -0,0 +1,228 @@ + + + + + HTTP headers on request for dedicated worker via the "importScripts" API + + + + + diff --git a/test/wpt/tests/fetch/metadata/navigation.https.sub.html b/test/wpt/tests/fetch/metadata/navigation.https.sub.html new file mode 100644 index 0000000..32c9cf7 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/navigation.https.sub.html @@ -0,0 
+1,23 @@ + + + + + + diff --git a/test/wpt/tests/fetch/metadata/object.https.sub.html b/test/wpt/tests/fetch/metadata/object.https.sub.html new file mode 100644 index 0000000..fae5b37 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/object.https.sub.html @@ -0,0 +1,62 @@ + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/paint-worklet.https.html b/test/wpt/tests/fetch/metadata/paint-worklet.https.html new file mode 100644 index 0000000..49fc776 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/paint-worklet.https.html @@ -0,0 +1,19 @@ + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/portal.https.sub.html b/test/wpt/tests/fetch/metadata/portal.https.sub.html new file mode 100644 index 0000000..55b555a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/portal.https.sub.html @@ -0,0 +1,50 @@ + + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/preload.https.sub.html b/test/wpt/tests/fetch/metadata/preload.https.sub.html new file mode 100644 index 0000000..29042a8 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/preload.https.sub.html @@ -0,0 +1,50 @@ + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/redirect/multiple-redirect-https-downgrade-upgrade.sub.html b/test/wpt/tests/fetch/metadata/redirect/multiple-redirect-https-downgrade-upgrade.sub.html new file mode 100644 index 0000000..0f8f320 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/redirect/multiple-redirect-https-downgrade-upgrade.sub.html @@ -0,0 +1,18 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/redirect/redirect-http-upgrade.sub.html b/test/wpt/tests/fetch/metadata/redirect/redirect-http-upgrade.sub.html new file mode 100644 index 0000000..fa765b6 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/redirect/redirect-http-upgrade.sub.html @@ -0,0 +1,17 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/redirect/redirect-https-downgrade.sub.html b/test/wpt/tests/fetch/metadata/redirect/redirect-https-downgrade.sub.html new file mode 100644 index 0000000..4e5a48e --- /dev/null +++ b/test/wpt/tests/fetch/metadata/redirect/redirect-https-downgrade.sub.html @@ -0,0 +1,17 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/report.https.sub.html b/test/wpt/tests/fetch/metadata/report.https.sub.html new file mode 100644 index 0000000..b65f7c0 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/report.https.sub.html @@ -0,0 +1,33 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/report.https.sub.html.sub.headers b/test/wpt/tests/fetch/metadata/report.https.sub.html.sub.headers new file mode 100644 index 0000000..1ec5df7 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/report.https.sub.html.sub.headers @@ -0,0 +1,3 @@ +Content-Security-Policy: style-src 'self' 'unsafe-inline'; report-uri /fetch/metadata/resources/record-header.py?file=report-same-origin +Content-Security-Policy: style-src 'self' 'unsafe-inline'; report-uri https://{{hosts[][www]}}:{{ports[https][0]}}/fetch/metadata/resources/record-header.py?file=report-same-site +Content-Security-Policy: style-src 'self' 'unsafe-inline'; report-uri https://{{hosts[alt][www]}}:{{ports[https][0]}}/fetch/metadata/resources/record-header.py?file=report-cross-site diff --git a/test/wpt/tests/fetch/metadata/resources/appcache-iframe.sub.html b/test/wpt/tests/fetch/metadata/resources/appcache-iframe.sub.html new file mode 100644 index 0000000..cea9a4f --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/appcache-iframe.sub.html @@ -0,0 +1,15 @@ + + + + diff --git 
a/test/wpt/tests/fetch/metadata/resources/dedicatedWorker.js b/test/wpt/tests/fetch/metadata/resources/dedicatedWorker.js new file mode 100644 index 0000000..18626d3 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/dedicatedWorker.js @@ -0,0 +1 @@ +self.postMessage("Loaded"); diff --git a/test/wpt/tests/fetch/metadata/resources/echo-as-json.py b/test/wpt/tests/fetch/metadata/resources/echo-as-json.py new file mode 100644 index 0000000..44f68e8 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/echo-as-json.py @@ -0,0 +1,29 @@ +import json + +from wptserve.utils import isomorphic_decode + +def main(request, response): + headers = [(b"Content-Type", b"application/json"), + (b"Access-Control-Allow-Credentials", b"true")] + + if b"origin" in request.headers: + headers.append((b"Access-Control-Allow-Origin", request.headers[b"origin"])) + + body = u"" + + # If we're in a preflight, verify that `Sec-Fetch-Mode` is `cors`. + if request.method == u'OPTIONS': + if request.headers.get(b"sec-fetch-mode") != b"cors": + return (403, b"Failed"), [], body + + headers.append((b"Access-Control-Allow-Methods", b"*")) + headers.append((b"Access-Control-Allow-Headers", b"*")) + else: + body = json.dumps({ + u"dest": isomorphic_decode(request.headers.get(b"sec-fetch-dest", b"")), + u"mode": isomorphic_decode(request.headers.get(b"sec-fetch-mode", b"")), + u"site": isomorphic_decode(request.headers.get(b"sec-fetch-site", b"")), + u"user": isomorphic_decode(request.headers.get(b"sec-fetch-user", b"")), + }) + + return headers, body diff --git a/test/wpt/tests/fetch/metadata/resources/echo-as-script.py b/test/wpt/tests/fetch/metadata/resources/echo-as-script.py new file mode 100644 index 0000000..1e7bc91 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/echo-as-script.py @@ -0,0 +1,14 @@ +import json + +from wptserve.utils import isomorphic_decode + +def main(request, response): + headers = [(b"Content-Type", b"text/javascript")] + body = u"var header = %s;" % json.dumps({ + u"dest": isomorphic_decode(request.headers.get(b"sec-fetch-dest", b"")), + u"mode": isomorphic_decode(request.headers.get(b"sec-fetch-mode", b"")), + u"site": isomorphic_decode(request.headers.get(b"sec-fetch-site", b"")), + u"user": isomorphic_decode(request.headers.get(b"sec-fetch-user", b"")), + }) + + return headers, body diff --git a/test/wpt/tests/fetch/metadata/resources/es-module.sub.js b/test/wpt/tests/fetch/metadata/resources/es-module.sub.js new file mode 100644 index 0000000..f9668a3 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/es-module.sub.js @@ -0,0 +1 @@ +import '{{GET[moduleId]}}'; diff --git a/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--fallback--sw.js b/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--fallback--sw.js new file mode 100644 index 0000000..09858b2 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--fallback--sw.js @@ -0,0 +1,3 @@ +self.addEventListener('fetch', function(event) { + // Empty event handler - will fallback to the network. 
+}); diff --git a/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--respondWith--sw.js b/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--respondWith--sw.js new file mode 100644 index 0000000..8bf8d8f --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker--respondWith--sw.js @@ -0,0 +1,3 @@ +self.addEventListener('fetch', function(event) { + event.respondWith(fetch(event.request)); +}); diff --git a/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker-frame.html b/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker-frame.html new file mode 100644 index 0000000..9879802 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/fetch-via-serviceworker-frame.html @@ -0,0 +1,3 @@ + + +Page Title diff --git a/test/wpt/tests/fetch/metadata/resources/header-link.py b/test/wpt/tests/fetch/metadata/resources/header-link.py new file mode 100644 index 0000000..de89116 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/header-link.py @@ -0,0 +1,15 @@ +def main(request, response): + """ + Respond with a blank HTML document and a `Link` header which describes + a link relation specified by the requests `location` and `rel` query string + parameters + """ + headers = [ + (b'Content-Type', b'text/html'), + ( + b'Link', + b'<' + request.GET.first(b'location') + b'>; rel=' + request.GET.first(b'rel') + ) + ] + return (200, headers, b'') + diff --git a/test/wpt/tests/fetch/metadata/resources/helper.js b/test/wpt/tests/fetch/metadata/resources/helper.js new file mode 100644 index 0000000..725f9a7 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/helper.js @@ -0,0 +1,42 @@ +function validate_expectations(key, expected, tag) { + return fetch("/fetch/metadata/resources/record-header.py?retrieve=true&file=" + key) + .then(response => response.text()) + .then(text => { + assert_not_equals(text, "No header has been recorded"); + let value = JSON.parse(text); + test(t => assert_equals(value.dest, expected.dest), `${tag}: sec-fetch-dest`); + test(t => assert_equals(value.mode, expected.mode), `${tag}: sec-fetch-mode`); + test(t => assert_equals(value.site, expected.site), `${tag}: sec-fetch-site`); + test(t => assert_equals(value.user, expected.user), `${tag}: sec-fetch-user`); + }); +} + +function validate_expectations_custom_url(url, header, expected, tag) { + return fetch(url, header) + .then(response => response.text()) + .then(text => { + assert_not_equals(text, "No header has been recorded"); + let value = JSON.parse(text); + test(t => assert_equals(value.dest, expected.dest), `${tag}: sec-fetch-dest`); + test(t => assert_equals(value.mode, expected.mode), `${tag}: sec-fetch-mode`); + test(t => assert_equals(value.site, expected.site), `${tag}: sec-fetch-site`); + test(t => assert_equals(value.user, expected.user), `${tag}: sec-fetch-user`); + }); +} + +/** + * @param {object} value + * @param {object} expected + * @param {string} tag + **/ +function assert_header_equals(value, expected, tag) { + if (typeof(value) === "string"){ + assert_not_equals(value, "No header has been recorded"); + value = JSON.parse(value); + } + + test(t => assert_equals(value.dest, expected.dest), `${tag}: sec-fetch-dest`); + test(t => assert_equals(value.mode, expected.mode), `${tag}: sec-fetch-mode`); + test(t => assert_equals(value.site, expected.site), `${tag}: sec-fetch-site`); + test(t => assert_equals(value.user, expected.user), `${tag}: sec-fetch-user`); +} diff --git a/test/wpt/tests/fetch/metadata/resources/helper.sub.js 
b/test/wpt/tests/fetch/metadata/resources/helper.sub.js new file mode 100644 index 0000000..fd179fe --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/helper.sub.js @@ -0,0 +1,67 @@ +'use strict'; + +/** + * Construct a URL which, when followed, will trigger redirection through zero + * or more specified origins and ultimately resolve in the Python handler + * `record-headers.py`. + * + * @param {string} key - the WPT server "stash" name where the request's + * headers should be stored + * @param {string[]} [origins] - zero or more origin names through which the + * request should pass; see the function + * implementation for a completel list of names + * and corresponding origins; If specified, the + * final origin will be used to access the + * `record-headers.py` hander. + * @param {object} [params] - a collection of key-value pairs to include as + * URL "search" parameters in the final request to + * `record-headers.py` + * + * @returns {string} an absolute URL + */ +function makeRequestURL(key, origins, params) { + const byName = { + httpOrigin: 'http://{{host}}:{{ports[http][0]}}', + httpSameSite: 'http://{{hosts[][www]}}:{{ports[http][0]}}', + httpCrossSite: 'http://{{hosts[alt][]}}:{{ports[http][0]}}', + httpsOrigin: 'https://{{host}}:{{ports[https][0]}}', + httpsSameSite: 'https://{{hosts[][www]}}:{{ports[https][0]}}', + httpsCrossSite: 'https://{{hosts[alt][]}}:{{ports[https][0]}}' + }; + const redirectPath = '/fetch/api/resources/redirect.py?location='; + const path = '/fetch/metadata/resources/record-headers.py?key=' + key; + + let requestUrl = path; + if (params) { + requestUrl += '&' + new URLSearchParams(params).toString(); + } + + if (origins && origins.length) { + requestUrl = byName[origins.pop()] + requestUrl; + + while (origins.length) { + requestUrl = byName[origins.pop()] + redirectPath + + encodeURIComponent(requestUrl); + } + } else { + requestUrl = byName.httpsOrigin + requestUrl; + } + + return requestUrl; +} + +function retrieve(key, options) { + return fetch('/fetch/metadata/resources/record-headers.py?retrieve&key=' + key) + .then((response) => { + if (response.status === 204 && options && options.poll) { + return new Promise((resolve) => setTimeout(resolve, 300)) + .then(() => retrieve(key, options)); + } + + if (response.status !== 200) { + throw new Error('Failed to query for recorded headers.'); + } + + return response.text().then((text) => JSON.parse(text)); + }); +} diff --git a/test/wpt/tests/fetch/metadata/resources/message-opener.html b/test/wpt/tests/fetch/metadata/resources/message-opener.html new file mode 100644 index 0000000..eb2af7b --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/message-opener.html @@ -0,0 +1,17 @@ + diff --git a/test/wpt/tests/fetch/metadata/resources/post-to-owner.py b/test/wpt/tests/fetch/metadata/resources/post-to-owner.py new file mode 100644 index 0000000..256dd6e --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/post-to-owner.py @@ -0,0 +1,36 @@ +import json + +from wptserve.utils import isomorphic_decode + +def main(request, response): + headers = [ + (b"Content-Type", b"text/html"), + (b"Cache-Control", b"no-cache, no-store, must-revalidate") + ] + key = request.GET.first(b"key", None) + + # We serialize the key into JSON, so have to decode it first. 
+ if key is not None: + key = key.decode('utf-8') + + body = u""" + + + + """ % (json.dumps({ + u"dest": isomorphic_decode(request.headers.get(b"sec-fetch-dest", b"")), + u"mode": isomorphic_decode(request.headers.get(b"sec-fetch-mode", b"")), + u"site": isomorphic_decode(request.headers.get(b"sec-fetch-site", b"")), + u"user": isomorphic_decode(request.headers.get(b"sec-fetch-user", b"")), + }), json.dumps(key)) + return headers, body diff --git a/test/wpt/tests/fetch/metadata/resources/record-header.py b/test/wpt/tests/fetch/metadata/resources/record-header.py new file mode 100644 index 0000000..29ff2ed --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/record-header.py @@ -0,0 +1,145 @@ +import os +import hashlib +import json + +from wptserve.utils import isomorphic_decode + +def main(request, response): + ## Get the query parameter (key) from URL ## + ## Tests will record POST requests (CSP Report) and GET (rest) ## + if request.GET: + key = request.GET[b'file'] + elif request.POST: + key = request.POST[b'file'] + + ## Convert the key from String to UUID valid String ## + testId = hashlib.md5(key).hexdigest() + + ## Handle the header retrieval request ## + if b'retrieve' in request.GET: + response.writer.write_status(200) + response.writer.write_header(b"Connection", b"close") + response.writer.end_headers() + try: + header_value = request.server.stash.take(testId) + response.writer.write(header_value) + except (KeyError, ValueError) as e: + response.writer.write(u"No header has been recorded") + pass + + response.close_connection = True + + ## Record incoming fetch metadata header value + else: + try: + ## Return a serialized JSON object with one member per header. If the ## + ## header isn't present, the member will contain an empty string. 
## + header = json.dumps({ + u"dest": isomorphic_decode(request.headers.get(b"sec-fetch-dest", b"")), + u"mode": isomorphic_decode(request.headers.get(b"sec-fetch-mode", b"")), + u"site": isomorphic_decode(request.headers.get(b"sec-fetch-site", b"")), + u"user": isomorphic_decode(request.headers.get(b"sec-fetch-user", b"")), + }) + request.server.stash.put(testId, header) + except KeyError: + ## The header is already recorded or it doesn't exist + pass + + ## Prevent the browser from caching returned responses and allow CORS ## + response.headers.set(b"Access-Control-Allow-Origin", b"*") + response.headers.set(b"Cache-Control", b"no-cache, no-store, must-revalidate") + response.headers.set(b"Pragma", b"no-cache") + response.headers.set(b"Expires", b"0") + + ## Add a valid ServiceWorker Content-Type ## + if key.startswith(b"serviceworker"): + response.headers.set(b"Content-Type", b"application/javascript") + + ## Add a valid image Content-Type ## + if key.startswith(b"image"): + response.headers.set(b"Content-Type", b"image/png") + file = open(os.path.join(request.doc_root, u"media", u"1x1-green.png"), u"rb") + image = file.read() + file.close() + return image + + ## Return a valid .vtt content for the tag ## + if key.startswith(b"track"): + return b"WEBVTT" + + ## Return a valid SharedWorker ## + if key.startswith(b"sharedworker"): + response.headers.set(b"Content-Type", b"application/javascript") + file = open(os.path.join(request.doc_root, u"fetch", u"metadata", + u"resources", u"sharedWorker.js"), u"rb") + shared_worker = file.read() + file.close() + return shared_worker + + ## Return a valid font content and Content-Type ## + if key.startswith(b"font"): + response.headers.set(b"Content-Type", b"application/x-font-ttf") + file = open(os.path.join(request.doc_root, u"fonts", u"Ahem.ttf"), u"rb") + font = file.read() + file.close() + return font + + ## Return a valid audio content and Content-Type ## + if key.startswith(b"audio"): + response.headers.set(b"Content-Type", b"audio/mpeg") + file = open(os.path.join(request.doc_root, u"media", u"sound_5.mp3"), u"rb") + audio = file.read() + file.close() + return audio + + ## Return a valid video content and Content-Type ## + if key.startswith(b"video"): + response.headers.set(b"Content-Type", b"video/mp4") + file = open(os.path.join(request.doc_root, u"media", u"A4.mp4"), u"rb") + video = file.read() + file.close() + return video + + ## Return valid style content and Content-Type ## + if key.startswith(b"style"): + response.headers.set(b"Content-Type", b"text/css") + return b"div { }" + + ## Return a valid embed/object content and Content-Type ## + if key.startswith(b"embed") or key.startswith(b"object"): + response.headers.set(b"Content-Type", b"text/html") + return b"EMBED!" 
+ + ## Return a valid image content and Content-Type for redirect requests ## + if key.startswith(b"redirect"): + response.headers.set(b"Content-Type", b"image/jpeg") + file = open(os.path.join(request.doc_root, u"media", u"1x1-green.png"), u"rb") + image = file.read() + file.close() + return image + + ## Return a valid dedicated worker + if key.startswith(b"worker"): + response.headers.set(b"Content-Type", b"application/javascript") + return b"self.postMessage('loaded');" + + ## Return a valid worklet + if key.startswith(b"worklet"): + response.headers.set(b"Content-Type", b"application/javascript") + return b"" + + ## Return a valid XSLT + if key.startswith(b"xslt"): + response.headers.set(b"Content-Type", b"text/xsl") + return b""" + + + + + + +""" + + if key.startswith(b"script"): + response.headers.set(b"Content-Type", b"application/javascript") + return b"void 0;" diff --git a/test/wpt/tests/fetch/metadata/resources/record-headers.py b/test/wpt/tests/fetch/metadata/resources/record-headers.py new file mode 100644 index 0000000..0362fe2 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/record-headers.py @@ -0,0 +1,73 @@ +import os +import uuid +import hashlib +import time +import json + + +def bytes_to_strings(d): + # Recursively convert bytes to strings in `d`. + if not isinstance(d, dict): + if isinstance(d, (tuple,list,set)): + v = [bytes_to_strings(x) for x in d] + return v + else: + if isinstance(d, bytes): + d = d.decode() + return d + + result = {} + for k,v in d.items(): + if isinstance(k, bytes): + k = k.decode() + if isinstance(v, dict): + v = bytes_to_strings(v) + elif isinstance(v, (tuple,list,set)): + v = [bytes_to_strings(x) for x in v] + elif isinstance(v, bytes): + v = v.decode() + result[k] = v + return result + + +def main(request, response): + # This condition avoids false positives from CORS preflight checks, where the + # request under test may be followed immediately by a request to the same URL + # using a different HTTP method. 
+ if b'requireOPTIONS' in request.GET and request.method != b'OPTIONS': + return + + if b'key' in request.GET: + key = request.GET[b'key'] + elif b'key' in request.POST: + key = request.POST[b'key'] + + ## Convert the key from String to UUID valid String ## + testId = hashlib.md5(key).hexdigest() + + ## Handle the header retrieval request ## + if b'retrieve' in request.GET: + recorded_headers = request.server.stash.take(testId) + + if recorded_headers is None: + return (204, [], b'') + + return (200, [], recorded_headers) + + ## Record incoming fetch metadata header value + else: + try: + request.server.stash.put(testId, json.dumps(bytes_to_strings(request.headers))) + except KeyError: + ## The header is already recorded or it doesn't exist + pass + + ## Prevent the browser from caching returned responses and allow CORS ## + response.headers.set(b"Access-Control-Allow-Origin", b"*") + response.headers.set(b"Cache-Control", b"no-cache, no-store, must-revalidate") + response.headers.set(b"Pragma", b"no-cache") + response.headers.set(b"Expires", b"0") + if b"mime" in request.GET: + response.headers.set(b"Content-Type", request.GET.first(b"mime")) + + return request.GET.first(b"body", request.POST.first(b"body", b"")) diff --git a/test/wpt/tests/fetch/metadata/resources/redirectTestHelper.sub.js b/test/wpt/tests/fetch/metadata/resources/redirectTestHelper.sub.js new file mode 100644 index 0000000..1bfbbae --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/redirectTestHelper.sub.js @@ -0,0 +1,167 @@ +function createVideoElement() { + let el = document.createElement('video'); + el.src = '/media/movie_5.mp4'; + el.setAttribute('controls', ''); + el.setAttribute('crossorigin', ''); + return el; +} + +function createTrack() { + let el = document.createElement('track'); + el.setAttribute('default', ''); + el.setAttribute('kind', 'captions'); + el.setAttribute('srclang', 'en'); + return el; +} + +let secureRedirectURL = 'https://{{host}}:{{ports[https][0]}}/fetch/api/resources/redirect.py?location='; +let insecureRedirectURL = 'http://{{host}}:{{ports[http][0]}}/fetch/api/resources/redirect.py?location='; +let secureTestURL = 'https://{{host}}:{{ports[https][0]}}/fetch/metadata/'; +let insecureTestURL = 'http://{{host}}:{{ports[http][0]}}/fetch/metadata/'; + +// Helper to craft an URL that will go from HTTPS => HTTP => HTTPS to +// simulate us downgrading then upgrading again during the same redirect chain. +function MultipleRedirectTo(partialPath) { + let finalURL = insecureRedirectURL + encodeURIComponent(secureTestURL + partialPath); + return secureRedirectURL + encodeURIComponent(finalURL); +} + +// Helper to craft an URL that will go from HTTP => HTTPS to simulate upgrading a +// given request. +function upgradeRedirectTo(partialPath) { + return insecureRedirectURL + encodeURIComponent(secureTestURL + partialPath); +} + +// Helper to craft an URL that will go from HTTPS => HTTP to simulate downgrading a +// given request. +function downgradeRedirectTo(partialPath) { + return secureRedirectURL + encodeURIComponent(insecureTestURL + partialPath); +} + +// Helper to run common redirect test cases that don't require special setup on +// the test page itself. 
+function RunCommonRedirectTests(testNamePrefix, urlHelperMethod, expectedResults) { + async_test(t => { + let testWindow = window.open(urlHelperMethod('resources/post-to-owner.py?top-level-navigation' + nonce)); + t.add_cleanup(_ => testWindow.close()); + window.addEventListener('message', t.step_func(e => { + if (e.source != testWindow) { + return; + } + + let expectation = { ...expectedResults }; + if (expectation['mode'] != '') + expectation['mode'] = 'navigate'; + if (expectation['dest'] == 'font') + expectation['dest'] = 'document'; + assert_header_equals(e.data, expectation, testNamePrefix + ' top level navigation'); + t.done(); + })); + }, testNamePrefix + ' top level navigation'); + + promise_test(t => { + return new Promise((resolve, reject) => { + let key = 'embed-https-redirect' + nonce; + let e = document.createElement('embed'); + e.src = urlHelperMethod('resources/record-header.py?file=' + key); + e.onload = e => { + let expectation = { ...expectedResults }; + if (expectation['mode'] != '') + expectation['mode'] = 'navigate'; + if (expectation['dest'] == 'font') + expectation['dest'] = 'embed'; + fetch('/fetch/metadata/resources/record-header.py?retrieve=true&file=' + key) + .then(response => response.text()) + .then(t.step_func(text => assert_header_equals(text, expectation, testNamePrefix + ' embed'))) + .then(resolve) + .catch(e => reject(e)); + }; + document.body.appendChild(e); + }); + }, testNamePrefix + ' embed'); + + promise_test(t => { + return new Promise((resolve, reject) => { + let key = 'object-https-redirect' + nonce; + let e = document.createElement('object'); + e.data = urlHelperMethod('resources/record-header.py?file=' + key); + e.onload = e => { + let expectation = { ...expectedResults }; + if (expectation['mode'] != '') + expectation['mode'] = 'navigate'; + if (expectation['dest'] == 'font') + expectation['dest'] = 'object'; + fetch('/fetch/metadata/resources/record-header.py?retrieve=true&file=' + key) + .then(response => response.text()) + .then(t.step_func(text => assert_header_equals(text, expectation, testNamePrefix + ' object'))) + .then(resolve) + .catch(e => reject(e)); + }; + document.body.appendChild(e); + }); + }, testNamePrefix + ' object'); + + if (document.createElement('link').relList.supports('preload')) { + async_test(t => { + let key = 'preload' + nonce; + let e = document.createElement('link'); + e.rel = 'preload'; + e.href = urlHelperMethod('resources/record-header.py?file=' + key); + e.setAttribute('as', 'track'); + e.onload = e.onerror = t.step_func_done(e => { + let expectation = { ...expectedResults }; + if (expectation['mode'] != '') + expectation['mode'] = 'cors'; + fetch('/fetch/metadata/resources/record-header.py?retrieve=true&file=' + key) + .then(t.step_func(response => response.text())) + .then(t.step_func_done(text => assert_header_equals(text, expectation, testNamePrefix + ' preload'))) + .catch(t.unreached_func()); + }); + document.head.appendChild(e); + }, testNamePrefix + ' preload'); + } + + promise_test(t => { + return new Promise((resolve, reject) => { + let key = 'style-https-redirect' + nonce; + let e = document.createElement('link'); + e.rel = 'stylesheet'; + e.href = urlHelperMethod('resources/record-header.py?file=' + key); + e.onload = e => { + let expectation = { ...expectedResults }; + if (expectation['mode'] != '') + expectation['mode'] = 'no-cors'; + if (expectation['dest'] == 'font') + expectation['dest'] = 'style'; + fetch('/fetch/metadata/resources/record-header.py?retrieve=true&file=' + key) + 
.then(response => response.text()) + .then(t.step_func(text => assert_header_equals(text, expectation, testNamePrefix + ' stylesheet'))) + .then(resolve) + .catch(e => reject(e)); + }; + document.body.appendChild(e); + }); + }, testNamePrefix + ' stylesheet'); + + promise_test(t => { + return new Promise((resolve, reject) => { + let key = 'track-https-redirect' + nonce; + let video = createVideoElement(); + let el = createTrack(); + el.src = urlHelperMethod('resources/record-header.py?file=' + key); + el.onload = t.step_func(_ => { + let expectation = { ...expectedResults }; + if (expectation['mode'] != '') + expectation['mode'] = 'cors'; + if (expectation['dest'] == 'font') + expectation['dest'] = 'track'; + fetch('/fetch/metadata/resources/record-header.py?retrieve=true&file=' + key) + .then(response => response.text()) + .then(t.step_func(text => assert_header_equals(text, expectation, testNamePrefix + ' track'))) + .then(resolve); + }); + video.appendChild(el); + document.body.appendChild(video); + }); + }, testNamePrefix + ' track'); +} diff --git a/test/wpt/tests/fetch/metadata/resources/serviceworker-accessors-frame.html b/test/wpt/tests/fetch/metadata/resources/serviceworker-accessors-frame.html new file mode 100644 index 0000000..9879802 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/serviceworker-accessors-frame.html @@ -0,0 +1,3 @@ + + +Page Title diff --git a/test/wpt/tests/fetch/metadata/resources/serviceworker-accessors.sw.js b/test/wpt/tests/fetch/metadata/resources/serviceworker-accessors.sw.js new file mode 100644 index 0000000..36c55a7 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/serviceworker-accessors.sw.js @@ -0,0 +1,14 @@ +addEventListener("fetch", event => { + event.waitUntil(async function () { + if (!event.clientId) return; + const client = await clients.get(event.clientId); + if (!client) return; + + client.postMessage({ + "dest": event.request.headers.get("sec-fetch-dest"), + "mode": event.request.headers.get("sec-fetch-mode"), + "site": event.request.headers.get("sec-fetch-site"), + "user": event.request.headers.get("sec-fetch-user") + }); + }()); +}); diff --git a/test/wpt/tests/fetch/metadata/resources/sharedWorker.js b/test/wpt/tests/fetch/metadata/resources/sharedWorker.js new file mode 100644 index 0000000..5eb89cb --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/sharedWorker.js @@ -0,0 +1,9 @@ +onconnect = function(e) { + var port = e.ports[0]; + + port.addEventListener('message', function(e) { + port.postMessage("Ready"); + }); + + port.start(); +} diff --git a/test/wpt/tests/fetch/metadata/resources/unload-with-beacon.html b/test/wpt/tests/fetch/metadata/resources/unload-with-beacon.html new file mode 100644 index 0000000..b00c9a5 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/unload-with-beacon.html @@ -0,0 +1,12 @@ + + diff --git a/test/wpt/tests/fetch/metadata/resources/xslt-test.sub.xml b/test/wpt/tests/fetch/metadata/resources/xslt-test.sub.xml new file mode 100644 index 0000000..acb478a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/resources/xslt-test.sub.xml @@ -0,0 +1,12 @@ + + + + + + diff --git a/test/wpt/tests/fetch/metadata/serviceworker-accessors.https.sub.html b/test/wpt/tests/fetch/metadata/serviceworker-accessors.https.sub.html new file mode 100644 index 0000000..03a8321 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/serviceworker-accessors.https.sub.html @@ -0,0 +1,51 @@ + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/sharedworker.https.sub.html 
b/test/wpt/tests/fetch/metadata/sharedworker.https.sub.html new file mode 100644 index 0000000..4df8582 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/sharedworker.https.sub.html @@ -0,0 +1,40 @@ + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/style.https.sub.html b/test/wpt/tests/fetch/metadata/style.https.sub.html new file mode 100644 index 0000000..a30d81d --- /dev/null +++ b/test/wpt/tests/fetch/metadata/style.https.sub.html @@ -0,0 +1,86 @@ + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/README.md b/test/wpt/tests/fetch/metadata/tools/README.md new file mode 100644 index 0000000..1c3bac2 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/README.md @@ -0,0 +1,126 @@
+# Fetch Metadata test generation framework
+
+This directory defines a command-line tool for procedurally generating WPT
+tests.
+
+## Motivation
+
+Many features of the web platform involve the browser making one or more HTTP
+requests to remote servers. Only some aspects of these requests are specified
+within the standard that defines the relevant feature. Other aspects are
+specified by external standards which span the entire platform (e.g. [Fetch
+Metadata Request Headers](https://w3c.github.io/webappsec-fetch-metadata/)).
+
+This state of affairs makes it difficult to maintain test coverage for two
+reasons:
+
+- When a new feature introduces a new kind of web request, it must be verified
+  to integrate with every cross-cutting standard.
+- When a new cross-cutting standard is introduced, it must be verified to
+  integrate with every kind of web request.
+
+The tool in this directory attempts to reduce this tension. It allows
+maintainers to express instructions for making web requests in an abstract
+sense. These generic instructions can be reused to produce a different suite
+of tests for each cross-cutting feature.
+
+When a new kind of request is proposed, a single generic template can be
+defined here. This will provide the maintainers of all cross-cutting features
+with clear instructions on how to extend their test suite with the new feature.
+
+Similarly, when a new cross-cutting feature is proposed, the authors can use
+this tool to build a test suite which spans the entire platform.
+
+## Build script
+
+To generate the Fetch Metadata tests, run `./wpt update-built --include fetch`
+in the root of the repository.
+
+## Configuration
+
+The test generation tool requires a YAML-formatted configuration file as its
+input. The file should define a dictionary with the following keys:
+
+- `templates` - a string describing the filesystem path from which template
+  files should be loaded
+- `output_directory` - a string describing the filesystem path where the
+  generated test files should be written
+- `cases` - a list of dictionaries describing how the test templates should be
+  expanded with individual subtests; each dictionary should have the following
+  keys:
+  - `all_subtests` - properties which should be defined for every expansion
+  - `common_axis` - a list of dictionaries
+  - `template_axes` - a dictionary relating template names to properties that
+    should be used when expanding that particular template
+
+Internally, the tool creates a set of "subtests" for each template. This set is
+the Cartesian product of the `common_axis` and the given template's entry in
+the `template_axes` dictionary. It uses this set of subtests to expand the
+template, creating an output file. Refer to the next section for a concrete
+example of how the expansion is performed.
+
+In general, the tool will output a single file for each template. However, the
+`filename_flags` attribute has special semantics. It is used to separate
+subtests for the same template file. This is intended to accommodate [the
+web-platform-tests' filename-based
+conventions](https://web-platform-tests.org/writing-tests/file-names.html).
+
+For instance, when `.https` is present in a test file's name, the WPT test
+harness will load that test using the HTTPS protocol. Subtests which include
+the value `https` in the `filename_flags` property will be expanded using the
+appropriate template but written to a distinct file whose name includes
+`.https`.
+
+The generation tool requires that the configuration file references every
+template in the `templates` directory. Because templates and configuration
+files may be contributed by different people, this requirement ensures that
+configuration authors are aware of all available templates. Some templates may
+not be relevant for some features; in those cases, the configuration file can
+include an empty array for the template's entry in the `template_axes`
+dictionary (as in `template3.html` in the example which follows).
+
+## Expansion example
+
+In the following example configuration file, `a`, `b`, `s`, `w`, `x`, `y`, and
+`z` all represent associative arrays.
+
+```yaml
+templates: path/to/templates
+output_directory: path/to/output
+cases:
+  - all_subtests: s
+    common_axis: [a, b]
+    template_axes:
+      template1.html: [w]
+      template2.html: [x, y, z]
+      template3.html: []
+```
+
+When run with such a configuration file, the tool would generate two files,
+expanded with data as described below (where `(a, b)` represents the union of
+`a` and `b`):
+
+    template1.html: [(a, w), (b, w)]
+    template2.html: [(a, x), (b, x), (a, y), (b, y), (a, z), (b, z)]
+    template3.html: (zero tests; not expanded)
+
+## Design Considerations
+
+**Efficiency of generated output** The tool is capable of generating a large
+number of tests given a small amount of input. Naively structured, this could
+result in test suites which take a large amount of time and computational
+resources to complete. The tool has been designed to help authors structure the
+generated output to reduce these resource requirements.
+
+**Literalness of generated output** Because the generated output is how most
+people will interact with the tests, it is important that it be approachable.
+This tool avoids outputting abstractions which would frustrate attempts to read
+the source code or step through its execution environment.
+
+**Simplicity** The test generation logic itself was written to be approachable.
+This makes it easier to anticipate how the tool will behave with new input, and
+it lowers the bar for others to contribute improvements.
+
+Non-goals include conciseness of template files (verbosity makes the potential
+expansions more predictable) and conciseness of generated output (verbosity
+aids in the interpretation of results).
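As a rough illustration of the expansion rule described under "Configuration" above, the sketch below assumes the YAML has already been parsed into plain Python dictionaries. The function name `expand_template`, the merge precedence of `all_subtests`, and the sample data are assumptions for illustration only, not the tool's actual internals.

```python
# Illustrative sketch only: models the Cartesian-product expansion described
# in the README, assuming the YAML configuration is already parsed into dicts.
def expand_template(template_name, case):
    """Build the subtest list for one template within one `cases` entry."""
    subtests = []
    # One subtest per pairing of a template-specific entry with a common entry.
    for specific in case['template_axes'][template_name]:
        for common in case.get('common_axis', [{}]):
            merged = dict(case.get('all_subtests', {}))  # defaults (assumed lowest precedence)
            merged.update(common)
            merged.update(specific)
            subtests.append(merged)
    return subtests


# Mirrors the README's example: template2.html yields six subtests
# ((a, x), (b, x), (a, y), (b, y), (a, z), (b, z)); template3.html yields none.
case = {
    'common_axis': [{'origin': 'a'}, {'origin': 'b'}],
    'template_axes': {
        'template1.html': [{'expected': 'w'}],
        'template2.html': [{'expected': 'x'}, {'expected': 'y'}, {'expected': 'z'}],
        'template3.html': [],
    },
}
print(len(expand_template('template2.html', case)))  # 6
print(expand_template('template3.html', case))       # []
```

The real generator additionally routes subtests whose `filename_flags` include values such as `https` into separate output files, which this sketch omits.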
diff --git a/test/wpt/tests/fetch/metadata/tools/fetch-metadata.conf.yml b/test/wpt/tests/fetch/metadata/tools/fetch-metadata.conf.yml new file mode 100644 index 0000000..b277bcb --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/fetch-metadata.conf.yml @@ -0,0 +1,806 @@ +--- +templates: templates +output_directory: ../generated +cases: + - all_subtests: + expected: NULL + filename_flags: [] + common_axis: + - headerName: sec-fetch-site + origins: [httpOrigin] + description: Not sent to non-trustworthy same-origin destination + - headerName: sec-fetch-site + origins: [httpSameSite] + description: Not sent to non-trustworthy same-site destination + - headerName: sec-fetch-site + origins: [httpCrossSite] + description: Not sent to non-trustworthy cross-site destination + - headerName: sec-fetch-mode + origins: [httpOrigin] + description: Not sent to non-trustworthy same-origin destination + - headerName: sec-fetch-mode + origins: [httpSameSite] + description: Not sent to non-trustworthy same-site destination + - headerName: sec-fetch-mode + origins: [httpCrossSite] + description: Not sent to non-trustworthy cross-site destination + - headerName: sec-fetch-dest + origins: [httpOrigin] + description: Not sent to non-trustworthy same-origin destination + - headerName: sec-fetch-dest + origins: [httpSameSite] + description: Not sent to non-trustworthy same-site destination + - headerName: sec-fetch-dest + origins: [httpCrossSite] + description: Not sent to non-trustworthy cross-site destination + - headerName: sec-fetch-user + origins: [httpOrigin] + description: Not sent to non-trustworthy same-origin destination + - headerName: sec-fetch-user + origins: [httpSameSite] + description: Not sent to non-trustworthy same-site destination + - headerName: sec-fetch-user + origins: [httpCrossSite] + description: Not sent to non-trustworthy cross-site destination + template_axes: + # Unused + appcache-manifest.sub.https.html: [] + # The `audioWorklet` interface is only available in secure contexts + # https://webaudio.github.io/web-audio-api/#BaseAudioContext + audioworklet.https.sub.html: [] + # Service workers are only available in secure context + fetch-via-serviceworker.https.sub.html: [] + # Service workers are only available in secure context + serviceworker.https.sub.html: [] + + css-images.sub.html: + - filename_flags: [tentative] + css-font-face.sub.html: + - filename_flags: [tentative] + element-a.sub.html: [{}] + element-area.sub.html: [{}] + element-audio.sub.html: [{}] + element-embed.sub.html: [{}] + element-frame.sub.html: [{}] + element-iframe.sub.html: [{}] + element-img.sub.html: + - sourceAttr: src + - sourceAttr: srcset + element-img-environment-change.sub.html: [{}] + element-input-image.sub.html: [{}] + element-link-icon.sub.html: [{}] + element-link-prefetch.optional.sub.html: [{}] + element-meta-refresh.optional.sub.html: [{}] + element-picture.sub.html: [{}] + element-script.sub.html: + - {} + - elementAttrs: { type: module } + element-video.sub.html: [{}] + element-video-poster.sub.html: [{}] + fetch.sub.html: [{}] + form-submission.sub.html: + - method: GET + - method: POST + header-link.sub.html: + - rel: icon + - rel: stylesheet + header-refresh.optional.sub.html: [{}] + window-location.sub.html: [{}] + script-module-import-dynamic.sub.html: [{}] + script-module-import-static.sub.html: [{}] + svg-image.sub.html: [{}] + window-history.sub.html: [{}] + worker-dedicated-importscripts.sub.html: [{}] + worker-dedicated-constructor.sub.html: [{}] + + # Sec-Fetch-Site - direct 
requests + - all_subtests: + headerName: sec-fetch-site + filename_flags: [https] + common_axis: + - description: Same origin + origins: [httpsOrigin] + expected: same-origin + - description: Cross-site + origins: [httpsCrossSite] + expected: cross-site + - description: Same site + origins: [httpsSameSite] + expected: same-site + template_axes: + # Unused + # - the request mode of all "classic" worker scripts is set to + # "same-origin" + # https://html.spec.whatwg.org/#fetch-a-classic-worker-script + # - the request mode of all "top-level "module" worker scripts is set to + # "same-origin": + # https://html.spec.whatwg.org/#fetch-a-single-module-script + worker-dedicated-constructor.sub.html: [] + + appcache-manifest.sub.https.html: [{}] + audioworklet.https.sub.html: [{}] + css-images.sub.html: + - filename_flags: [tentative] + css-font-face.sub.html: + - filename_flags: [tentative] + element-a.sub.html: [{}] + element-area.sub.html: [{}] + element-audio.sub.html: [{}] + element-embed.sub.html: [{}] + element-frame.sub.html: [{}] + element-iframe.sub.html: [{}] + element-img.sub.html: + - sourceAttr: src + - sourceAttr: srcset + element-img-environment-change.sub.html: [{}] + element-input-image.sub.html: [{}] + element-link-icon.sub.html: [{}] + element-link-prefetch.optional.sub.html: [{}] + element-meta-refresh.optional.sub.html: [{}] + element-picture.sub.html: [{}] + element-script.sub.html: + - {} + - elementAttrs: { type: module } + element-video.sub.html: [{}] + element-video-poster.sub.html: [{}] + fetch.sub.html: [{ init: { mode: no-cors } }] + fetch-via-serviceworker.https.sub.html: [{ init: { mode: no-cors } }] + form-submission.sub.html: + - method: GET + - method: POST + header-link.sub.html: + - rel: icon + - rel: stylesheet + header-refresh.optional.sub.html: [{}] + window-location.sub.html: [{}] + script-module-import-dynamic.sub.html: [{}] + script-module-import-static.sub.html: [{}] + serviceworker.https.sub.html: [{}] + svg-image.sub.html: [{}] + window-history.sub.html: [{}] + worker-dedicated-importscripts.sub.html: [{}] + + # Sec-Fetch-Site - redirection from HTTP + - all_subtests: + headerName: sec-fetch-site + filename_flags: [] + common_axis: + - description: HTTPS downgrade (header not sent) + origins: [httpsOrigin, httpOrigin] + expected: NULL + - description: HTTPS upgrade + origins: [httpOrigin, httpsOrigin] + expected: cross-site + - description: HTTPS downgrade-upgrade + origins: [httpsOrigin, httpOrigin, httpsOrigin] + expected: cross-site + template_axes: + # Unused + # The `audioWorklet` interface is only available in secure contexts + # https://webaudio.github.io/web-audio-api/#BaseAudioContext + audioworklet.https.sub.html: [] + # Service workers are only available in secure context + fetch-via-serviceworker.https.sub.html: [] + # Service workers' redirect mode is "error" + serviceworker.https.sub.html: [] + # Interstitial locations in an HTTP redirect chain are not added to the + # session history, so these requests cannot be initiated using the + # History API. 
+ window-history.sub.html: [] + # Unused + # - the request mode of all "classic" worker scripts is set to + # "same-origin" + # https://html.spec.whatwg.org/#fetch-a-classic-worker-script + # - the request mode of all "top-level "module" worker scripts is set to + # "same-origin": + # https://html.spec.whatwg.org/#fetch-a-single-module-script + worker-dedicated-constructor.sub.html: [] + + appcache-manifest.sub.https.html: [{}] + css-images.sub.html: + - filename_flags: [tentative] + css-font-face.sub.html: + - filename_flags: [tentative] + element-a.sub.html: [{}] + element-area.sub.html: [{}] + element-audio.sub.html: [{}] + element-embed.sub.html: [{}] + element-frame.sub.html: [{}] + element-iframe.sub.html: [{}] + element-img.sub.html: + - sourceAttr: src + - sourceAttr: srcset + element-img-environment-change.sub.html: [{}] + element-input-image.sub.html: [{}] + element-link-icon.sub.html: [{}] + element-link-prefetch.optional.sub.html: [{}] + element-meta-refresh.optional.sub.html: [{}] + element-picture.sub.html: [{}] + element-script.sub.html: + - {} + - elementAttrs: { type: module } + element-video.sub.html: [{}] + element-video-poster.sub.html: [{}] + fetch.sub.html: [{}] + form-submission.sub.html: + - method: GET + - method: POST + header-link.sub.html: + - rel: icon + - rel: stylesheet + header-refresh.optional.sub.html: [{}] + window-location.sub.html: [{}] + script-module-import-dynamic.sub.html: [{}] + script-module-import-static.sub.html: [{}] + svg-image.sub.html: [{}] + worker-dedicated-importscripts.sub.html: [{}] + + # Sec-Fetch-Site - redirection from HTTPS + - all_subtests: + headerName: sec-fetch-site + filename_flags: [https] + common_axis: + - description: Same-Origin -> Cross-Site -> Same-Origin redirect + origins: [httpsOrigin, httpsCrossSite, httpsOrigin] + expected: cross-site + - description: Same-Origin -> Same-Site -> Same-Origin redirect + origins: [httpsOrigin, httpsSameSite, httpsOrigin] + expected: same-site + - description: Cross-Site -> Same Origin + origins: [httpsCrossSite, httpsOrigin] + expected: cross-site + - description: Cross-Site -> Same-Site + origins: [httpsCrossSite, httpsSameSite] + expected: cross-site + - description: Cross-Site -> Cross-Site + origins: [httpsCrossSite, httpsCrossSite] + expected: cross-site + - description: Same-Origin -> Same Origin + origins: [httpsOrigin, httpsOrigin] + expected: same-origin + - description: Same-Origin -> Same-Site + origins: [httpsOrigin, httpsSameSite] + expected: same-site + - description: Same-Origin -> Cross-Site + origins: [httpsOrigin, httpsCrossSite] + expected: cross-site + - description: Same-Site -> Same Origin + origins: [httpsSameSite, httpsOrigin] + expected: same-site + - description: Same-Site -> Same-Site + origins: [httpsSameSite, httpsSameSite] + expected: same-site + - description: Same-Site -> Cross-Site + origins: [httpsSameSite, httpsCrossSite] + expected: cross-site + template_axes: + # Service Workers' redirect mode is "error" + serviceworker.https.sub.html: [] + # Interstitial locations in an HTTP redirect chain are not added to the + # session history, so these requests cannot be initiated using the + # History API. 
+ window-history.sub.html: [] + # Unused + # - the request mode of all "classic" worker scripts is set to + # "same-origin" + # https://html.spec.whatwg.org/#fetch-a-classic-worker-script + # - the request mode of all "top-level "module" worker scripts is set to + # "same-origin": + # https://html.spec.whatwg.org/#fetch-a-single-module-script + worker-dedicated-constructor.sub.html: [] + + appcache-manifest.sub.https.html: [{}] + audioworklet.https.sub.html: [{}] + css-images.sub.html: + - filename_flags: [tentative] + css-font-face.sub.html: + - filename_flags: [tentative] + element-a.sub.html: [{}] + element-area.sub.html: [{}] + element-audio.sub.html: [{}] + element-embed.sub.html: [{}] + element-frame.sub.html: [{}] + element-iframe.sub.html: [{}] + element-img.sub.html: + - sourceAttr: src + - sourceAttr: srcset + element-img-environment-change.sub.html: [{}] + element-input-image.sub.html: [{}] + element-link-icon.sub.html: [{}] + element-link-prefetch.optional.sub.html: [{}] + element-meta-refresh.optional.sub.html: [{}] + element-picture.sub.html: [{}] + element-script.sub.html: + - {} + - elementAttrs: { type: module } + element-video.sub.html: [{}] + element-video-poster.sub.html: [{}] + fetch.sub.html: [{ init: { mode: no-cors } }] + fetch-via-serviceworker.https.sub.html: [{ init: { mode: no-cors } }] + form-submission.sub.html: + - method: GET + - method: POST + header-link.sub.html: + - rel: icon + - rel: stylesheet + header-refresh.optional.sub.html: [{}] + window-location.sub.html: [{}] + script-module-import-dynamic.sub.html: [{}] + script-module-import-static.sub.html: [{}] + svg-image.sub.html: [{}] + worker-dedicated-importscripts.sub.html: [{}] + + # Sec-Fetch-Site - redirection with mixed content + # These tests verify the effect that redirection has on the request's "site". + # The initial request must be made to a resource that is "same-site" with its + # origin. This avoids false positives because if the request were made to a + # cross-site resource, the value of "cross-site" would be assigned regardless + # of the subseqent redirection. + # + # Because these conditions necessarily warrant mixed content, only templates + # which can be configured to allow mixed content [1] can be used. + # + # [1] https://w3c.github.io/webappsec-mixed-content/#should-block-fetch + + - common_axis: + - description: HTTPS downgrade-upgrade + headerName: sec-fetch-site + origins: [httpsOrigin, httpOrigin, httpsOrigin] + expected: cross-site + filename_flags: [https] + template_axes: + # Mixed Content considers only a small subset of requests as + # "optionally-blockable." These are the only requests that can be tested + # for the "downgrade-upgrade" scenario, so all other templates must be + # explicitly ignored. + audioworklet.https.sub.html: [] + css-font-face.sub.html: [] + element-embed.sub.html: [] + element-frame.sub.html: [] + element-iframe.sub.html: [] + element-img-environment-change.sub.html: [] + element-link-icon.sub.html: [] + element-link-prefetch.optional.sub.html: [] + element-picture.sub.html: [] + element-script.sub.html: [] + fetch.sub.html: [] + fetch-via-serviceworker.https.sub.html: [] + header-link.sub.html: [] + script-module-import-static.sub.html: [] + script-module-import-dynamic.sub.html: [] + # Service Workers' redirect mode is "error" + serviceworker.https.sub.html: [] + # Interstitial locations in an HTTP redirect chain are not added to the + # session history, so these requests cannot be initiated using the + # History API. 
+ window-history.sub.html: [] + worker-dedicated-constructor.sub.html: [] + worker-dedicated-importscripts.sub.html: [] + # Avoid duplicate subtest for 'sec-fetch-site - HTTPS downgrade-upgrade' + appcache-manifest.sub.https.html: [] + css-images.sub.html: + - filename_flags: [tentative] + element-a.sub.html: [{}] + element-area.sub.html: [{}] + element-audio.sub.html: [{}] + element-img.sub.html: + # srcset omitted because it is not "optionally-blockable" + # https://w3c.github.io/webappsec-mixed-content/#category-optionally-blockable + - sourceAttr: src + element-input-image.sub.html: [{}] + element-meta-refresh.optional.sub.html: [{}] + element-video.sub.html: [{}] + element-video-poster.sub.html: [{}] + form-submission.sub.html: + - method: GET + - method: POST + header-refresh.optional.sub.html: [{}] + svg-image.sub.html: [{}] + window-location.sub.html: [{}] + + # Sec-Fetch-Mode + # These tests are served over HTTPS so the induced requests will be both + # same-origin with the document [1] and a potentially-trustworthy URL [2]. + # + # [1] https://html.spec.whatwg.org/multipage/origin.html#same-origin + # [2] https://w3c.github.io/webappsec-secure-contexts/#potentially-trustworthy-url + - common_axis: + - headerName: sec-fetch-mode + filename_flags: [https] + origins: [] + template_axes: + appcache-manifest.sub.https.html: + - expected: no-cors + audioworklet.https.sub.html: + # https://html.spec.whatwg.org/multipage/webappapis.html#fetch-a-single-module-script + - expected: cors + css-images.sub.html: + - expected: no-cors + filename_flags: [tentative] + css-font-face.sub.html: + - expected: cors + filename_flags: [tentative] + element-a.sub.html: + - expected: navigate + # https://html.spec.whatwg.org/multipage/links.html#downloading-hyperlinks + - elementAttrs: {download: ''} + expected: no-cors + element-area.sub.html: + - expected: navigate + # https://html.spec.whatwg.org/multipage/links.html#downloading-hyperlinks + - elementAttrs: {download: ''} + expected: no-cors + element-audio.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + element-embed.sub.html: + - expected: no-cors + element-frame.sub.html: + - expected: navigate + element-iframe.sub.html: + - expected: navigate + element-img.sub.html: + - sourceAttr: src + expected: no-cors + - sourceAttr: src + expected: cors + elementAttrs: { crossorigin: '' } + - sourceAttr: src + expected: cors + elementAttrs: { crossorigin: anonymous } + - sourceAttr: src + expected: cors + elementAttrs: { crossorigin: use-credentials } + - sourceAttr: srcset + expected: no-cors + - sourceAttr: srcset + expected: cors + elementAttrs: { crossorigin: '' } + - sourceAttr: srcset + expected: cors + elementAttrs: { crossorigin: anonymous } + - sourceAttr: srcset + expected: cors + elementAttrs: { crossorigin: use-credentials } + element-img-environment-change.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + element-input-image.sub.html: + - expected: no-cors + element-link-icon.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + 
element-link-prefetch.optional.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + element-meta-refresh.optional.sub.html: + - expected: navigate + element-picture.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + element-script.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { type: module } + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + element-video.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + element-video-poster.sub.html: + - expected: no-cors + fetch.sub.html: + - expected: cors + - expected: cors + init: { mode: cors } + - expected: no-cors + init: { mode: no-cors } + - expected: same-origin + init: { mode: same-origin } + fetch-via-serviceworker.https.sub.html: + - expected: cors + - expected: cors + init: { mode: cors } + - expected: no-cors + init: { mode: no-cors } + - expected: same-origin + init: { mode: same-origin } + form-submission.sub.html: + - method: GET + expected: navigate + - method: POST + expected: navigate + header-link.sub.html: + - rel: icon + expected: no-cors + - rel: stylesheet + expected: no-cors + header-refresh.optional.sub.html: + - expected: navigate + window-history.sub.html: + - expected: navigate + window-location.sub.html: + - expected: navigate + script-module-import-dynamic.sub.html: + - expected: cors + script-module-import-static.sub.html: + - expected: cors + # https://svgwg.org/svg2-draft/linking.html#processingURL-fetch + svg-image.sub.html: + - expected: no-cors + - expected: cors + elementAttrs: { crossorigin: '' } + - expected: cors + elementAttrs: { crossorigin: anonymous } + - expected: cors + elementAttrs: { crossorigin: use-credentials } + serviceworker.https.sub.html: + - expected: same-origin + options: { type: 'classic' } + # https://github.com/whatwg/html/pull/5875 + - expected: same-origin + worker-dedicated-constructor.sub.html: + - expected: same-origin + - options: { type: module } + expected: same-origin + worker-dedicated-importscripts.sub.html: + - expected: no-cors + + # Sec-Fetch-Dest + - common_axis: + - headerName: sec-fetch-dest + filename_flags: [https] + origins: [] + template_axes: + appcache-manifest.sub.https.html: + - expected: empty + audioworklet.https.sub.html: + # https://github.com/WebAudio/web-audio-api/issues/2203 + - expected: audioworklet + css-images.sub.html: + - expected: image + filename_flags: [tentative] + css-font-face.sub.html: + - expected: font + filename_flags: [tentative] + element-a.sub.html: + - expected: document + # https://html.spec.whatwg.org/multipage/links.html#downloading-hyperlinks + - elementAttrs: {download: ''} + expected: empty + element-area.sub.html: + - expected: document + # https://html.spec.whatwg.org/multipage/links.html#downloading-hyperlinks + - elementAttrs: {download: ''} + expected: empty + element-audio.sub.html: + - expected: audio + element-embed.sub.html: + - expected: embed + element-frame.sub.html: + # 
https://github.com/whatwg/html/pull/4976 + - expected: frame + element-iframe.sub.html: + # https://github.com/whatwg/html/pull/4976 + - expected: iframe + element-img.sub.html: + - sourceAttr: src + expected: image + - sourceAttr: srcset + expected: image + element-img-environment-change.sub.html: + - expected: image + element-input-image.sub.html: + - expected: image + element-link-icon.sub.html: + - expected: empty + element-link-prefetch.optional.sub.html: + - expected: empty + - elementAttrs: { as: audio } + expected: audio + - elementAttrs: { as: document } + expected: document + - elementAttrs: { as: embed } + expected: embed + - elementAttrs: { as: fetch } + expected: fetch + - elementAttrs: { as: font } + expected: font + - elementAttrs: { as: image } + expected: image + - elementAttrs: { as: object } + expected: object + - elementAttrs: { as: script } + expected: script + - elementAttrs: { as: style } + expected: style + - elementAttrs: { as: track } + expected: track + - elementAttrs: { as: video } + expected: video + - elementAttrs: { as: worker } + expected: worker + element-meta-refresh.optional.sub.html: + - expected: document + element-picture.sub.html: + - expected: image + element-script.sub.html: + - expected: script + element-video.sub.html: + - expected: video + element-video-poster.sub.html: + - expected: image + fetch.sub.html: + - expected: empty + fetch-via-serviceworker.https.sub.html: + - expected: empty + form-submission.sub.html: + - method: GET + expected: document + - method: POST + expected: document + header-link.sub.html: + - rel: icon + expected: empty + - rel: stylesheet + filename_flags: [tentative] + expected: style + header-refresh.optional.sub.html: + - expected: document + window-history.sub.html: + - expected: document + window-location.sub.html: + - expected: document + script-module-import-dynamic.sub.html: + - expected: script + script-module-import-static.sub.html: + - expected: script + serviceworker.https.sub.html: + - expected: serviceworker + # Implemented as "image" in Chromium and Firefox, but specified as + # "empty" + # https://github.com/w3c/svgwg/issues/782 + svg-image.sub.html: + - expected: empty + worker-dedicated-constructor.sub.html: + - expected: worker + - options: { type: module } + expected: worker + worker-dedicated-importscripts.sub.html: + - expected: script + + # Sec-Fetch-User + - common_axis: + - headerName: sec-fetch-user + filename_flags: [https] + origins: [] + template_axes: + appcache-manifest.sub.https.html: + - expected: NULL + audioworklet.https.sub.html: + - expected: NULL + css-images.sub.html: + - expected: NULL + filename_flags: [tentative] + css-font-face.sub.html: + - expected: NULL + filename_flags: [tentative] + element-a.sub.html: + - expected: NULL + - userActivated: TRUE + expected: ?1 + element-area.sub.html: + - expected: NULL + - userActivated: TRUE + expected: ?1 + element-audio.sub.html: + - expected: NULL + element-embed.sub.html: + - expected: NULL + element-frame.sub.html: + - expected: NULL + - userActivated: TRUE + expected: ?1 + element-iframe.sub.html: + - expected: NULL + - userActivated: TRUE + expected: ?1 + element-img.sub.html: + - sourceAttr: src + expected: NULL + - sourceAttr: srcset + expected: NULL + element-img-environment-change.sub.html: + - expected: NULL + element-input-image.sub.html: + - expected: NULL + element-link-icon.sub.html: + - expected: NULL + element-link-prefetch.optional.sub.html: + - expected: NULL + element-meta-refresh.optional.sub.html: + - expected: NULL + 
element-picture.sub.html: + - expected: NULL + element-script.sub.html: + - expected: NULL + element-video.sub.html: + - expected: NULL + element-video-poster.sub.html: + - expected: NULL + fetch.sub.html: + - expected: NULL + fetch-via-serviceworker.https.sub.html: + - expected: NULL + form-submission.sub.html: + - method: GET + expected: NULL + - method: GET + userActivated: TRUE + expected: ?1 + - method: POST + expected: NULL + - method: POST + userActivated: TRUE + expected: ?1 + header-link.sub.html: + - rel: icon + expected: NULL + - rel: stylesheet + expected: NULL + header-refresh.optional.sub.html: + - expected: NULL + window-history.sub.html: + - expected: NULL + window-location.sub.html: + - expected: NULL + - userActivated: TRUE + expected: ?1 + script-module-import-dynamic.sub.html: + - expected: NULL + script-module-import-static.sub.html: + - expected: NULL + serviceworker.https.sub.html: + - expected: NULL + svg-image.sub.html: + - expected: NULL + worker-dedicated-constructor.sub.html: + - expected: NULL + - options: { type: module } + expected: NULL + worker-dedicated-importscripts.sub.html: + - expected: NULL diff --git a/test/wpt/tests/fetch/metadata/tools/generate.py b/test/wpt/tests/fetch/metadata/tools/generate.py new file mode 100644 index 0000000..fa850c8 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/generate.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 + +import itertools +import os + +import jinja2 +import yaml + +HERE = os.path.abspath(os.path.dirname(__file__)) +PROJECT_ROOT = os.path.join(HERE, '..', '..', '..') + +def find_templates(starting_directory): + for directory, subdirectories, file_names in os.walk(starting_directory): + for file_name in file_names: + if file_name.startswith('.'): + continue + yield file_name, os.path.join(directory, file_name) + +def test_name(directory, template_name, subtest_flags): + ''' + Create a test name based on a template and the WPT file name flags [1] + required for a given subtest. This name is used to determine how subtests + may be grouped together. 
In order to promote grouping, the combination uses + a few aspects of how file name flags are interpreted: + + - repeated flags have no effect, so duplicates are removed + - flag sequence does not matter, so flags are consistently sorted + + directory | template_name | subtest_flags | result + ----------|------------------|-----------------|------- + cors | image.html | [] | cors/image.html + cors | image.https.html | [] | cors/image.https.html + cors | image.html | [https] | cors/image.https.html + cors | image.https.html | [https] | cors/image.https.html + cors | image.https.html | [https] | cors/image.https.html + cors | image.sub.html | [https] | cors/image.https.sub.html + cors | image.https.html | [sub] | cors/image.https.sub.html + + [1] docs/writing-tests/file-names.md + ''' + template_name_parts = template_name.split('.') + flags = set(subtest_flags) | set(template_name_parts[1:-1]) + test_name_parts = ( + [template_name_parts[0]] + + sorted(flags) + + [template_name_parts[-1]] + ) + return os.path.join(directory, '.'.join(test_name_parts)) + +def merge(a, b): + if type(a) != type(b): + raise Exception('Cannot merge disparate types') + if type(a) == list: + return a + b + if type(a) == dict: + merged = {} + + for key in a: + if key in b: + merged[key] = merge(a[key], b[key]) + else: + merged[key] = a[key] + + for key in b: + if not key in a: + merged[key] = b[key] + + return merged + + raise Exception('Cannot merge {} type'.format(type(a).__name__)) + +def product(a, b): + ''' + Given two lists of objects, compute their Cartesian product by merging the + elements together. For example, + + product( + [{'a': 1}, {'b': 2}], + [{'c': 3}, {'d': 4}, {'e': 5}] + ) + + returns the following list: + + [ + {'a': 1, 'c': 3}, + {'a': 1, 'd': 4}, + {'a': 1, 'e': 5}, + {'b': 2, 'c': 3}, + {'b': 2, 'd': 4}, + {'b': 2, 'e': 5} + ] + ''' + result = [] + + for a_object in a: + for b_object in b: + result.append(merge(a_object, b_object)) + + return result + +def make_provenance(project_root, cases, template): + return '\n'.join([ + 'This test was procedurally generated. Please do not modify it directly.', + 'Sources:', + '- {}'.format(os.path.relpath(cases, project_root)), + '- {}'.format(os.path.relpath(template, project_root)) + ]) + +def collection_filter(obj, title): + if not obj: + return 'no {}'.format(title) + + members = [] + for name, value in obj.items(): + if value == '': + members.append(name) + else: + members.append('{}={}'.format(name, value)) + + return '{}: {}'.format(title, ', '.join(members)) + +def pad_filter(value, side, padding): + if not value: + return '' + if side == 'start': + return padding + value + + return value + padding + +def main(config_file): + with open(config_file, 'r') as handle: + config = yaml.safe_load(handle.read()) + + templates_directory = os.path.normpath( + os.path.join(os.path.dirname(config_file), config['templates']) + ) + + environment = jinja2.Environment( + variable_start_string='[%', + variable_end_string='%]' + ) + environment.filters['collection'] = collection_filter + environment.filters['pad'] = pad_filter + templates = {} + subtests = {} + + for template_name, path in find_templates(templates_directory): + subtests[template_name] = [] + with open(path, 'r') as handle: + templates[template_name] = environment.from_string(handle.read()) + + for case in config['cases']: + unused_templates = set(templates) - set(case['template_axes']) + + # This warning is intended to help authors avoid mistakenly omitting + # templates. 
It can be silenced by extending the `template_axes` + # dictionary with an empty list for templates which are intentionally + # unused. + if unused_templates: + print( + 'Warning: case does not reference the following templates:' + ) + print('\n'.join('- {}'.format(name) for name in unused_templates)) + + common_axis = product( + case['common_axis'], [case.get('all_subtests', {})] + ) + + for template_name, template_axis in case['template_axes'].items(): + subtests[template_name].extend(product(common_axis, template_axis)) + + for template_name, template in templates.items(): + provenance = make_provenance( + PROJECT_ROOT, + config_file, + os.path.join(templates_directory, template_name) + ) + get_filename = lambda subtest: test_name( + config['output_directory'], + template_name, + subtest['filename_flags'] + ) + subtests_by_filename = itertools.groupby( + sorted(subtests[template_name], key=get_filename), + key=get_filename + ) + for filename, some_subtests in subtests_by_filename: + with open(filename, 'w') as handle: + handle.write(templates[template_name].render( + subtests=list(some_subtests), + provenance=provenance + ) + '\n') + +if __name__ == '__main__': + main('fetch-metadata.conf.yml') diff --git a/test/wpt/tests/fetch/metadata/tools/templates/appcache-manifest.sub.https.html b/test/wpt/tests/fetch/metadata/tools/templates/appcache-manifest.sub.https.html new file mode 100644 index 0000000..0dfc084 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/appcache-manifest.sub.https.html @@ -0,0 +1,63 @@ + + + + + HTTP headers on request for Appcache manifest + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/audioworklet.https.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/audioworklet.https.sub.html new file mode 100644 index 0000000..7be309c --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/audioworklet.https.sub.html @@ -0,0 +1,53 @@ + + + + + HTTP headers on request for AudioWorklet module + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/css-font-face.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/css-font-face.sub.html new file mode 100644 index 0000000..94b33f4 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/css-font-face.sub.html @@ -0,0 +1,60 @@ + + + + + HTTP headers on request for CSS font-face + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/css-images.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/css-images.sub.html new file mode 100644 index 0000000..e394f9f --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/css-images.sub.html @@ -0,0 +1,137 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for CSS image-accepting properties + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-a.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-a.sub.html new file mode 100644 index 0000000..2bd8e8a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-a.sub.html @@ -0,0 +1,72 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for HTML "a" element navigation + + + {%- if subtests|selectattr('userActivated')|list %} + + + {%- endif %} + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-area.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-area.sub.html new file mode 100644 index 0000000..0cef5b2 --- /dev/null +++
b/test/wpt/tests/fetch/metadata/tools/templates/element-area.sub.html @@ -0,0 +1,72 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for HTML "area" element navigation + + + {%- if subtests|selectattr('userActivated')|list %} + + + {%- endif %} + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-audio.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-audio.sub.html new file mode 100644 index 0000000..92bc221 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-audio.sub.html @@ -0,0 +1,51 @@ + + + + + HTTP headers on request for HTML "audio" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-embed.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-embed.sub.html new file mode 100644 index 0000000..18ce09e --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-embed.sub.html @@ -0,0 +1,54 @@ + + + + + HTTP headers on request for HTML "embed" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-frame.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-frame.sub.html new file mode 100644 index 0000000..ce90171 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-frame.sub.html @@ -0,0 +1,62 @@ + + + + + HTTP headers on request for HTML "frame" element source + + + {%- if subtests|selectattr('userActivated')|list %} + + + {%- endif %} + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-iframe.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-iframe.sub.html new file mode 100644 index 0000000..43a632a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-iframe.sub.html @@ -0,0 +1,62 @@ + + + + + HTTP headers on request for HTML "frame" element source + + + {%- if subtests|selectattr('userActivated')|list %} + + + {%- endif %} + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-img-environment-change.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-img-environment-change.sub.html new file mode 100644 index 0000000..5a65114 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-img-environment-change.sub.html @@ -0,0 +1,78 @@ + + + + + HTTP headers on image request triggered by change to environment + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-img.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-img.sub.html new file mode 100644 index 0000000..1dac584 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-img.sub.html @@ -0,0 +1,52 @@ + + + + + HTTP headers on request for HTML "img" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-input-image.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-input-image.sub.html new file mode 100644 index 0000000..3c50008 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-input-image.sub.html @@ -0,0 +1,48 @@ + + + + + HTTP headers on request for HTML "input" element with type="button" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-link-icon.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-link-icon.sub.html new file mode 100644 index 0000000..18ce12a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-link-icon.sub.html @@ -0,0 +1,75 @@ + + + + + {%- if subtests|length > 
10 %} + + {%- endif %} + HTTP headers on request for HTML "link" element with rel="icon" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-link-prefetch.optional.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-link-prefetch.optional.sub.html new file mode 100644 index 0000000..59d677d --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-link-prefetch.optional.sub.html @@ -0,0 +1,71 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for HTML "link" element with rel="prefetch" + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-meta-refresh.optional.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-meta-refresh.optional.sub.html new file mode 100644 index 0000000..5a8d8f8 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-meta-refresh.optional.sub.html @@ -0,0 +1,60 @@ + + + + + HTTP headers on request for HTML "meta" element with http-equiv="refresh" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-picture.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-picture.sub.html new file mode 100644 index 0000000..903aeed --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-picture.sub.html @@ -0,0 +1,101 @@ + + + + + HTTP headers on request for HTML "picture" element source + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-script.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-script.sub.html new file mode 100644 index 0000000..4a281ae --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-script.sub.html @@ -0,0 +1,54 @@ + + + + + HTTP headers on request for HTML "script" element source + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-video-poster.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-video-poster.sub.html new file mode 100644 index 0000000..9cdaf06 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-video-poster.sub.html @@ -0,0 +1,62 @@ + + + + + HTTP headers on request for HTML "video" element "poster" + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/element-video.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/element-video.sub.html new file mode 100644 index 0000000..1b7b976 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/element-video.sub.html @@ -0,0 +1,51 @@ + + + + + HTTP headers on request for HTML "video" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/fetch-via-serviceworker.https.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/fetch-via-serviceworker.https.sub.html new file mode 100644 index 0000000..eead710 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/fetch-via-serviceworker.https.sub.html @@ -0,0 +1,88 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request using the "fetch" API and passing through a Service Worker + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/fetch.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/fetch.sub.html new file mode 100644 index 0000000..a8dc536 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/fetch.sub.html @@ -0,0 +1,42 @@ + + + + + HTTP headers on request using the "fetch" API + + + + + + + diff --git
a/test/wpt/tests/fetch/metadata/tools/templates/form-submission.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/form-submission.sub.html new file mode 100644 index 0000000..4c9c8c5 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/form-submission.sub.html @@ -0,0 +1,87 @@ + + + + + + HTTP headers on request for HTML form navigation + + + {%- if subtests|selectattr('userActivated')|list %} + + + {%- endif %} + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/header-link.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/header-link.sub.html new file mode 100644 index 0000000..2831f22 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/header-link.sub.html @@ -0,0 +1,56 @@ + + + + + HTTP headers on request for HTTP "Link" header + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/header-refresh.optional.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/header-refresh.optional.sub.html new file mode 100644 index 0000000..ec963d5 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/header-refresh.optional.sub.html @@ -0,0 +1,59 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for HTTP "Refresh" header + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/script-module-import-dynamic.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/script-module-import-dynamic.sub.html new file mode 100644 index 0000000..653d3cd --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/script-module-import-dynamic.sub.html @@ -0,0 +1,35 @@ + + + + + HTTP headers on request for dynamic ECMAScript module import + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/script-module-import-static.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/script-module-import-static.sub.html new file mode 100644 index 0000000..c8d5f95 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/script-module-import-static.sub.html @@ -0,0 +1,53 @@ + + + + + HTTP headers on request for static ECMAScript module import + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/serviceworker.https.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/serviceworker.https.sub.html new file mode 100644 index 0000000..8284325 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/serviceworker.https.sub.html @@ -0,0 +1,72 @@ + + + + + + HTTP headers on request for Service Workers + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/svg-image.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/svg-image.sub.html new file mode 100644 index 0000000..52f7806 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/svg-image.sub.html @@ -0,0 +1,75 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for SVG "image" element source + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/window-history.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/window-history.sub.html new file mode 100644 index 0000000..286d019 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/window-history.sub.html @@ -0,0 +1,134 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for navigation via the HTML History API + + + {%- if subtests|selectattr('userActivated')|list %} + + + {%- endif %} + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/window-location.sub.html 
b/test/wpt/tests/fetch/metadata/tools/templates/window-location.sub.html new file mode 100644 index 0000000..96f3912 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/window-location.sub.html @@ -0,0 +1,128 @@ + + + + + {%- if subtests|length > 10 %} + + {%- endif %} + HTTP headers on request for navigation via the HTML Location API + + + {%- if subtests|selectattr('userActivated')|list %} + + + {%- endif %} + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-constructor.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-constructor.sub.html new file mode 100644 index 0000000..fede596 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-constructor.sub.html @@ -0,0 +1,49 @@ + + + + + HTTP headers on request for dedicated worker via the "Worker" constructor + + + + + diff --git a/test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-importscripts.sub.html b/test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-importscripts.sub.html new file mode 100644 index 0000000..93e6374 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/tools/templates/worker-dedicated-importscripts.sub.html @@ -0,0 +1,54 @@ + + + + + HTTP headers on request for dedicated worker via the "importScripts" API + + + + + diff --git a/test/wpt/tests/fetch/metadata/track.https.sub.html b/test/wpt/tests/fetch/metadata/track.https.sub.html new file mode 100644 index 0000000..346798f --- /dev/null +++ b/test/wpt/tests/fetch/metadata/track.https.sub.html @@ -0,0 +1,119 @@ + + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/trailing-dot.https.sub.any.js b/test/wpt/tests/fetch/metadata/trailing-dot.https.sub.any.js new file mode 100644 index 0000000..5e32fc4 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/trailing-dot.https.sub.any.js @@ -0,0 +1,30 @@ +// META: global=window,worker +// META: script=/fetch/metadata/resources/helper.js + +// Site +promise_test(t => { + return validate_expectations_custom_url("https://{{host}}.:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {}, { + "site": "cross-site", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Fetching a resource from the same origin, but spelled with a trailing dot."); +}, "Fetching a resource from the same origin, but spelled with a trailing dot."); + +promise_test(t => { + return validate_expectations_custom_url("https://{{hosts[][www]}}.:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {}, { + "site": "cross-site", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Fetching a resource from the same site, but spelled with a trailing dot."); +}, "Fetching a resource from the same site, but spelled with a trailing dot."); + +promise_test(t => { + return validate_expectations_custom_url("https://{{hosts[alt][www]}}.:{{ports[https][0]}}/fetch/metadata/resources/echo-as-json.py", {}, { + "site": "cross-site", + "user": "", + "mode": "cors", + "dest": "empty" + }, "Fetching a resource from a cross-site host, spelled with a trailing dot."); +}, "Fetching a resource from a cross-site host, spelled with a trailing dot."); diff --git a/test/wpt/tests/fetch/metadata/unload.https.sub.html b/test/wpt/tests/fetch/metadata/unload.https.sub.html new file mode 100644 index 0000000..bc26048 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/unload.https.sub.html @@ -0,0 +1,64 @@ + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/window-open.https.sub.html 
b/test/wpt/tests/fetch/metadata/window-open.https.sub.html new file mode 100644 index 0000000..94ba76a --- /dev/null +++ b/test/wpt/tests/fetch/metadata/window-open.https.sub.html @@ -0,0 +1,199 @@ + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/worker.https.sub.html b/test/wpt/tests/fetch/metadata/worker.https.sub.html new file mode 100644 index 0000000..20a4fe5 --- /dev/null +++ b/test/wpt/tests/fetch/metadata/worker.https.sub.html @@ -0,0 +1,24 @@ + + + + + + + + + diff --git a/test/wpt/tests/fetch/metadata/xslt.https.sub.html b/test/wpt/tests/fetch/metadata/xslt.https.sub.html new file mode 100644 index 0000000..dc72d7b --- /dev/null +++ b/test/wpt/tests/fetch/metadata/xslt.https.sub.html @@ -0,0 +1,25 @@ + + + + + + + + diff --git a/test/wpt/tests/fetch/nosniff/image.html b/test/wpt/tests/fetch/nosniff/image.html new file mode 100644 index 0000000..9dfdb94 --- /dev/null +++ b/test/wpt/tests/fetch/nosniff/image.html @@ -0,0 +1,39 @@ + + +
+ diff --git a/test/wpt/tests/fetch/nosniff/importscripts.html b/test/wpt/tests/fetch/nosniff/importscripts.html new file mode 100644 index 0000000..920b6bd --- /dev/null +++ b/test/wpt/tests/fetch/nosniff/importscripts.html @@ -0,0 +1,14 @@ + + +
+ diff --git a/test/wpt/tests/fetch/nosniff/importscripts.js b/test/wpt/tests/fetch/nosniff/importscripts.js new file mode 100644 index 0000000..1895280 --- /dev/null +++ b/test/wpt/tests/fetch/nosniff/importscripts.js @@ -0,0 +1,28 @@ +// Testing importScripts() +function log(w) { this.postMessage(w) } +function f() { log("FAIL") } +function p() { log("PASS") } + +const get_url = (mime, outcome) => { + let url = "resources/js.py" + if (mime != null) { + url += "?type=" + encodeURIComponent(mime) + } + if (outcome) { + url += "&outcome=p" + } + return url +} + +[null, "", "x", "x/x", "text/html", "text/json"].forEach(function(mime) { + try { + importScripts(get_url(mime)) + } catch(e) { + (e.name == "NetworkError") ? p() : log("FAIL (no NetworkError exception): " + mime) + } + +}) +importScripts(get_url("text/javascript", true)) +importScripts(get_url("text/ecmascript", true)) +importScripts(get_url("text/ecmascript;blah", true)) +log("END") diff --git a/test/wpt/tests/fetch/nosniff/parsing-nosniff.window.js b/test/wpt/tests/fetch/nosniff/parsing-nosniff.window.js new file mode 100644 index 0000000..2a26486 --- /dev/null +++ b/test/wpt/tests/fetch/nosniff/parsing-nosniff.window.js @@ -0,0 +1,27 @@ +promise_test(() => fetch("resources/x-content-type-options.json").then(res => res.json()).then(runTests), "Loading JSON…"); + +function runTests(allTestData) { + for (let i = 0; i < allTestData.length; i++) { + const testData = allTestData[i], + input = encodeURIComponent(testData.input); + promise_test(t => { + let resolve; + const promise = new Promise(r => resolve = r); + const script = document.createElement("script"); + t.add_cleanup(() => script.remove()); + // A + +
+ diff --git a/test/wpt/tests/fetch/nosniff/stylesheet.html b/test/wpt/tests/fetch/nosniff/stylesheet.html new file mode 100644 index 0000000..8f2b547 --- /dev/null +++ b/test/wpt/tests/fetch/nosniff/stylesheet.html @@ -0,0 +1,60 @@ + + + +
+ diff --git a/test/wpt/tests/fetch/nosniff/worker.html b/test/wpt/tests/fetch/nosniff/worker.html new file mode 100644 index 0000000..c8c1076 --- /dev/null +++ b/test/wpt/tests/fetch/nosniff/worker.html @@ -0,0 +1,28 @@ + + +
+ diff --git a/test/wpt/tests/fetch/orb/resources/data.json b/test/wpt/tests/fetch/orb/resources/data.json new file mode 100644 index 0000000..f2a886f --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/data.json @@ -0,0 +1,3 @@ +{ + "hello": "world" +} diff --git a/test/wpt/tests/fetch/orb/resources/data_non_ascii.json b/test/wpt/tests/fetch/orb/resources/data_non_ascii.json new file mode 100644 index 0000000..64566c5 --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/data_non_ascii.json @@ -0,0 +1 @@ +["你好"] diff --git a/test/wpt/tests/fetch/orb/resources/empty.json b/test/wpt/tests/fetch/orb/resources/empty.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/empty.json @@ -0,0 +1 @@ +{} diff --git a/test/wpt/tests/fetch/orb/resources/font.ttf b/test/wpt/tests/fetch/orb/resources/font.ttf new file mode 100644 index 0000000..9023592 Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/font.ttf differ diff --git a/test/wpt/tests/fetch/orb/resources/image.png b/test/wpt/tests/fetch/orb/resources/image.png new file mode 100644 index 0000000..820f8ca Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/image.png differ diff --git a/test/wpt/tests/fetch/orb/resources/js-unlabeled-utf16-without-bom.json b/test/wpt/tests/fetch/orb/resources/js-unlabeled-utf16-without-bom.json new file mode 100644 index 0000000..157a8f5 Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/js-unlabeled-utf16-without-bom.json differ diff --git a/test/wpt/tests/fetch/orb/resources/js-unlabeled.js b/test/wpt/tests/fetch/orb/resources/js-unlabeled.js new file mode 100644 index 0000000..a880a5b --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/js-unlabeled.js @@ -0,0 +1 @@ +window.has_executed_script = true; diff --git a/test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png b/test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png new file mode 100644 index 0000000..820f8ca Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png differ diff --git a/test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png.headers b/test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png.headers new file mode 100644 index 0000000..156209f --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/png-mislabeled-as-html.png.headers @@ -0,0 +1 @@ +Content-Type: text/html diff --git a/test/wpt/tests/fetch/orb/resources/png-unlabeled.png b/test/wpt/tests/fetch/orb/resources/png-unlabeled.png new file mode 100644 index 0000000..820f8ca Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/png-unlabeled.png differ diff --git a/test/wpt/tests/fetch/orb/resources/script-asm-js-invalid.js b/test/wpt/tests/fetch/orb/resources/script-asm-js-invalid.js new file mode 100644 index 0000000..8d1bbd6 --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/script-asm-js-invalid.js @@ -0,0 +1,4 @@ +function f() { + "use asm"; + return; +} diff --git a/test/wpt/tests/fetch/orb/resources/script-asm-js-valid.js b/test/wpt/tests/fetch/orb/resources/script-asm-js-valid.js new file mode 100644 index 0000000..79b375f --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/script-asm-js-valid.js @@ -0,0 +1,4 @@ +function f() { + "use asm"; + return {}; +} diff --git a/test/wpt/tests/fetch/orb/resources/script-iso-8559-1.js b/test/wpt/tests/fetch/orb/resources/script-iso-8559-1.js new file mode 100644 index 0000000..3bccb6a --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/script-iso-8559-1.js @@ -0,0 
+1,4 @@ +"use strict"; +function fn() { + return "§A¦n"; +} diff --git a/test/wpt/tests/fetch/orb/resources/script-utf16-bom.js b/test/wpt/tests/fetch/orb/resources/script-utf16-bom.js new file mode 100644 index 0000000..16b76e9 Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/script-utf16-bom.js differ diff --git a/test/wpt/tests/fetch/orb/resources/script-utf16-without-bom.js b/test/wpt/tests/fetch/orb/resources/script-utf16-without-bom.js new file mode 100644 index 0000000..d983086 Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/script-utf16-without-bom.js differ diff --git a/test/wpt/tests/fetch/orb/resources/script.js b/test/wpt/tests/fetch/orb/resources/script.js new file mode 100644 index 0000000..19675d2 --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/script.js @@ -0,0 +1,4 @@ +"use strict"; +function fn() { + return 42; +} diff --git a/test/wpt/tests/fetch/orb/resources/sound.mp3 b/test/wpt/tests/fetch/orb/resources/sound.mp3 new file mode 100644 index 0000000..a15d1de Binary files /dev/null and b/test/wpt/tests/fetch/orb/resources/sound.mp3 differ diff --git a/test/wpt/tests/fetch/orb/resources/text.txt b/test/wpt/tests/fetch/orb/resources/text.txt new file mode 100644 index 0000000..270c611 --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/text.txt @@ -0,0 +1 @@ +hello, world! diff --git a/test/wpt/tests/fetch/orb/resources/utils.js b/test/wpt/tests/fetch/orb/resources/utils.js new file mode 100644 index 0000000..94a2177 --- /dev/null +++ b/test/wpt/tests/fetch/orb/resources/utils.js @@ -0,0 +1,18 @@ +function header(name, value) { + return `header(${name},${value})`; +} + +function contentType(type) { + return header("Content-Type", type); +} + +function contentTypeOptions(type) { + return header("X-Content-Type-Options", type); +} + +function fetchORB(file, options, ...pipe) { + return fetch(`${file}${pipe.length ? `?pipe=${pipe.join("|")}` : ""}`, { + ...(options || {}), + mode: "no-cors", + }); +} diff --git a/test/wpt/tests/fetch/orb/tentative/compressed-image-sniffing.sub.html b/test/wpt/tests/fetch/orb/tentative/compressed-image-sniffing.sub.html new file mode 100644 index 0000000..38e70c6 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/compressed-image-sniffing.sub.html @@ -0,0 +1,20 @@ + + + +
+ + diff --git a/test/wpt/tests/fetch/orb/tentative/content-range.sub.any.js b/test/wpt/tests/fetch/orb/tentative/content-range.sub.any.js new file mode 100644 index 0000000..ee97521 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/content-range.sub.any.js @@ -0,0 +1,31 @@ +// META: script=/fetch/orb/resources/utils.js + +const url = + "http://{{domains[www1]}}:{{ports[http][0]}}/fetch/orb/resources/image.png"; + +promise_test(async () => { + let headers = new Headers([["Range", "bytes=0-99"]]); + await fetchORB( + url, + { headers }, + header("Content-Range", "bytes 0-99/1010"), + "slice(null,100)", + "status(206)" + ); +}, "ORB shouldn't block opaque range of image/png starting at zero"); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB( + url, + { headers: new Headers([["Range", "bytes 10-99"]]) }, + header("Content-Range", "bytes 10-99/1010"), + "slice(10,100)", + "status(206)" + ) + ), + "ORB should block opaque range of image/png not starting at zero, that isn't subsequent" +); diff --git a/test/wpt/tests/fetch/orb/tentative/img-mime-types-coverage.tentative.sub.html b/test/wpt/tests/fetch/orb/tentative/img-mime-types-coverage.tentative.sub.html new file mode 100644 index 0000000..5dc6c5d --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/img-mime-types-coverage.tentative.sub.html @@ -0,0 +1,126 @@ + + + +
+ + diff --git a/test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub-ref.html b/test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub-ref.html new file mode 100644 index 0000000..66462fb --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub-ref.html @@ -0,0 +1,5 @@ + + + + + diff --git a/test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub.html b/test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub.html new file mode 100644 index 0000000..aa03f4d --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/img-png-mislabeled-as-html.sub.html @@ -0,0 +1,7 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub-ref.html b/test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub-ref.html new file mode 100644 index 0000000..2d5e3bb --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub-ref.html @@ -0,0 +1,5 @@ + + + + + diff --git a/test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub.html b/test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub.html new file mode 100644 index 0000000..77415f6 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/img-png-unlabeled.sub.html @@ -0,0 +1,7 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/orb/tentative/known-mime-type.sub.any.js b/test/wpt/tests/fetch/orb/tentative/known-mime-type.sub.any.js new file mode 100644 index 0000000..b0521e8 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/known-mime-type.sub.any.js @@ -0,0 +1,86 @@ +// META: script=/fetch/orb/resources/utils.js + +const path = "http://{{domains[www1]}}:{{ports[http][0]}}/fetch/orb/resources"; + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB(`${path}/font.ttf`, null, contentType("font/ttf")) + ), + "ORB should block opaque font/ttf" +); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB(`${path}/text.txt`, null, contentType("text/plain")) + ), + "ORB should block opaque text/plain" +); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB(`${path}/data.json`, null, contentType("application/json")) + ), + "ORB should block opaque application/json (non-empty)" +); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB(`${path}/empty.json`, null, contentType("application/json")) + ), + "ORB should block opaque application/json (empty)" +); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB(`${path}/data_non_ascii.json`, null, contentType("application/json")) + ), + "ORB should block opaque application/json which contains non ascii characters" +); + +promise_test(async () => { + fetchORB(`${path}/image.png`, null, contentType("image/png")); +}, "ORB shouldn't block opaque image/png"); + +promise_test(async () => { + await fetchORB(`${path}/script.js`, null, contentType("text/javascript")); +}, "ORB shouldn't block opaque text/javascript"); + +// Test javascript validation can correctly decode the content with BOM. +promise_test(async () => { + await fetchORB(`${path}/script-utf16-bom.js`, null, contentType("application/json")); +}, "ORB shouldn't block opaque text/javascript (utf16 encoded with BOM)"); + +// Test javascript validation can correctly decode the content with the http charset hint. 
+promise_test(async () => { + await fetchORB(`${path}/script-utf16-without-bom.js`, null, contentType("application/json; charset=utf-16")); +}, "ORB shouldn't block opaque text/javascript (utf16 encoded without BOM but charset is provided in content-type)"); + +// Test javascript validation can correctly decode the content for iso-8559-1 (fallback decoder in Firefox). +promise_test(async () => { + await fetchORB(`${path}/script-iso-8559-1.js`, null, contentType("application/json")); +}, "ORB shouldn't block opaque text/javascript (iso-8559-1 encoded)"); + +// Test javascript validation can correctly parse asm.js. +promise_test(async () => { + await fetchORB(`${path}/script-asm-js-valid.js`, null, contentType("application/json")); +}, "ORB shouldn't block text/javascript with valid asm.js"); + +// Test javascript validation can correctly parse invalid asm.js with valid JS syntax. +promise_test(async () => { + await fetchORB(`${path}/script-asm-js-invalid.js`, null, contentType("application/json")); +}, "ORB shouldn't block text/javascript with invalid asm.js"); diff --git a/test/wpt/tests/fetch/orb/tentative/nosniff.sub.any.js b/test/wpt/tests/fetch/orb/tentative/nosniff.sub.any.js new file mode 100644 index 0000000..3df9d22 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/nosniff.sub.any.js @@ -0,0 +1,59 @@ +// META: script=/fetch/orb/resources/utils.js + +const path = "http://{{domains[www1]}}:{{ports[http][0]}}/fetch/orb/resources"; + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB( + `${path}/text.txt`, + null, + contentType("text/plain"), + contentTypeOptions("nosniff") + ) + ), + "ORB should block opaque text/plain with nosniff" +); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB( + `${path}/data.json`, + null, + contentType("application/json"), + contentTypeOptions("nosniff") + ) + ), + "ORB should block opaque-response-blocklisted MIME type with nosniff" +); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB( + `${path}/data.json`, + null, + contentType(""), + contentTypeOptions("nosniff") + ) + ), + "ORB should block opaque response with empty Content-Type and nosniff" +); + +promise_test( + () => + fetchORB( + `${path}/image.png`, + null, + contentType(""), + contentTypeOptions("nosniff") + ), + "ORB shouldn't block opaque image with empty Content-Type and nosniff" +); diff --git a/test/wpt/tests/fetch/orb/tentative/script-js-unlabeled-gziped.sub.html b/test/wpt/tests/fetch/orb/tentative/script-js-unlabeled-gziped.sub.html new file mode 100644 index 0000000..fe85440 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/script-js-unlabeled-gziped.sub.html @@ -0,0 +1,24 @@ + + + + + +
+ + + + + + + + diff --git a/test/wpt/tests/fetch/orb/tentative/script-unlabeled.sub.html b/test/wpt/tests/fetch/orb/tentative/script-unlabeled.sub.html new file mode 100644 index 0000000..4987f13 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/script-unlabeled.sub.html @@ -0,0 +1,24 @@ + + + + + +
+ + + + + + + + diff --git a/test/wpt/tests/fetch/orb/tentative/script-utf16-without-bom-hint-charset.sub.html b/test/wpt/tests/fetch/orb/tentative/script-utf16-without-bom-hint-charset.sub.html new file mode 100644 index 0000000..b15f976 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/script-utf16-without-bom-hint-charset.sub.html @@ -0,0 +1,22 @@ + + + + +
+ + + + + + + diff --git a/test/wpt/tests/fetch/orb/tentative/status.sub.any.js b/test/wpt/tests/fetch/orb/tentative/status.sub.any.js new file mode 100644 index 0000000..b94d8b7 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/status.sub.any.js @@ -0,0 +1,33 @@ +// META: script=/fetch/orb/resources/utils.js + +const path = "http://{{domains[www1]}}:{{ports[http][0]}}/fetch/orb/resources"; + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB( + `${path}/data.json`, + null, + contentType("application/json"), + "status(206)" + ) + ), + "ORB should block opaque-response-blocklisted MIME type with status 206" +); + +promise_test( + t => + promise_rejects_js( + t, + TypeError, + fetchORB( + `${path}/data.json`, + null, + contentType("application/json"), + "status(302)" + ) + ), + "ORB should block opaque response with non-ok status" +); diff --git a/test/wpt/tests/fetch/orb/tentative/status.sub.html b/test/wpt/tests/fetch/orb/tentative/status.sub.html new file mode 100644 index 0000000..a62bdeb --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/status.sub.html @@ -0,0 +1,17 @@ +'use strict'; + + + +
+ diff --git a/test/wpt/tests/fetch/orb/tentative/unknown-mime-type.sub.any.js b/test/wpt/tests/fetch/orb/tentative/unknown-mime-type.sub.any.js new file mode 100644 index 0000000..f72ff92 --- /dev/null +++ b/test/wpt/tests/fetch/orb/tentative/unknown-mime-type.sub.any.js @@ -0,0 +1,28 @@ +// META: script=/fetch/orb/resources/utils.js + +const path = "http://{{domains[www1]}}:{{ports[http][0]}}/fetch/orb/resources"; + +promise_test( + () => fetchORB(`${path}/font.ttf`, null, contentType("")), + "ORB shouldn't block opaque failed missing MIME type (font/ttf)" +); + +promise_test( + () => fetchORB(`${path}/text.txt`, null, contentType("")), + "ORB shouldn't block opaque failed missing MIME type (text/plain)" +); + +promise_test( + t => fetchORB(`${path}/data.json`, null, contentType("")), + "ORB shouldn't block opaque failed missing MIME type (application/json)" +); + +promise_test( + () => fetchORB(`${path}/image.png`, null, contentType("")), + "ORB shouldn't block opaque failed missing MIME type (image/png)" +); + +promise_test( + () => fetchORB(`${path}/script.js`, null, contentType("")), + "ORB shouldn't block opaque failed missing MIME type (text/javascript)" +); diff --git a/test/wpt/tests/fetch/origin/assorted.window.js b/test/wpt/tests/fetch/origin/assorted.window.js new file mode 100644 index 0000000..033d010 --- /dev/null +++ b/test/wpt/tests/fetch/origin/assorted.window.js @@ -0,0 +1,211 @@ +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js + +const origins = get_host_info(); + +promise_test(async function () { + const stash = token(), + redirectPath = "/fetch/origin/resources/redirect-and-stash.py"; + + // Cross-origin -> same-origin will result in setting the tainted origin flag for the second + // request. + let url = origins.HTTP_ORIGIN + redirectPath + "?stash=" + stash; + url = origins.HTTP_REMOTE_ORIGIN + redirectPath + "?stash=" + stash + "&location=" + encodeURIComponent(url) + "&dummyJS"; + + await fetch(url, { mode: "no-cors", method: "POST" }); + + const json = await (await fetch(redirectPath + "?dump&stash=" + stash)).json(); + + assert_equals(json[0], origins.HTTP_ORIGIN); + assert_equals(json[1], "null"); +}, "Origin header and 308 redirect"); + +promise_test(async function () { + const stash = token(), + redirectPath = "/fetch/origin/resources/redirect-and-stash.py"; + + let url = origins.HTTP_ORIGIN + redirectPath + "?stash=" + stash; + url = origins.HTTP_REMOTE_ORIGIN + redirectPath + "?stash=" + stash + "&location=" + encodeURIComponent(url); + + await new Promise(resolve => { + const frame = document.createElement("iframe"); + frame.src = url; + frame.onload = () => { + resolve(); + frame.remove(); + } + document.body.appendChild(frame); + }); + + const json = await (await fetch(redirectPath + "?dump&stash=" + stash)).json(); + + assert_equals(json[0], "no Origin header"); + assert_equals(json[1], "no Origin header"); +}, "Origin header and GET navigation"); + +promise_test(async function () { + const stash = token(), + redirectPath = "/fetch/origin/resources/redirect-and-stash.py"; + + let url = origins.HTTP_ORIGIN + redirectPath + "?stash=" + stash; + url = origins.HTTP_REMOTE_ORIGIN + redirectPath + "?stash=" + stash + "&location=" + encodeURIComponent(url); + + await new Promise(resolve => { + const frame = document.createElement("iframe"); + self.addEventListener("message", e => { + if (e.data === "loaded") { + resolve(); + frame.remove(); + } + }, { once: true }); + frame.onload = () => { + const doc = frame.contentDocument, + 
form = doc.body.appendChild(doc.createElement("form")), + submit = form.appendChild(doc.createElement("input")); + form.action = url; + form.method = "POST"; + submit.type = "submit"; + submit.click(); + } + document.body.appendChild(frame); + }); + + const json = await (await fetch(redirectPath + "?dump&stash=" + stash)).json(); + + assert_equals(json[0], origins.HTTP_ORIGIN); + assert_equals(json[1], "null"); +}, "Origin header and POST navigation"); + +function navigationReferrerPolicy(referrerPolicy, destination, expectedOrigin) { + return async function () { + const stash = token(); + const referrerPolicyPath = "/fetch/origin/resources/referrer-policy.py"; + const redirectPath = "/fetch/origin/resources/redirect-and-stash.py"; + + let postUrl = + (destination === "same-origin" ? origins.HTTP_ORIGIN + : origins.HTTP_REMOTE_ORIGIN) + + redirectPath + "?stash=" + stash; + + await new Promise(resolve => { + const frame = document.createElement("iframe"); + document.body.appendChild(frame); + frame.src = origins.HTTP_ORIGIN + referrerPolicyPath + + "?referrerPolicy=" + referrerPolicy; + self.addEventListener("message", function listener(e) { + if (e.data === "loaded") { + resolve(); + frame.remove(); + self.removeEventListener("message", listener); + } else if (e.data === "action") { + const doc = frame.contentDocument, + form = doc.body.appendChild(doc.createElement("form")), + submit = form.appendChild(doc.createElement("input")); + form.action = postUrl; + form.method = "POST"; + submit.type = "submit"; + submit.click(); + } + }); + }); + + const json = await (await fetch(redirectPath + "?dump&stash=" + stash)).json(); + + assert_equals(json[0], expectedOrigin); + }; +} + +function fetchReferrerPolicy(referrerPolicy, destination, fetchMode, expectedOrigin, httpMethod) { + return async function () { + const stash = token(); + const redirectPath = "/fetch/origin/resources/redirect-and-stash.py"; + + let fetchUrl = + (destination === "same-origin" ? 
origins.HTTP_ORIGIN + : origins.HTTP_REMOTE_ORIGIN) + + redirectPath + "?stash=" + stash + "&dummyJS"; + + await fetch(fetchUrl, { mode: fetchMode, method: httpMethod , "referrerPolicy": referrerPolicy}); + + const json = await (await fetch(redirectPath + "?dump&stash=" + stash)).json(); + + assert_equals(json[0], expectedOrigin); + }; +} + +function referrerPolicyTestString(referrerPolicy, method, destination) { + return "Origin header and " + method + " " + destination + " with Referrer-Policy " + + referrerPolicy; +} + +[ + { + "policy": "no-referrer", + "expectedOriginForSameOrigin": "null", + "expectedOriginForCrossOrigin": "null" + }, + { + "policy": "same-origin", + "expectedOriginForSameOrigin": origins.HTTP_ORIGIN, + "expectedOriginForCrossOrigin": "null" + }, + { + "policy": "origin-when-cross-origin", + "expectedOriginForSameOrigin": origins.HTTP_ORIGIN, + "expectedOriginForCrossOrigin": origins.HTTP_ORIGIN + }, + { + "policy": "no-referrer-when-downgrade", + "expectedOriginForSameOrigin": origins.HTTP_ORIGIN, + "expectedOriginForCrossOrigin": origins.HTTP_ORIGIN + }, + { + "policy": "unsafe-url", + "expectedOriginForSameOrigin": origins.HTTP_ORIGIN, + "expectedOriginForCrossOrigin": origins.HTTP_ORIGIN + }, +].forEach(testObj => { + [ + { + "name": "same-origin", + "expectedOrigin": testObj.expectedOriginForSameOrigin + }, + { + "name": "cross-origin", + "expectedOrigin": testObj.expectedOriginForCrossOrigin + } + ].forEach(destination => { + // Test form POST navigation + promise_test(navigationReferrerPolicy(testObj.policy, + destination.name, + destination.expectedOrigin), + referrerPolicyTestString(testObj.policy, "POST", + destination.name + " navigation")); + // Test fetch + promise_test(fetchReferrerPolicy(testObj.policy, + destination.name, + "no-cors", + destination.expectedOrigin, + "POST"), + referrerPolicyTestString(testObj.policy, "POST", + destination.name + " fetch no-cors mode")); + + // Test cors mode POST + promise_test(fetchReferrerPolicy(testObj.policy, + destination.name, + "cors", + origins.HTTP_ORIGIN, + "POST"), + referrerPolicyTestString(testObj.policy, "POST", + destination.name + " fetch cors mode")); + + // Test cors mode GET + promise_test(fetchReferrerPolicy(testObj.policy, + destination.name, + "cors", + (destination.name == "same-origin") ? 
"no Origin header" : origins.HTTP_ORIGIN, + "GET"), + referrerPolicyTestString(testObj.policy, "GET", + destination.name + " fetch cors mode")); + }); +}); diff --git a/test/wpt/tests/fetch/origin/resources/redirect-and-stash.py b/test/wpt/tests/fetch/origin/resources/redirect-and-stash.py new file mode 100644 index 0000000..36c584c --- /dev/null +++ b/test/wpt/tests/fetch/origin/resources/redirect-and-stash.py @@ -0,0 +1,38 @@ +import json + +from wptserve.utils import isomorphic_decode + +def main(request, response): + key = request.GET.first(b"stash") + origin = request.headers.get(b"origin") + if origin is None: + origin = b"no Origin header" + + origin_list = request.server.stash.take(key) + + if b"dump" in request.GET: + response.headers.set(b"Content-Type", b"application/json") + response.content = json.dumps(origin_list) + return + + if origin_list is None: + origin_list = [isomorphic_decode(origin)] + else: + origin_list.append(isomorphic_decode(origin)) + + request.server.stash.put(key, origin_list) + + if b"location" in request.GET: + location = request.GET.first(b"location") + if b"dummyJS" in request.GET: + location += b"&dummyJS" + response.status = 308 + response.headers.set(b"Location", location) + return + + response.headers.set(b"Content-Type", b"text/html") + response.headers.set(b"Access-Control-Allow-Origin", b"*") + if b"dummyJS" in request.GET: + response.content = b"console.log('dummy JS')" + else: + response.content = b"\n" diff --git a/test/wpt/tests/fetch/origin/resources/referrer-policy.py b/test/wpt/tests/fetch/origin/resources/referrer-policy.py new file mode 100644 index 0000000..15716e0 --- /dev/null +++ b/test/wpt/tests/fetch/origin/resources/referrer-policy.py @@ -0,0 +1,7 @@ +def main(request, response): + if b"referrerPolicy" in request.GET: + response.headers.set(b"Referrer-Policy", + request.GET.first(b"referrerPolicy")) + response.status = 200 + response.headers.set(b"Content-Type", b"text/html") + response.content = b"\n" diff --git a/test/wpt/tests/fetch/private-network-access/META.yml b/test/wpt/tests/fetch/private-network-access/META.yml new file mode 100644 index 0000000..944ce6f --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/META.yml @@ -0,0 +1,7 @@ +spec: https://wicg.github.io/private-network-access/ +suggested_reviewers: + - letitz + - lyf + - hemeryar + - camillelamy + - mikewest diff --git a/test/wpt/tests/fetch/private-network-access/README.md b/test/wpt/tests/fetch/private-network-access/README.md new file mode 100644 index 0000000..a69aab4 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/README.md @@ -0,0 +1,10 @@ +# Private Network Access tests + +This directory contains tests for Private Network Access' integration with +the Fetch specification. 
+ +See also: + +* [The specification](https://wicg.github.io/private-network-access/) +* [The repository](https://github.com/WICG/private-network-access/) +* [Open issues](https://github.com/WICG/private-network-access/issues/) diff --git a/test/wpt/tests/fetch/private-network-access/fenced-frame-no-preflight-required.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/fenced-frame-no-preflight-required.tentative.https.window.js new file mode 100644 index 0000000..21233f6 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/fenced-frame-no-preflight-required.tentative.https.window.js @@ -0,0 +1,91 @@ +// META: script=/common/dispatcher/dispatcher.js +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// META: script=/fenced-frame/resources/utils.js +// META: timeout=long +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that contexts can navigate fenced frames to more-public or +// same address spaces without private network access preflight request header. + +setup(() => { + assert_true(window.isSecureContext); +}); + +// Source: secure local context. +// +// All fetches unaffected by Private Network Access. + +promise_test_parallel( + t => fencedFrameTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: {server: Server.HTTPS_LOCAL}, + expected: FrameTestResult.SUCCESS, + }), + 'local to local: no preflight required.'); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: {server: Server.HTTPS_PRIVATE}, + expected: FrameTestResult.SUCCESS, + }), + 'local to private: no preflight required.'); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: {server: Server.HTTPS_PUBLIC}, + expected: FrameTestResult.SUCCESS, + }), + 'local to public: no preflight required.'); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: {server: Server.HTTPS_PRIVATE}, + target: {server: Server.HTTPS_PRIVATE}, + expected: FrameTestResult.SUCCESS, + }), + 'private to private: no preflight required.'); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: {server: Server.HTTPS_PRIVATE}, + target: {server: Server.HTTPS_PUBLIC}, + expected: FrameTestResult.SUCCESS, + }), + 'private to public: no preflight required.'); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: {server: Server.HTTPS_PUBLIC}, + target: {server: Server.HTTPS_PUBLIC}, + expected: FrameTestResult.SUCCESS, + }), + 'public to public: no preflight required.'); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: {server: Server.HTTPS_PUBLIC}, + expected: FrameTestResult.SUCCESS, + }), + 'treat-as-public-address to public: no preflight required.'); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: {preflight: PreflightBehavior.optionalSuccess(token())} + }, + expected: FrameTestResult.SUCCESS, + }), + 'treat-as-public-address to local: optional preflight'); diff --git a/test/wpt/tests/fetch/private-network-access/fenced-frame-subresource-fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/fenced-frame-subresource-fetch.tentative.https.window.js new file mode 100644 index 0000000..2dff325 --- /dev/null +++ 
b/test/wpt/tests/fetch/private-network-access/fenced-frame-subresource-fetch.tentative.https.window.js @@ -0,0 +1,330 @@ +// META: script=/common/subset-tests-by-key.js +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// META: script=/fenced-frame/resources/utils.js +// META: variant=?include=baseline +// META: variant=?include=from-local +// META: variant=?include=from-private +// META: variant=?include=from-public +// META: timeout=long +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that secure contexts can fetch subresources in fenced +// frames from all address spaces, provided that the target server, if more +// private than the initiator, respond affirmatively to preflight requests. +// + +setup(() => { + // Making sure we are in a secure context, as expected. + assert_true(window.isSecureContext); +}); + +// Source: secure local context. +// +// All fetches unaffected by Private Network Access. + +subsetTestByKey( + 'from-local', promise_test, t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: {server: Server.HTTPS_LOCAL}, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + 'local to local: no preflight required.'); + +subsetTestByKey( + 'from-local', promise_test, + t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: { + server: Server.HTTPS_PRIVATE, + behavior: {response: ResponseBehavior.allowCrossOrigin()}, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + 'local to private: no preflight required.'); + + +subsetTestByKey( + 'from-local', promise_test, + t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: { + server: Server.HTTPS_PUBLIC, + behavior: {response: ResponseBehavior.allowCrossOrigin()}, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + 'local to public: no preflight required.'); + +// Strictly speaking, the following two tests do not exercise PNA-specific +// logic, but they serve as a baseline for comparison, ensuring that non-PNA +// preflight requests are sent and handled as expected. + +subsetTestByKey( + 'baseline', promise_test, + t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { + preflight: PreflightBehavior.failure(), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: {method: 'PUT', mode: 'cors'}, + expected: FetchTestResult.FAILURE, + }), + 'local to public: PUT preflight failure.'); + +subsetTestByKey( + 'baseline', promise_test, + t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_LOCAL}, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + } + }, + fetchOptions: {method: 'PUT', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + 'local to public: PUT preflight success.'); + +// Generates tests of preflight behavior for a single (source, target) pair. 
+// +// Scenarios: +// +// - cors mode: +// - preflight response has non-2xx HTTP code +// - preflight response is missing CORS headers +// - preflight response is missing the PNA-specific `Access-Control` header +// - final response is missing CORS headers +// - success +// - success with PUT method (non-"simple" request) +// - no-cors mode: +// - preflight response has non-2xx HTTP code +// - preflight response is missing CORS headers +// - preflight response is missing the PNA-specific `Access-Control` header +// - success +// +function makePreflightTests({ + subsetKey, + source, + sourceDescription, + targetServer, + targetDescription, +}) { + const prefix = `${sourceDescription} to ${targetDescription}: `; + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.failure(), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.FAILURE, + }), + prefix + 'failed preflight.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.noCorsHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.FAILURE, + }), + prefix + 'missing CORS headers on preflight response.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.noPnaHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.FAILURE, + }), + prefix + 'missing PNA header on preflight response.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: {preflight: PreflightBehavior.success(token())}, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.FAILURE, + }), + prefix + 'missing CORS headers on final response.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + prefix + 'success.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: {method: 'PUT', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + prefix + 'PUT success.'); + + subsetTestByKey( + subsetKey, promise_test, t => fencedFrameFetchTest(t, { + source, + target: {server: targetServer}, + fetchOptions: {method: 'GET', mode: 'no-cors'}, + expected: FetchTestResult.FAILURE, + }), + prefix + 'no-CORS mode failed preflight.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: {preflight: PreflightBehavior.noCorsHeader(token())}, + }, + fetchOptions: {method: 'GET', mode: 'no-cors'}, + expected: FetchTestResult.FAILURE, + }), + prefix + 'no-CORS mode missing CORS 
headers on preflight response.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: {preflight: PreflightBehavior.noPnaHeader(token())}, + }, + fetchOptions: {method: 'GET', mode: 'no-cors'}, + expected: FetchTestResult.FAILURE, + }), + prefix + 'no-CORS mode missing PNA header on preflight response.'); + + subsetTestByKey( + subsetKey, promise_test, + t => fencedFrameFetchTest(t, { + source, + target: { + server: targetServer, + behavior: {preflight: PreflightBehavior.success(token())}, + }, + fetchOptions: {method: 'GET', mode: 'no-cors'}, + expected: FetchTestResult.OPAQUE, + }), + prefix + 'no-CORS mode success.'); +} + +// Source: private secure context. +// +// Fetches to the local address space require a successful preflight response +// carrying a PNA-specific header. + +makePreflightTests({ + subsetKey: 'from-private', + source: {server: Server.HTTPS_PRIVATE}, + sourceDescription: 'private', + targetServer: Server.HTTPS_LOCAL, + targetDescription: 'local', +}); + +subsetTestByKey( + 'from-private', promise_test, t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_PRIVATE}, + target: {server: Server.HTTPS_PRIVATE}, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + 'private to private: no preflight required.'); + +subsetTestByKey( + 'from-private', promise_test, + t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_PRIVATE}, + target: { + server: Server.HTTPS_PRIVATE, + behavior: {response: ResponseBehavior.allowCrossOrigin()}, + }, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + 'private to public: no preflight required.'); + +// Source: public secure context. +// +// Fetches to the local and private address spaces require a successful +// preflight response carrying a PNA-specific header. + +makePreflightTests({ + subsetKey: 'from-public', + source: {server: Server.HTTPS_PUBLIC}, + sourceDescription: 'public', + targetServer: Server.HTTPS_LOCAL, + targetDescription: 'local', +}); + +makePreflightTests({ + subsetKey: 'from-public', + source: {server: Server.HTTPS_PUBLIC}, + sourceDescription: 'public', + targetServer: Server.HTTPS_PRIVATE, + targetDescription: 'private', +}); + +subsetTestByKey( + 'from-public', promise_test, t => fencedFrameFetchTest(t, { + source: {server: Server.HTTPS_PUBLIC}, + target: {server: Server.HTTPS_PUBLIC}, + fetchOptions: {method: 'GET', mode: 'cors'}, + expected: FetchTestResult.SUCCESS, + }), + 'public to public: no preflight required.'); diff --git a/test/wpt/tests/fetch/private-network-access/fenced-frame.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/fenced-frame.tentative.https.window.js new file mode 100644 index 0000000..370cc9f --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/fenced-frame.tentative.https.window.js @@ -0,0 +1,150 @@ +// META: script=/common/dispatcher/dispatcher.js +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// META: script=/fenced-frame/resources/utils.js +// META: timeout=long +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that contexts can navigate fenced frames to less-public +// address spaces iff the target server responds affirmatively to preflight +// requests. 
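+//
+// Note: the "PNA-specific header" exercised throughout these files is, per the
+// Private Network Access draft, the `Access-Control-Request-Private-Network: true`
+// header carried on the preflight request, which the target is expected to
+// acknowledge with `Access-Control-Allow-Private-Network: true` alongside the
+// usual CORS headers. `PreflightBehavior.noPnaHeader` presumably models a
+// server that answers the preflight but omits that response header.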
+ +setup(() => { + assert_true(window.isSecureContext); +}); + +// Generates tests of preflight behavior for a single (source, target) pair. +// +// Scenarios: +// +// - parent navigates child: +// - preflight response has non-2xx HTTP code +// - preflight response is missing CORS headers +// - preflight response is missing the PNA-specific `Access-Control` header +// - preflight response has the required PNA related headers, but still fails +// because of the limitation of fenced frame that subjects to PNA checks. +// +function makePreflightTests({ + sourceName, + sourceServer, + sourceTreatAsPublic, + targetName, + targetServer, +}) { + const prefix = `${sourceName} to ${targetName}: `; + + const source = { + server: sourceServer, + treatAsPublic: sourceTreatAsPublic, + }; + + promise_test_parallel( + t => fencedFrameTest(t, { + source, + target: { + server: targetServer, + behavior: {preflight: PreflightBehavior.failure()}, + }, + expected: FrameTestResult.FAILURE, + }), + prefix + 'failed preflight.'); + + promise_test_parallel( + t => fencedFrameTest(t, { + source, + target: { + server: targetServer, + behavior: {preflight: PreflightBehavior.noCorsHeader(token())}, + }, + expected: FrameTestResult.FAILURE, + }), + prefix + 'missing CORS headers.'); + + promise_test_parallel( + t => fencedFrameTest(t, { + source, + target: { + server: targetServer, + behavior: {preflight: PreflightBehavior.noPnaHeader(token())}, + }, + expected: FrameTestResult.FAILURE, + }), + prefix + 'missing PNA header.'); + + promise_test_parallel( + t => fencedFrameTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin() + }, + }, + expected: FrameTestResult.FAILURE, + }), + prefix + 'failed because fenced frames are incompatible with PNA.'); +} + +// Source: private secure context. +// +// Fetches to the local address space require a successful preflight response +// carrying a PNA-specific header. + +makePreflightTests({ + sourceServer: Server.HTTPS_PRIVATE, + sourceName: 'private', + targetServer: Server.HTTPS_LOCAL, + targetName: 'local', +}); + +// Source: public secure context. +// +// Fetches to the local and private address spaces require a successful +// preflight response carrying a PNA-specific header. + +makePreflightTests({ + sourceServer: Server.HTTPS_PUBLIC, + sourceName: 'public', + targetServer: Server.HTTPS_LOCAL, + targetName: 'local', +}); + +makePreflightTests({ + sourceServer: Server.HTTPS_PUBLIC, + sourceName: 'public', + targetServer: Server.HTTPS_PRIVATE, + targetName: 'private', +}); + +// The following tests verify that `CSP: treat-as-public-address` makes +// documents behave as if they had been served from a public IP address. 
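+//
+// A document opts in by being served with the CSP directive, roughly:
+//
+//   Content-Security-Policy: treat-as-public-address
+//
+// The `treatAsPublic: true` source option used below presumably instructs the
+// shared helpers in support.sub.js to attach that directive to the source
+// document; the exact serving mechanism lives in those helpers, not here.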
+ +makePreflightTests({ + sourceServer: Server.HTTPS_LOCAL, + sourceTreatAsPublic: true, + sourceName: 'treat-as-public-address', + targetServer: Server.OTHER_HTTPS_LOCAL, + targetName: 'local', +}); + +promise_test_parallel( + t => fencedFrameTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: {server: Server.HTTPS_LOCAL}, + expected: FrameTestResult.FAILURE, + }), + 'treat-as-public-address to local (same-origin): fenced frame embedder ' + + 'initiated navigation has opaque origin.'); + +makePreflightTests({ + sourceServer: Server.HTTPS_LOCAL, + sourceTreatAsPublic: true, + sourceName: 'treat-as-public-address', + targetServer: Server.HTTPS_PRIVATE, + targetName: 'private', +}); diff --git a/test/wpt/tests/fetch/private-network-access/fetch-from-treat-as-public.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/fetch-from-treat-as-public.tentative.https.window.js new file mode 100644 index 0000000..084e032 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/fetch-from-treat-as-public.tentative.https.window.js @@ -0,0 +1,80 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that documents fetched from the `local` or `private` +// address space yet carrying the `treat-as-public-address` CSP directive are +// treated as if they had been fetched from the `public` address space. + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + preflight: PreflightBehavior.noPnaHeader(token()), + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public-address to local: failed preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public-address to local: success."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public-address to local (same-origin): no preflight required."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_PRIVATE }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public-address to private: failed preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public-address to private: success."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public-address to public: no preflight required."); diff --git a/test/wpt/tests/fetch/private-network-access/fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/fetch.tentative.https.window.js new 
file mode 100644 index 0000000..dbc4f23 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/fetch.tentative.https.window.js @@ -0,0 +1,271 @@ +// META: script=/common/subset-tests-by-key.js +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// META: variant=?include=baseline +// META: variant=?include=from-local +// META: variant=?include=from-private +// META: variant=?include=from-public +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that secure contexts can fetch subresources from all +// address spaces, provided that the target server, if more private than the +// initiator, respond affirmatively to preflight requests. +// +// This file covers only those tests that must execute in a secure context. +// Other tests are defined in: fetch.window.js + +setup(() => { + // Making sure we are in a secure context, as expected. + assert_true(window.isSecureContext); +}); + +// Source: secure local context. +// +// All fetches unaffected by Private Network Access. + +subsetTestByKey("from-local", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: FetchTestResult.SUCCESS, +}), "local to local: no preflight required."); + +subsetTestByKey("from-local", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "local to private: no preflight required."); + + +subsetTestByKey("from-local", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "local to public: no preflight required."); + +// Strictly speaking, the following two tests do not exercise PNA-specific +// logic, but they serve as a baseline for comparison, ensuring that non-PNA +// preflight requests are sent and handled as expected. + +subsetTestByKey("baseline", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { + preflight: PreflightBehavior.failure(), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { method: "PUT" }, + expected: FetchTestResult.FAILURE, +}), "local to public: PUT preflight failure."); + +subsetTestByKey("baseline", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + } + }, + fetchOptions: { method: "PUT" }, + expected: FetchTestResult.SUCCESS, +}), "local to public: PUT preflight success."); + +// Generates tests of preflight behavior for a single (source, target) pair. 
+// +// Scenarios: +// +// - cors mode: +// - preflight response has non-2xx HTTP code +// - preflight response is missing CORS headers +// - preflight response is missing the PNA-specific `Access-Control` header +// - final response is missing CORS headers +// - success +// - success with PUT method (non-"simple" request) +// - no-cors mode: +// - preflight response has non-2xx HTTP code +// - preflight response is missing CORS headers +// - preflight response is missing the PNA-specific `Access-Control` header +// - success +// +function makePreflightTests({ + subsetKey, + source, + sourceDescription, + targetServer, + targetDescription, +}) { + const prefix = + `${sourceDescription} to ${targetDescription}: `; + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.failure(), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, + }), prefix + "failed preflight."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.noCorsHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, + }), prefix + "missing CORS headers on preflight response."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.noPnaHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, + }), prefix + "missing PNA header on preflight response."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.success(token()) }, + }, + expected: FetchTestResult.FAILURE, + }), prefix + "missing CORS headers on final response."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, + }), prefix + "success."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { method: "PUT" }, + expected: FetchTestResult.SUCCESS, + }), prefix + "PUT success."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { server: targetServer }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, + }), prefix + "no-CORS mode failed preflight."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.noCorsHeader(token()) }, + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, + }), prefix + "no-CORS mode missing CORS headers on preflight response."); + + subsetTestByKey(subsetKey, promise_test, t => fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.noPnaHeader(token()) }, + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, + }), prefix + "no-CORS mode missing PNA header on preflight response."); + + subsetTestByKey(subsetKey, promise_test, t 
=> fetchTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.success(token()) }, + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, + }), prefix + "no-CORS mode success."); +} + +// Source: private secure context. +// +// Fetches to the local address space require a successful preflight response +// carrying a PNA-specific header. + +makePreflightTests({ + subsetKey: "from-private", + source: { server: Server.HTTPS_PRIVATE }, + sourceDescription: "private", + targetServer: Server.HTTPS_LOCAL, + targetDescription: "local", +}); + +subsetTestByKey("from-private", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: FetchTestResult.SUCCESS, +}), "private to private: no preflight required."); + +subsetTestByKey("from-private", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "private to public: no preflight required."); + +// Source: public secure context. +// +// Fetches to the local and private address spaces require a successful +// preflight response carrying a PNA-specific header. + +makePreflightTests({ + subsetKey: "from-public", + source: { server: Server.HTTPS_PUBLIC }, + sourceDescription: "public", + targetServer: Server.HTTPS_LOCAL, + targetDescription: "local", +}); + +makePreflightTests({ + subsetKey: "from-public", + source: { server: Server.HTTPS_PUBLIC }, + sourceDescription: "public", + targetServer: Server.HTTPS_PRIVATE, + targetDescription: "private", +}); + +subsetTestByKey("from-public", promise_test, t => fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: FetchTestResult.SUCCESS, +}), "public to public: no preflight required."); + diff --git a/test/wpt/tests/fetch/private-network-access/fetch.tentative.window.js b/test/wpt/tests/fetch/private-network-access/fetch.tentative.window.js new file mode 100644 index 0000000..8ee54c9 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/fetch.tentative.window.js @@ -0,0 +1,183 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that non-secure contexts cannot fetch subresources from +// less-public address spaces, and can fetch them otherwise. +// +// This file covers only those tests that must execute in a non secure context. +// Other tests are defined in: fetch.https.window.js + +setup(() => { + // Making sure we are in a non secure context, as expected. 
+ assert_false(window.isSecureContext); +}); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_LOCAL }, + expected: FetchTestResult.SUCCESS, +}), "local to local: no preflight required."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "local to private: no preflight required."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "local to public: no preflight required."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "private to local: failure."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PRIVATE }, + expected: FetchTestResult.SUCCESS, +}), "private to private: no preflight required."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "private to public: no preflight required."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "public to local: failure."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "public to private: failure."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: FetchTestResult.SUCCESS, +}), "public to public: no preflight required."); + +// These tests verify that documents fetched from the `local` address space yet +// carrying the `treat-as-public-address` CSP directive are treated as if they +// had been fetched from the `public` address space. 
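+//
+// Note that the next two cases expect FAILURE even though the target would
+// answer the preflight successfully: in a non-secure context the private
+// network request is blocked outright, so a well-formed preflight response
+// cannot rescue it.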
+ +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public-address to local: failure."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public-address to private: failure."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public-address to public: no preflight required."); + +// These tests verify that HTTPS iframes embedded in an HTTP top-level document +// cannot fetch subresources from less-public address spaces. Indeed, even +// though the iframes have HTTPS origins, they are non-secure contexts because +// their parent is a non-secure context. + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "private https to local: failure."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "public https to local: failure."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.FAILURE, +}), "public https to private: failure."); diff --git a/test/wpt/tests/fetch/private-network-access/iframe.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/iframe.tentative.https.window.js new file mode 100644 index 0000000..0c12970 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/iframe.tentative.https.window.js @@ -0,0 +1,266 @@ +// META: script=/common/subset-tests-by-key.js +// META: script=/common/dispatcher/dispatcher.js +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// META: timeout=long +// META: variant=?include=from-local +// META: variant=?include=from-private +// META: variant=?include=from-public +// META: variant=?include=from-treat-as-public +// META: variant=?include=grandparent +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that contexts can navigate iframes to less-public address +// spaces iff the target server responds affirmatively to preflight requests. +// +// This file covers only those tests that must execute in a secure context. +// Other tests are defined in: iframe.tentative.window.js + +setup(() => { + assert_true(window.isSecureContext); +}); + +// Source: secure local context. +// +// All fetches unaffected by Private Network Access. 
+ +subsetTestByKey("from-local", promise_test_parallel, t => iframeTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: FrameTestResult.SUCCESS, +}), "local to local: no preflight required."); + +subsetTestByKey("from-local", promise_test_parallel, t => iframeTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_PRIVATE }, + expected: FrameTestResult.SUCCESS, +}), "local to private: no preflight required."); + +subsetTestByKey("from-local", promise_test_parallel, t => iframeTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_PUBLIC }, + expected: FrameTestResult.SUCCESS, +}), "local to public: no preflight required."); + +// Generates tests of preflight behavior for a single (source, target) pair. +// +// Scenarios: +// +// - parent navigates child: +// - preflight response has non-2xx HTTP code +// - preflight response is missing CORS headers +// - preflight response is missing the PNA-specific `Access-Control` header +// - success +// +function makePreflightTests({ + key, + sourceName, + sourceServer, + sourceTreatAsPublic, + targetName, + targetServer, +}) { + const prefix = + `${sourceName} to ${targetName}: `; + + const source = { + server: sourceServer, + treatAsPublic: sourceTreatAsPublic, + }; + + promise_test_parallel(t => iframeTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.failure() }, + }, + expected: FrameTestResult.FAILURE, + }), prefix + "failed preflight."); + + promise_test_parallel(t => iframeTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.noCorsHeader(token()) }, + }, + expected: FrameTestResult.FAILURE, + }), prefix + "missing CORS headers."); + + promise_test_parallel(t => iframeTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.noPnaHeader(token()) }, + }, + expected: FrameTestResult.FAILURE, + }), prefix + "missing PNA header."); + + promise_test_parallel(t => iframeTest(t, { + source, + target: { + server: targetServer, + behavior: { preflight: PreflightBehavior.success(token()) }, + }, + expected: FrameTestResult.SUCCESS, + }), prefix + "success."); +} + +// Source: private secure context. +// +// Fetches to the local address space require a successful preflight response +// carrying a PNA-specific header. + +subsetTestByKey('from-private', makePreflightTests, { + sourceServer: Server.HTTPS_PRIVATE, + sourceName: 'private', + targetServer: Server.HTTPS_LOCAL, + targetName: 'local', +}); + +subsetTestByKey("from-private", promise_test_parallel, t => iframeTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: FrameTestResult.SUCCESS, +}), "private to private: no preflight required."); + +subsetTestByKey("from-private", promise_test_parallel, t => iframeTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PUBLIC }, + expected: FrameTestResult.SUCCESS, +}), "private to public: no preflight required."); + +// Source: public secure context. +// +// Fetches to the local and private address spaces require a successful +// preflight response carrying a PNA-specific header. 
+ +subsetTestByKey('from-public', makePreflightTests, { + sourceServer: Server.HTTPS_PUBLIC, + sourceName: "public", + targetServer: Server.HTTPS_LOCAL, + targetName: "local", +}); + +subsetTestByKey('from-public', makePreflightTests, { + sourceServer: Server.HTTPS_PUBLIC, + sourceName: "public", + targetServer: Server.HTTPS_PRIVATE, + targetName: "private", +}); + +subsetTestByKey("from-public", promise_test_parallel, t => iframeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: FrameTestResult.SUCCESS, +}), "public to public: no preflight required."); + +// The following tests verify that `CSP: treat-as-public-address` makes +// documents behave as if they had been served from a public IP address. + +subsetTestByKey('from-treat-as-public', makePreflightTests, { + sourceServer: Server.HTTPS_LOCAL, + sourceTreatAsPublic: true, + sourceName: "treat-as-public-address", + targetServer: Server.OTHER_HTTPS_LOCAL, + targetName: "local", +}); + +subsetTestByKey( + 'from-treat-as-public', promise_test_parallel, + t => iframeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: {server: Server.HTTPS_LOCAL}, + expected: FrameTestResult.SUCCESS, + }), + 'treat-as-public-address to local (same-origin): no preflight required.' +); + +subsetTestByKey('from-treat-as-public', makePreflightTests, { + sourceServer: Server.HTTPS_LOCAL, + sourceTreatAsPublic: true, + sourceName: "treat-as-public-address", + targetServer: Server.HTTPS_PRIVATE, + targetName: "private", +}); + +subsetTestByKey( + 'from-treat-as-public', promise_test_parallel, + t => iframeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: {server: Server.HTTPS_PUBLIC}, + expected: FrameTestResult.SUCCESS, + }), + 'treat-as-public-address to public: no preflight required.' +); + +subsetTestByKey( + 'from-treat-as-public', promise_test_parallel, + t => iframeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: {preflight: PreflightBehavior.optionalSuccess(token())} + }, + expected: FrameTestResult.SUCCESS, + }), + 'treat-as-public-address to local: optional preflight' +); + +// The following tests verify that when a grandparent frame navigates its +// grandchild, the IP address space of the grandparent is compared against the +// IP address space of the response. Indeed, the navigation initiator in this +// case is the grandparent, not the parent. 
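+//
+// The intermediate child frame is served from the public address space on
+// purpose: if the parent were (incorrectly) used as the request client, the
+// local-targeting navigations below would need a preflight, so their success
+// shows that the grandparent's (local) address space is what gets compared.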
+ +subsetTestByKey('grandparent', iframeGrandparentTest, { + name: 'local to local, grandparent navigates: no preflight required.', + grandparentServer: Server.HTTPS_LOCAL, + child: {server: Server.HTTPS_PUBLIC}, + grandchild: {server: Server.OTHER_HTTPS_LOCAL}, + expected: FrameTestResult.SUCCESS, +}); + +subsetTestByKey('grandparent', iframeGrandparentTest, { + name: "local to local (same-origin), grandparent navigates: no preflight required.", + grandparentServer: Server.HTTPS_LOCAL, + child: { server: Server.HTTPS_PUBLIC }, + grandchild: { server: Server.HTTPS_LOCAL }, + expected: FrameTestResult.SUCCESS, +}); + +subsetTestByKey('grandparent', iframeGrandparentTest, { + name: "public to local, grandparent navigates: failure.", + grandparentServer: Server.HTTPS_PUBLIC, + child: { + server: Server.HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.success(token()) }, + }, + grandchild: { + server: Server.HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.failure() }, + }, + expected: FrameTestResult.FAILURE, +}); + +subsetTestByKey('grandparent', iframeGrandparentTest, { + name: "public to local, grandparent navigates: success.", + grandparentServer: Server.HTTPS_PUBLIC, + child: { + server: Server.HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.success(token()) }, + }, + grandchild: { + server: Server.HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.success(token()) }, + }, + expected: FrameTestResult.SUCCESS, +}); diff --git a/test/wpt/tests/fetch/private-network-access/iframe.tentative.window.js b/test/wpt/tests/fetch/private-network-access/iframe.tentative.window.js new file mode 100644 index 0000000..c0770df --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/iframe.tentative.window.js @@ -0,0 +1,110 @@ +// META: script=/common/dispatcher/dispatcher.js +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that non-secure contexts cannot navigate iframes to +// less-public address spaces, and can navigate them otherwise. +// +// This file covers only those tests that must execute in a non secure context. +// Other tests are defined in: iframe.https.window.js + +setup(() => { + // Making sure we are in a non secure context, as expected. 
+ assert_false(window.isSecureContext); +}); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_LOCAL }, + expected: FrameTestResult.SUCCESS, +}), "local to local: no preflight required."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_PRIVATE }, + expected: FrameTestResult.SUCCESS, +}), "local to private: no preflight required."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_PUBLIC }, + expected: FrameTestResult.SUCCESS, +}), "local to public: no preflight required."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_LOCAL }, + expected: FrameTestResult.FAILURE, +}), "private to local: failure."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PRIVATE }, + expected: FrameTestResult.SUCCESS, +}), "private to private: no preflight required."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PUBLIC }, + expected: FrameTestResult.SUCCESS, +}), "private to public: no preflight required."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_LOCAL }, + expected: FrameTestResult.FAILURE, +}), "public to local: failure."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PRIVATE }, + expected: FrameTestResult.FAILURE, +}), "public to private: failure."); + +promise_test_parallel(t => iframeTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: FrameTestResult.SUCCESS, +}), "public to public: no preflight required."); + +promise_test_parallel(t => iframeTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTP_LOCAL }, + expected: FrameTestResult.FAILURE, +}), "treat-as-public-address to local: failure."); + +promise_test_parallel(t => iframeTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTP_PRIVATE }, + expected: FrameTestResult.FAILURE, +}), "treat-as-public-address to private: failure."); + +promise_test_parallel(t => iframeTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTP_PUBLIC }, + expected: FrameTestResult.SUCCESS, +}), "treat-as-public-address to public: no preflight required."); + +// The following test verifies that when a grandparent frame navigates its +// grandchild, the IP address space of the grandparent is compared against the +// IP address space of the response. Indeed, the navigation initiator in this +// case is the grandparent, not the parent. 
+ +iframeGrandparentTest({ + name: "local to local, grandparent navigates: success.", + grandparentServer: Server.HTTP_LOCAL, + child: { server: Server.HTTP_PUBLIC }, + grandchild: { server: Server.HTTP_LOCAL }, + expected: FrameTestResult.SUCCESS, +}); diff --git a/test/wpt/tests/fetch/private-network-access/mixed-content-fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/mixed-content-fetch.tentative.https.window.js new file mode 100644 index 0000000..54485dc --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/mixed-content-fetch.tentative.https.window.js @@ -0,0 +1,277 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access +// +// These tests verify that secure contexts can fetch non-secure subresources +// from more private address spaces, avoiding mixed context checks, as long as +// they specify a valid `targetAddressSpace` fetch option that matches the +// target server's address space. + +setup(() => { + // Making sure we are in a secure context, as expected. + assert_true(window.isSecureContext); +}); + +// Given `addressSpace`, returns the other three possible IP address spaces. +function otherAddressSpaces(addressSpace) { + switch (addressSpace) { + case "local": return ["unknown", "private", "public"]; + case "private": return ["unknown", "local", "public"]; + case "public": return ["unknown", "local", "private"]; + } +} + +// Generates tests of `targetAddressSpace` for the given (source, target) +// address space pair, expecting fetches to succeed iff `targetAddressSpace` is +// correct. +// +// Scenarios exercised: +// +// - cors mode: +// - missing targetAddressSpace option +// - incorrect targetAddressSpace option (x3, see `otherAddressSpaces()`) +// - failed preflight +// - success +// - success with PUT method (non-"simple" request) +// - no-cors mode: +// - success +// +function makeTests({ source, target }) { + const sourceServer = Server.get("https", source); + const targetServer = Server.get("http", target); + + const makeTest = ({ + fetchOptions, + targetBehavior, + name, + expected + }) => { + promise_test_parallel(t => fetchTest(t, { + source: { server: sourceServer }, + target: { + server: targetServer, + behavior: targetBehavior, + }, + fetchOptions, + expected, + }), `${sourceServer.name} to ${targetServer.name}: ${name}.`); + }; + + makeTest({ + name: "missing targetAddressSpace", + targetBehavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + expected: FetchTestResult.FAILURE, + }); + + const correctAddressSpace = targetServer.addressSpace; + + for (const targetAddressSpace of otherAddressSpaces(correctAddressSpace)) { + makeTest({ + name: `wrong targetAddressSpace "${targetAddressSpace}"`, + targetBehavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + fetchOptions: { targetAddressSpace }, + expected: FetchTestResult.FAILURE, + }); + } + + makeTest({ + name: "failed preflight", + targetBehavior: { + preflight: PreflightBehavior.failure(), + response: ResponseBehavior.allowCrossOrigin(), + }, + fetchOptions: { targetAddressSpace: correctAddressSpace }, + expected: FetchTestResult.FAILURE, + }); + + makeTest({ + name: "success", + targetBehavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + fetchOptions: { targetAddressSpace: correctAddressSpace }, + expected: 
FetchTestResult.SUCCESS, + }); + + makeTest({ + name: "PUT success", + targetBehavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + fetchOptions: { + targetAddressSpace: correctAddressSpace, + method: "PUT", + }, + expected: FetchTestResult.SUCCESS, + }); + + makeTest({ + name: "no-cors success", + targetBehavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + fetchOptions: { + targetAddressSpace: correctAddressSpace, + mode: "no-cors", + }, + expected: FetchTestResult.OPAQUE, + }); +} + +// Generates tests for the given (source, target) address space pair expecting +// that `targetAddressSpace` cannot be used to bypass mixed content. +// +// Scenarios exercised: +// +// - wrong `targetAddressSpace` (x3, see `otherAddressSpaces()`) +// - correct `targetAddressSpace` +// +function makeNoBypassTests({ source, target }) { + const sourceServer = Server.get("https", source); + const targetServer = Server.get("http", target); + + const prefix = `${sourceServer.name} to ${targetServer.name}: `; + + const correctAddressSpace = targetServer.addressSpace; + for (const targetAddressSpace of otherAddressSpaces(correctAddressSpace)) { + promise_test_parallel(t => fetchTest(t, { + source: { server: sourceServer }, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { targetAddressSpace }, + expected: FetchTestResult.FAILURE, + }), prefix + `wrong targetAddressSpace "${targetAddressSpace}".`); + } + + promise_test_parallel(t => fetchTest(t, { + source: { server: sourceServer }, + target: { + server: targetServer, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { targetAddressSpace: correctAddressSpace }, + expected: FetchTestResult.FAILURE, + }), prefix + 'not a private network request.'); +} + +// Source: local secure context. +// +// Fetches to the local and private address spaces cannot use +// `targetAddressSpace` to bypass mixed content, as they are not otherwise +// blocked by Private Network Access. + +makeNoBypassTests({ source: "local", target: "local" }); +makeNoBypassTests({ source: "local", target: "private" }); +makeNoBypassTests({ source: "local", target: "public" }); + +// Source: private secure context. +// +// Fetches to the local address space requires the right `targetAddressSpace` +// option, as well as a successful preflight response carrying a PNA-specific +// header. +// +// Fetches to the private address space cannot use `targetAddressSpace` to +// bypass mixed content, as they are not otherwise blocked by Private Network +// Access. + +makeTests({ source: "private", target: "local" }); + +makeNoBypassTests({ source: "private", target: "private" }); +makeNoBypassTests({ source: "private", target: "public" }); + +// Source: public secure context. +// +// Fetches to the local and private address spaces require the right +// `targetAddressSpace` option, as well as a successful preflight response +// carrying a PNA-specific header. 
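+//
+// For illustration only (the URL below is made up), a page would opt into such
+// a mixed-content private-network fetch roughly like this, naming the address
+// space it expects the target to occupy:
+//
+//   fetch("http://device.example/ping", { targetAddressSpace: "private" });
+//
+// This mirrors the `targetAddressSpace` key passed through `fetchOptions` by
+// `makeTests` above.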
+ +makeTests({ source: "public", target: "local" }); +makeTests({ source: "public", target: "private" }); + +makeNoBypassTests({ source: "public", target: "public" }); + +// These tests verify that documents fetched from the `local` address space yet +// carrying the `treat-as-public-address` CSP directive are treated as if they +// had been fetched from the `public` address space. + +promise_test_parallel(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { targetAddressSpace: "private" }, + expected: FetchTestResult.FAILURE, +}), 'https-treat-as-public to http-local: wrong targetAddressSpace "private".'); + +promise_test_parallel(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { targetAddressSpace: "local" }, + expected: FetchTestResult.SUCCESS, +}), "https-treat-as-public to http-local: success."); + +promise_test_parallel(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { targetAddressSpace: "local" }, + expected: FetchTestResult.FAILURE, +}), 'https-treat-as-public to http-private: wrong targetAddressSpace "local".'); + +promise_test_parallel(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + fetchOptions: { targetAddressSpace: "private" }, + expected: FetchTestResult.SUCCESS, +}), "https-treat-as-public to http-private: success."); diff --git a/test/wpt/tests/fetch/private-network-access/nested-worker.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/nested-worker.tentative.https.window.js new file mode 100644 index 0000000..3eeb435 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/nested-worker.tentative.https.window.js @@ -0,0 +1,36 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that initial `Worker` script fetches from within worker +// scopes are subject to Private Network Access checks, just like a worker +// script fetches from within document scopes (for non-nested workers). The +// latter are tested in: worker.https.window.js +// +// This file covers only those tests that must execute in a secure context. 
+// Other tests are defined in: nested-worker.window.js + +promise_test(t => nestedWorkerScriptTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => nestedWorkerScriptTest(t, { + source: { + server: Server.HTTPS_PRIVATE, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_PRIVATE }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => nestedWorkerScriptTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: WorkerScriptTestResult.SUCCESS, +}), "public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/nested-worker.tentative.window.js b/test/wpt/tests/fetch/private-network-access/nested-worker.tentative.window.js new file mode 100644 index 0000000..6d246e1 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/nested-worker.tentative.window.js @@ -0,0 +1,36 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that initial `Worker` script fetches from within worker +// scopes are subject to Private Network Access checks, just like a worker +// script fetches from within document scopes (for non-nested workers). The +// latter are tested in: worker.window.js +// +// This file covers only those tests that must execute in a non secure context. +// Other tests are defined in: nested-worker.https.window.js + +promise_test(t => nestedWorkerScriptTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTP_LOCAL }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => nestedWorkerScriptTest(t, { + source: { + server: Server.HTTP_PRIVATE, + treatAsPublic: true, + }, + target: { server: Server.HTTP_PRIVATE }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => nestedWorkerScriptTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: WorkerScriptTestResult.SUCCESS, +}), "public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/preflight-cache.https.tentative.window.js b/test/wpt/tests/fetch/private-network-access/preflight-cache.https.tentative.window.js new file mode 100644 index 0000000..87dbf50 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/preflight-cache.https.tentative.window.js @@ -0,0 +1,88 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#cors-preflight +// +// These tests verify that PNA preflight responses are cached. +// +// TODO(https://crbug.com/1268312): We cannot currently test that cache +// entries are keyed by target IP address space because that requires +// loading the same URL from different IP address spaces, and the WPT +// framework does not allow that. 
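+//
+// `PreflightBehavior.singlePreflight(uuid)` presumably arms the target to
+// answer only one preflight for the given uuid; the second fetch in each pair
+// below can therefore succeed only if the browser reused the cached result of
+// the first preflight instead of sending a new one.
+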
+promise_test(async t => { + let uuid = token(); + await fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.singlePreflight(uuid), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, + }); + await fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.singlePreflight(uuid), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, + }); +}, "private to local: success."); + +promise_test(async t => { + let uuid = token(); + await fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.singlePreflight(uuid), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, + }); + await fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.singlePreflight(uuid), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, + }); +}, "public to local: success."); + +promise_test(async t => { + let uuid = token(); + await fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.singlePreflight(uuid), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, + }); + await fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.singlePreflight(uuid), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: FetchTestResult.SUCCESS, + }); +}, "public to private: success."); \ No newline at end of file diff --git a/test/wpt/tests/fetch/private-network-access/redirect.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/redirect.tentative.https.window.js new file mode 100644 index 0000000..efbd8f3 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/redirect.tentative.https.window.js @@ -0,0 +1,640 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// This test verifies that Private Network Access checks are applied to all +// the endpoints in a redirect chain, relative to the same client context. + +// local -> private -> public +// +// Request 1 (local -> private): no preflight. +// Request 2 (local -> public): no preflight. + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "local to private to public: success."); + +// local -> private -> local +// +// Request 1 (local -> private): no preflight. +// Request 2 (local -> local): no preflight. +// +// This checks that the client for the second request is still the initial +// context, not the redirector. 
+ +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "local to private to local: success."); + +// private -> private -> local +// +// Request 1 (private -> private): no preflight. +// Request 2 (private -> local): preflight required. +// +// This verifies that PNA checks are applied after redirects. + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "private to private to local: failed preflight."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "private to private to local: success."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.success(token()) }, + }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, +}), "private to private to local: no-cors success."); + +// private -> local -> private +// +// Request 1 (private -> local): preflight required. +// Request 2 (private -> private): no preflight. +// +// This verifies that PNA checks are applied independently to every step in a +// redirect chain. + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "private to local to private: failed preflight."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "private to local to private: success."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ server: Server.HTTPS_PRIVATE }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, +}), "private to local to private: no-cors success."); + +// public -> private -> local +// +// Request 1 (public -> private): preflight required. +// Request 2 (public -> local): preflight required. +// +// This verifies that PNA checks are applied to every step in a redirect chain. 
+ +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "public to private to local: failed first preflight."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "public to private to local: failed second preflight."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "public to private to local: success."); + +promise_test(t => fetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.success(token()) }, + }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, +}), "public to private to local: no-cors success."); + +// treat-as-public -> local -> private + +// Request 1 (treat-as-public -> local): preflight required. +// Request 2 (treat-as-public -> private): preflight required. + +// This verifies that PNA checks are applied to every step in a redirect chain. 
+ +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + response: ResponseBehavior.allowCrossOrigin(), + } + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to local to private: failed first preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.noPnaHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + response: ResponseBehavior.allowCrossOrigin(), + } + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to local to private: failed second preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + response: ResponseBehavior.allowCrossOrigin(), + } + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public to local to private: success."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { preflight: PreflightBehavior.success(token()) }, + }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to local to private: no-cors failed first preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ server: Server.HTTPS_PRIVATE }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to local to private: no-cors failed second preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { preflight: PreflightBehavior.success(token()) }, + }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, +}), "treat-as-public to local to private: no-cors success."); + +// treat-as-public -> local (same-origin) -> private + +// Request 1 (treat-as-public -> local (same-origin)): no preflight required. +// Request 2 (treat-as-public -> private): preflight required. + +// This verifies that PNA checks are applied only to the second step in a +// redirect chain if the first step is same-origin and the origin is potentially +// trustworthy. 
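+//
+// (Note: unlike the previous section, the first hop here targets
+// Server.HTTPS_LOCAL itself, the same origin the test page is served from,
+// rather than Server.OTHER_HTTPS_LOCAL, so it requires no preflight.)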
+ +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.noPnaHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to local (same-origin) to private: failed second preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public to local (same-origin) to private: success."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + redirect: preflightUrl({ server: Server.HTTPS_PRIVATE }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to local (same-origin) to private: no-cors failed second preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + redirect: preflightUrl({ + server: Server.HTTPS_PRIVATE, + behavior: { preflight: PreflightBehavior.success(token()) }, + }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, +}), "treat-as-public to local (same-origin) to private: no-cors success."); + +// treat-as-public -> private -> local + +// Request 1 (treat-as-public -> private): preflight required. +// Request 2 (treat-as-public -> local): preflight required. + +// This verifies that PNA checks are applied to every step in a redirect chain. 
+ +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.noPnaHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to private to local: failed first preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.OTHER_HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to private to local: failed second preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public to private to local: success."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + redirect: preflightUrl({ + server: Server.OTHER_HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.success(token()) }, + }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to private to local: no-cors failed first preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ server: Server.OTHER_HTTPS_LOCAL }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to private to local: no-cors failed second preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ + server: Server.OTHER_HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.success(token()) }, + }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, +}), "treat-as-public to private to local: no-cors success."); + +// treat-as-public -> private -> local (same-origin) + +// Request 1 (treat-as-public -> private): preflight required. +// Request 2 (treat-as-public -> local (same-origin)): no preflight required. + +// This verifies that PNA checks are only applied to the first step in a +// redirect chain if the second step is same-origin and the origin is +// potentially trustworthy. 
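+//
+// (Note: here the redirect points back at Server.HTTPS_LOCAL, which is
+// same-origin with the initiator, so only the first hop to the private
+// server needs a successful preflight.)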
+ +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.noPnaHeader(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }), + } + }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to private to local (same-origin): failed first preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + redirect: preflightUrl({ + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }), + } + }, + expected: FetchTestResult.SUCCESS, +}), "treat-as-public to private to local (same-origin): success."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + redirect: preflightUrl({ server: Server.HTTPS_LOCAL }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.FAILURE, +}), "treat-as-public to private to local (same-origin): no-cors failed first preflight."); + +promise_test(t => fetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + redirect: preflightUrl({ server: Server.HTTPS_LOCAL }), + } + }, + fetchOptions: { mode: "no-cors" }, + expected: FetchTestResult.OPAQUE, +}), "treat-as-public to private to local (same-origin): no-cors success."); diff --git a/test/wpt/tests/fetch/private-network-access/resources/executor.html b/test/wpt/tests/fetch/private-network-access/resources/executor.html new file mode 100644 index 0000000..d712129 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/executor.html @@ -0,0 +1,9 @@ + + +Executor + + + diff --git a/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html new file mode 100644 index 0000000..b14601d --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html @@ -0,0 +1,25 @@ + + + +Fetcher + \ No newline at end of file diff --git a/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html.headers b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html.headers new file mode 100644 index 0000000..6247f6d --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-fetcher.https.html.headers @@ -0,0 +1 @@ +Supports-Loading-Mode: fenced-frame \ No newline at end of file diff --git a/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access-target.https.html b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access-target.https.html new file mode 100644 index 0000000..2b55e05 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access-target.https.html @@ -0,0 +1,8 @@ + + + +Fenced frame target + diff --git 
a/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html new file mode 100644 index 0000000..98f1184 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html @@ -0,0 +1,14 @@ + + + + + +Fenced frame + + diff --git a/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html.headers b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html.headers new file mode 100644 index 0000000..6247f6d --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/fenced-frame-local-network-access.https.html.headers @@ -0,0 +1 @@ +Supports-Loading-Mode: fenced-frame \ No newline at end of file diff --git a/test/wpt/tests/fetch/private-network-access/resources/fetcher.html b/test/wpt/tests/fetch/private-network-access/resources/fetcher.html new file mode 100644 index 0000000..000a5cc --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/fetcher.html @@ -0,0 +1,21 @@ + + +Fetcher + diff --git a/test/wpt/tests/fetch/private-network-access/resources/fetcher.js b/test/wpt/tests/fetch/private-network-access/resources/fetcher.js new file mode 100644 index 0000000..3a18598 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/fetcher.js @@ -0,0 +1,20 @@ +async function doFetch(url) { + const response = await fetch(url); + const body = await response.text(); + return { + status: response.status, + body, + }; +} + +async function fetchAndPost(url) { + try { + const message = await doFetch(url); + self.postMessage(message); + } catch(e) { + self.postMessage({ error: e.name }); + } +} + +const url = new URL(self.location.href).searchParams.get("url"); +fetchAndPost(url); diff --git a/test/wpt/tests/fetch/private-network-access/resources/iframed.html b/test/wpt/tests/fetch/private-network-access/resources/iframed.html new file mode 100644 index 0000000..c889c28 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/iframed.html @@ -0,0 +1,7 @@ + + +Iframed + diff --git a/test/wpt/tests/fetch/private-network-access/resources/iframer.html b/test/wpt/tests/fetch/private-network-access/resources/iframer.html new file mode 100644 index 0000000..304cc54 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/iframer.html @@ -0,0 +1,9 @@ + + +Iframer + + diff --git a/test/wpt/tests/fetch/private-network-access/resources/preflight.py b/test/wpt/tests/fetch/private-network-access/resources/preflight.py new file mode 100644 index 0000000..be3abdb --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/preflight.py @@ -0,0 +1,175 @@ +# This endpoint responds to both preflight requests and the subsequent requests. +# +# Its behavior can be configured with various search/GET parameters, all of +# which are optional: +# +# - treat-as-public-once: Must be a valid UUID if set. +# If set, then this endpoint expects to receive a non-preflight request first, +# for which it sets the `Content-Security-Policy: treat-as-public-address` +# response header. This allows testing "DNS rebinding", where a URL first +# resolves to the public IP address space, then a non-public IP address space. +# - preflight-uuid: Must be a valid UUID if set, distinct from the value of the +# `treat-as-public-once` parameter if both are set. 
+# If set, then this endpoint expects to receive a preflight request first +# followed by a regular request, as in the regular CORS protocol. If the +# `treat-as-public-once` header is also set, it takes precedence: this +# endpoint expects to receive a non-preflight request first, then a preflight +# request, then finally a regular request. +# If unset, then this endpoint expects to receive no preflight request, only +# a regular (non-OPTIONS) request. +# - preflight-headers: Valid values are: +# - cors: this endpoint responds with valid CORS headers to preflights. These +# should be sufficient for non-PNA preflight requests to succeed, but not +# for PNA-specific preflight requests. +# - cors+pna: this endpoint responds with valid CORS and PNA headers to +# preflights. These should be sufficient for both non-PNA preflight +# requests and PNA-specific preflight requests to succeed. +# - cors+pna+sw: this endpoint responds with valid CORS and PNA headers and +# "Access-Control-Allow-Headers: Service-Worker" to preflights. These should +# be sufficient for both non-PNA preflight requests and PNA-specific +# preflight requests to succeed. This allows the main request to fetch a +# service worker script. +# - unspecified, or any other value: this endpoint responds with no CORS or +# PNA headers. Preflight requests should fail. +# - final-headers: Valid values are: +# - cors: this endpoint responds with valid CORS headers to CORS-enabled +# non-preflight requests. These should be sufficient for non-preflighted +# CORS-enabled requests to succeed. +# - unspecified: this endpoint responds with no CORS headers to non-preflight +# requests. This should fail CORS-enabled requests, but be sufficient for +# no-CORS requests. +# +# The following parameters only affect non-preflight responses: +# +# - redirect: If set, the response code is set to 301 and the `Location` +# response header is set to this value. +# - mime-type: If set, the `Content-Type` response header is set to this value. +# - file: Specifies a path (relative to this file's directory) to a file. If +# set, the response body is copied from this file. +# - random-js-prefix: If set to any value, the response body is prefixed with +# a Javascript comment line containing a random value. This is useful in +# service worker tests, since service workers are only updated if the new +# script is not byte-for-byte identical with the old script. +# - body: If set and `file` is not, the response body is set to this value. 
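+#
+# For illustration (the tests build these URLs through `preflightUrl()` in
+# resources/support.sub.js rather than by hand): a fully preflighted CORS
+# request carries `preflight-uuid=<uuid>`, `preflight-headers=cors+pna` and
+# `final-headers=cors` as search parameters, while a request that expects no
+# preflight at all simply omits `preflight-uuid`.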
+# + +import os +import random + +from wptserve.utils import isomorphic_encode + +_ACAO = ("Access-Control-Allow-Origin", "*") +_ACAPN = ("Access-Control-Allow-Private-Network", "true") +_ACAH = ("Access-Control-Allow-Headers", "Service-Worker") + +def _get_response_headers(method, mode): + acam = ("Access-Control-Allow-Methods", method) + + if mode == b"cors": + return [acam, _ACAO] + + if mode == b"cors+pna": + return [acam, _ACAO, _ACAPN] + + if mode == b"cors+pna+sw": + return [acam, _ACAO, _ACAPN, _ACAH] + + return [] + +def _get_expect_single_preflight(request): + return request.GET.get(b"expect-single-preflight") + +def _is_preflight_optional(request): + return request.GET.get(b"is-preflight-optional") + +def _get_preflight_uuid(request): + return request.GET.get(b"preflight-uuid") + +def _is_loaded_in_fenced_frame(request): + return request.GET.get(b"is-loaded-in-fenced-frame") + +def _should_treat_as_public_once(request): + uuid = request.GET.get(b"treat-as-public-once") + if uuid is None: + # If the search parameter is not given, never treat as public. + return False + + # If the parameter is given, we treat the request as public only if the UUID + # has never been seen and stashed. + result = request.server.stash.take(uuid) is None + request.server.stash.put(uuid, "") + return result + +def _handle_preflight_request(request, response): + if _should_treat_as_public_once(request): + return (400, [], "received preflight for first treat-as-public request") + + uuid = _get_preflight_uuid(request) + if uuid is None: + return (400, [], "missing `preflight-uuid` param from preflight URL") + + value = request.server.stash.take(uuid) + request.server.stash.put(uuid, "preflight") + if _get_expect_single_preflight(request) and value is not None: + return (400, [], "received duplicated preflight") + + method = request.headers.get("Access-Control-Request-Method") + mode = request.GET.get(b"preflight-headers") + headers = _get_response_headers(method, mode) + + return (headers, "preflight") + +def _final_response_body(request): + file_name = request.GET.get(b"file") + if file_name is None: + return request.GET.get(b"body") or "success" + + prefix = b"" + if request.GET.get(b"random-js-prefix"): + value = random.randint(0, 1000000000) + prefix = isomorphic_encode("// Random value: {}\n\n".format(value)) + + path = os.path.join(os.path.dirname(isomorphic_encode(__file__)), file_name) + with open(path, 'rb') as f: + contents = f.read() + + return prefix + contents + +def _handle_final_request(request, response): + if _should_treat_as_public_once(request): + headers = [("Content-Security-Policy", "treat-as-public-address"),] + else: + uuid = _get_preflight_uuid(request) + if uuid is not None: + if (request.server.stash.take(uuid) is None and + not _is_preflight_optional(request)): + return (405, [], "no preflight received") + request.server.stash.put(uuid, "final") + + mode = request.GET.get(b"final-headers") + headers = _get_response_headers(request.method, mode) + + redirect = request.GET.get(b"redirect") + if redirect is not None: + headers.append(("Location", redirect)) + return (301, headers, b"") + + mime_type = request.GET.get(b"mime-type") + if mime_type is not None: + headers.append(("Content-Type", mime_type),) + + if _is_loaded_in_fenced_frame(request): + headers.append(("Supports-Loading-Mode", "fenced-frame")) + + body = _final_response_body(request) + return (headers, body) + +def main(request, response): + try: + if request.method == "OPTIONS": + return 
_handle_preflight_request(request, response) + else: + return _handle_final_request(request, response) + except BaseException as e: + # Surface exceptions to the client, where they show up as assertion errors. + return (500, [("X-exception", str(e))], "exception: {}".format(e)) diff --git a/test/wpt/tests/fetch/private-network-access/resources/service-worker-bridge.html b/test/wpt/tests/fetch/private-network-access/resources/service-worker-bridge.html new file mode 100644 index 0000000..816de53 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/service-worker-bridge.html @@ -0,0 +1,155 @@ + + +ServiceWorker Bridge + + + diff --git a/test/wpt/tests/fetch/private-network-access/resources/service-worker.js b/test/wpt/tests/fetch/private-network-access/resources/service-worker.js new file mode 100644 index 0000000..bca71ad --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/service-worker.js @@ -0,0 +1,18 @@ +self.addEventListener("install", () => { + // Skip waiting before replacing the previously-active service worker, if any. + // This allows the bridge script to notice the controller change and query + // the install time via fetch. + self.skipWaiting(); +}); + +self.addEventListener("activate", (event) => { + // Claim all clients so that the bridge script notices the activation. + event.waitUntil(self.clients.claim()); +}); + +self.addEventListener("fetch", (event) => { + const url = new URL(event.request.url).searchParams.get("proxied-url"); + if (url) { + event.respondWith(fetch(url)); + } +}); diff --git a/test/wpt/tests/fetch/private-network-access/resources/shared-fetcher.js b/test/wpt/tests/fetch/private-network-access/resources/shared-fetcher.js new file mode 100644 index 0000000..30bde1e --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/shared-fetcher.js @@ -0,0 +1,23 @@ +async function doFetch(url) { + const response = await fetch(url); + const body = await response.text(); + return { + status: response.status, + body, + }; +} + +async function fetchAndPost(url, port) { + try { + const message = await doFetch(url); + port.postMessage(message); + } catch(e) { + port.postMessage({ error: e.name }); + } +} + +const url = new URL(self.location.href).searchParams.get("url"); + +self.addEventListener("connect", async (evt) => { + await fetchAndPost(url, evt.ports[0]); +}); diff --git a/test/wpt/tests/fetch/private-network-access/resources/shared-worker-blob-fetcher.html b/test/wpt/tests/fetch/private-network-access/resources/shared-worker-blob-fetcher.html new file mode 100644 index 0000000..a79869b --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/shared-worker-blob-fetcher.html @@ -0,0 +1,50 @@ + + +SharedWorker Blob Fetcher + diff --git a/test/wpt/tests/fetch/private-network-access/resources/shared-worker-fetcher.html b/test/wpt/tests/fetch/private-network-access/resources/shared-worker-fetcher.html new file mode 100644 index 0000000..4af4b1f --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/shared-worker-fetcher.html @@ -0,0 +1,19 @@ + + +SharedWorker Fetcher + diff --git a/test/wpt/tests/fetch/private-network-access/resources/socket-opener.html b/test/wpt/tests/fetch/private-network-access/resources/socket-opener.html new file mode 100644 index 0000000..48d2721 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/socket-opener.html @@ -0,0 +1,15 @@ + + +WebSocket Opener + diff --git 
a/test/wpt/tests/fetch/private-network-access/resources/support.sub.js b/test/wpt/tests/fetch/private-network-access/resources/support.sub.js new file mode 100644 index 0000000..27d733d --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/support.sub.js @@ -0,0 +1,759 @@ +// Creates a new iframe in `doc`, calls `func` on it and appends it as a child +// of `doc`. +// Returns a promise that resolves to the iframe once loaded (successfully or +// not). +// The iframe is removed from `doc` once test `t` is done running. +// +// NOTE: There exists no interoperable way to check whether an iframe failed to +// load, so this should only be used when the iframe is expected to load. It +// also means we cannot wire the iframe's `error` event to a promise +// rejection. See: https://github.com/whatwg/html/issues/125 +function appendIframeWith(t, doc, func) { + return new Promise(resolve => { + const child = doc.createElement("iframe"); + t.add_cleanup(() => child.remove()); + + child.addEventListener("load", () => resolve(child), { once: true }); + func(child); + doc.body.appendChild(child); + }); +} + +// Appends a child iframe to `doc` sourced from `src`. +// +// See `appendIframeWith()` for more details. +function appendIframe(t, doc, src) { + return appendIframeWith(t, doc, child => { child.src = src; }); +} + +// Registers an event listener that will resolve this promise when this +// window receives a message posted to it. +// +// `options` has the following shape: +// +// { +// source: If specified, this function waits for the first message from the +// given source only, ignoring other messages. +// +// filter: If specified, this function calls `filter` on each incoming +// message, and resolves iff it returns true. +// } +// +function futureMessage(options) { + return new Promise(resolve => { + window.addEventListener("message", (e) => { + if (options?.source && options.source !== e.source) { + return; + } + + if (options?.filter && !options.filter(e.data)) { + return; + } + + resolve(e.data); + }); + }); +}; + +// Like `promise_test()`, but executes tests in parallel like `async_test()`. +// +// Cribbed from COEP tests. +function promise_test_parallel(promise, description) { + async_test(test => { + promise(test) + .then(() => test.done()) + .catch(test.step_func(error => { throw error; })); + }, description); +}; + +async function postMessageAndAwaitReply(target, message) { + const reply = futureMessage({ source: target }); + target.postMessage(message, "*"); + return await reply; +} + +// Maps protocol (without the trailing colon) and address space to port. +const SERVER_PORTS = { + "http": { + "local": {{ports[http][0]}}, + "private": {{ports[http-private][0]}}, + "public": {{ports[http-public][0]}}, + }, + "https": { + "local": {{ports[https][0]}}, + "other-local": {{ports[https][1]}}, + "private": {{ports[https-private][0]}}, + "public": {{ports[https-public][0]}}, + }, + "ws": { + "local": {{ports[ws][0]}}, + }, + "wss": { + "local": {{ports[wss][0]}}, + }, +}; + +// A `Server` is a web server accessible by tests. It has the following shape: +// +// { +// addressSpace: the IP address space of the server ("local", "private" or +// "public"), +// name: a human-readable name for the server, +// port: the port on which the server listens for connections, +// protocol: the protocol (including trailing colon) spoken by the server, +// } +// +// Constants below define the available servers, which can also be accessed +// programmatically with `get()`. 
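+//
+// For illustration, `Server.HTTPS_LOCAL` resolves to an object of the form
+// `{ addressSpace: "local", name: "https-local", port: <https local port>,
+// protocol: "https:" }`, with the port filled in by the WPT port
+// substitutions above.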
+class Server { + // Maps the given `protocol` (without a trailing colon) and `addressSpace` to + // a server. Returns null if no such server exists. + static get(protocol, addressSpace) { + const ports = SERVER_PORTS[protocol]; + if (ports === undefined) { + return null; + } + + const port = ports[addressSpace]; + if (port === undefined) { + return null; + } + + return { + addressSpace, + name: `${protocol}-${addressSpace}`, + port, + protocol: protocol + ':', + }; + } + + static HTTP_LOCAL = Server.get("http", "local"); + static HTTP_PRIVATE = Server.get("http", "private"); + static HTTP_PUBLIC = Server.get("http", "public"); + static HTTPS_LOCAL = Server.get("https", "local"); + static OTHER_HTTPS_LOCAL = Server.get("https", "other-local"); + static HTTPS_PRIVATE = Server.get("https", "private"); + static HTTPS_PUBLIC = Server.get("https", "public"); + static WS_LOCAL = Server.get("ws", "local"); + static WSS_LOCAL = Server.get("wss", "local"); +}; + +// Resolves a URL relative to the current location, returning an absolute URL. +// +// `url` specifies the relative URL, e.g. "foo.html" or "http://foo.example". +// `options`, if defined, should have the following shape: +// +// { +// // Optional. Overrides the protocol of the returned URL. +// protocol, +// +// // Optional. Overrides the port of the returned URL. +// port, +// +// // Extra headers. +// headers, +// +// // Extra search params. +// searchParams, +// } +// +function resolveUrl(url, options) { + const result = new URL(url, window.location); + if (options === undefined) { + return result; + } + + const { port, protocol, headers, searchParams } = options; + if (port !== undefined) { + result.port = port; + } + if (protocol !== undefined) { + result.protocol = protocol; + } + if (headers !== undefined) { + const pipes = []; + for (key in headers) { + pipes.push(`header(${key},${headers[key]})`); + } + result.searchParams.append("pipe", pipes.join("|")); + } + if (searchParams !== undefined) { + for (key in searchParams) { + result.searchParams.append(key, searchParams[key]); + } + } + + return result; +} + +// Computes options to pass to `resolveUrl()` for a source document's URL. +// +// `server` identifies the server from which to load the document. +// `treatAsPublic`, if set to true, specifies that the source document should +// be artificially placed in the `public` address space using CSP. +function sourceResolveOptions({ server, treatAsPublic }) { + const options = {...server}; + if (treatAsPublic) { + options.headers = { "Content-Security-Policy": "treat-as-public-address" }; + } + return options; +} + +// Computes the URL of a preflight handler configured with the given options. +// +// `server` identifies the server from which to load the resource. +// `behavior` specifies the behavior of the target server. It may contain: +// - `preflight`: The result of calling one of `PreflightBehavior`'s methods. +// - `response`: The result of calling one of `ResponseBehavior`'s methods. +// - `redirect`: A URL to which the target should redirect GET requests. 
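+//
+// For illustration, a target such as
+//   { server: Server.HTTPS_LOCAL,
+//     behavior: { preflight: PreflightBehavior.success(token()),
+//                 response: ResponseBehavior.allowCrossOrigin() } }
+// resolves to resources/preflight.py on the https-local port, carrying
+// `preflight-uuid`, `preflight-headers` and `final-headers` search
+// parameters as documented in that file.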
+function preflightUrl({ server, behavior }) { + assert_not_equals(server, undefined, 'server'); + const options = {...server}; + if (behavior) { + const { preflight, response, redirect } = behavior; + options.searchParams = { + ...preflight, + ...response, + }; + if (redirect !== undefined) { + options.searchParams.redirect = redirect; + } + } + + return resolveUrl("resources/preflight.py", options); +} + +// Methods generate behavior specifications for how `resources/preflight.py` +// should behave upon receiving a preflight request. +const PreflightBehavior = { + // The preflight response should fail with a non-2xx code. + failure: () => ({}), + + // The preflight response should be missing CORS headers. + // `uuid` should be a UUID that uniquely identifies the preflight request. + noCorsHeader: (uuid) => ({ + "preflight-uuid": uuid, + }), + + // The preflight response should be missing PNA headers. + // `uuid` should be a UUID that uniquely identifies the preflight request. + noPnaHeader: (uuid) => ({ + "preflight-uuid": uuid, + "preflight-headers": "cors", + }), + + // The preflight response should succeed. + // `uuid` should be a UUID that uniquely identifies the preflight request. + success: (uuid) => ({ + "preflight-uuid": uuid, + "preflight-headers": "cors+pna", + }), + + optionalSuccess: (uuid) => ({ + "preflight-uuid": uuid, + "preflight-headers": "cors+pna", + "is-preflight-optional": true, + }), + + // The preflight response should succeed and allow service-worker header. + // `uuid` should be a UUID that uniquely identifies the preflight request. + serviceWorkerSuccess: (uuid) => ({ + "preflight-uuid": uuid, + "preflight-headers": "cors+pna+sw", + }), + + // The preflight response should succeed only if it is the first preflight. + // `uuid` should be a UUID that uniquely identifies the preflight request. + singlePreflight: (uuid) => ({ + "preflight-uuid": uuid, + "preflight-headers": "cors+pna", + "expect-single-preflight": true, + }), +}; + +// Methods generate behavior specifications for how `resources/preflight.py` +// should behave upon receiving a regular (non-preflight) request. +const ResponseBehavior = { + // The response should succeed without CORS headers. + default: () => ({}), + + // The response should succeed with CORS headers. + allowCrossOrigin: () => ({ "final-headers": "cors" }), +}; + +const FetchTestResult = { + SUCCESS: { + ok: true, + body: "success", + }, + OPAQUE: { + ok: false, + type: "opaque", + body: "", + }, + FAILURE: { + error: "TypeError: Failed to fetch", + }, +}; + +// Runs a fetch test. Tries to fetch a given subresource from a given document. +// +// Main argument shape: +// +// { +// // Optional. Passed to `sourceResolveOptions()`. +// source, +// +// // Optional. Passed to `preflightUrl()`. +// target, +// +// // Optional. Passed to `fetch()`. +// fetchOptions, +// +// // Required. One of the values in `FetchTestResult`. 
+// expected, +// } +// +async function fetchTest(t, { source, target, fetchOptions, expected }) { + const sourceUrl = + resolveUrl("resources/fetcher.html", sourceResolveOptions(source)); + + const targetUrl = preflightUrl(target); + + const iframe = await appendIframe(t, document, sourceUrl); + const reply = futureMessage({ source: iframe.contentWindow }); + + const message = { + url: targetUrl.href, + options: fetchOptions, + }; + iframe.contentWindow.postMessage(message, "*"); + + const { error, ok, type, body } = await reply; + + assert_equals(error, expected.error, "error"); + + assert_equals(ok, expected.ok, "response ok"); + assert_equals(body, expected.body, "response body"); + + if (expected.type !== undefined) { + assert_equals(type, expected.type, "response type"); + } +} + +// Similar to `fetchTest`, but replaced iframes with fenced frames. +async function fencedFrameFetchTest(t, { source, target, fetchOptions, expected }) { + const fetcher_url = + resolveUrl("resources/fenced-frame-fetcher.https.html", sourceResolveOptions(source)); + + const target_url = preflightUrl(target); + target_url.searchParams.set("is-loaded-in-fenced-frame", true); + + fetcher_url.searchParams.set("mode", fetchOptions.mode); + fetcher_url.searchParams.set("method", fetchOptions.method); + fetcher_url.searchParams.set("url", target_url); + + const error_token = token(); + const ok_token = token(); + const body_token = token(); + const type_token = token(); + const source_url = generateURL(fetcher_url, [error_token, ok_token, body_token, type_token]); + + const urn = await generateURNFromFledge(source_url, []); + attachFencedFrame(urn); + + const error = await nextValueFromServer(error_token); + const ok = await nextValueFromServer(ok_token); + const body = await nextValueFromServer(body_token); + const type = await nextValueFromServer(type_token); + + assert_equals(error, expected.error || "" , "error"); + assert_equals(body, expected.body || "", "response body"); + assert_equals(ok, expected.ok !== undefined ? expected.ok.toString() : "", "response ok"); + if (expected.type !== undefined) { + assert_equals(type, expected.type, "response type"); + } +} + +const XhrTestResult = { + SUCCESS: { + loaded: true, + status: 200, + body: "success", + }, + FAILURE: { + loaded: false, + status: 0, + }, +}; + +// Runs an XHR test. Tries to fetch a given subresource from a given document. +// +// Main argument shape: +// +// { +// // Optional. Passed to `sourceResolveOptions()`. +// source, +// +// // Optional. Passed to `preflightUrl()`. +// target, +// +// // Optional. Method to use when sending the request. Defaults to "GET". +// method, +// +// // Required. One of the values in `XhrTestResult`. 
+// expected, +// } +// +async function xhrTest(t, { source, target, method, expected }) { + const sourceUrl = + resolveUrl("resources/xhr-sender.html", sourceResolveOptions(source)); + + const targetUrl = preflightUrl(target); + + const iframe = await appendIframe(t, document, sourceUrl); + const reply = futureMessage(); + + const message = { + url: targetUrl.href, + method: method, + }; + iframe.contentWindow.postMessage(message, "*"); + + const { loaded, status, body } = await reply; + + assert_equals(loaded, expected.loaded, "response loaded"); + assert_equals(status, expected.status, "response status"); + assert_equals(body, expected.body, "response body"); +} + +const FrameTestResult = { + SUCCESS: "loaded", + FAILURE: "timeout", +}; + +async function iframeTest(t, { source, target, expected }) { + // Allows running tests in parallel. + const uuid = token(); + + const targetUrl = preflightUrl(target); + targetUrl.searchParams.set("file", "iframed.html"); + targetUrl.searchParams.set("iframe-uuid", uuid); + + const sourceUrl = + resolveUrl("resources/iframer.html", sourceResolveOptions(source)); + sourceUrl.searchParams.set("url", targetUrl); + + const messagePromise = futureMessage({ + filter: (data) => data.uuid === uuid, + }); + const iframe = await appendIframe(t, document, sourceUrl); + + // The grandchild frame posts a message iff it loads successfully. + // There exists no interoperable way to check whether an iframe failed to + // load, so we use a timeout. + // See: https://github.com/whatwg/html/issues/125 + const result = await Promise.race([ + messagePromise.then((data) => data.message), + new Promise((resolve) => { + t.step_timeout(() => resolve("timeout"), 500 /* ms */); + }), + ]); + + assert_equals(result, expected); +} + +// Similar to `iframeTest`, but replaced iframes with fenced frames. +async function fencedFrameTest(t, { source, target, expected }) { + // Allows running tests in parallel. + const target_url = preflightUrl(target); + target_url.searchParams.set("file", "fenced-frame-local-network-access-target.https.html"); + target_url.searchParams.set("is-loaded-in-fenced-frame", true); + + const frame_loaded_key = token(); + const child_frame_target = generateURL(target_url, [frame_loaded_key]); + + const source_url = + resolveUrl("resources/fenced-frame-local-network-access.https.html", sourceResolveOptions(source)); + source_url.searchParams.set("fenced_frame_url", child_frame_target); + + const urn = await generateURNFromFledge(source_url, []); + attachFencedFrame(urn); + + // The grandchild fenced frame writes a value to the server iff it loads + // successfully. + const result = (expected == FrameTestResult.SUCCESS) ? + await nextValueFromServer(frame_loaded_key) : + await Promise.race([ + nextValueFromServer(frame_loaded_key), + new Promise((resolve) => { + t.step_timeout(() => resolve("timeout"), 10000 /* ms */); + }), + ]); + + assert_equals(result, expected); +} + +const iframeGrandparentTest = ({ + name, + grandparentServer, + child, + grandchild, + expected, +}) => promise_test_parallel(async (t) => { + // Allows running tests in parallel. 
+ const grandparentUuid = token(); + const childUuid = token(); + const grandchildUuid = token(); + + const grandparentUrl = + resolveUrl("resources/executor.html", grandparentServer); + grandparentUrl.searchParams.set("executor-uuid", grandparentUuid); + + const childUrl = preflightUrl(child); + childUrl.searchParams.set("file", "executor.html"); + childUrl.searchParams.set("executor-uuid", childUuid); + + const grandchildUrl = preflightUrl(grandchild); + grandchildUrl.searchParams.set("file", "iframed.html"); + grandchildUrl.searchParams.set("iframe-uuid", grandchildUuid); + + const iframe = await appendIframe(t, document, grandparentUrl); + + const addChild = (url) => new Promise((resolve) => { + const child = document.createElement("iframe"); + child.src = url; + child.addEventListener("load", () => resolve(), { once: true }); + document.body.appendChild(child); + }); + + const grandparentCtx = new RemoteContext(grandparentUuid); + await grandparentCtx.execute_script(addChild, [childUrl]); + + // Add a blank grandchild frame inside the child. + // Apply a timeout to this step so that failures at this step do not block the + // execution of other tests. + const childCtx = new RemoteContext(childUuid); + await Promise.race([ + childCtx.execute_script(addChild, ["about:blank"]), + new Promise((resolve, reject) => t.step_timeout( + () => reject("timeout adding grandchild"), + 2000 /* ms */ + )), + ]); + + const messagePromise = futureMessage({ + filter: (data) => data.uuid === grandchildUuid, + }); + await grandparentCtx.execute_script((url) => { + const child = window.frames[0]; + const grandchild = child.frames[0]; + grandchild.location = url; + }, [grandchildUrl]); + + // The great-grandchild frame posts a message iff it loads successfully. + // There exists no interoperable way to check whether an iframe failed to + // load, so we use a timeout. + // See: https://github.com/whatwg/html/issues/125 + const result = await Promise.race([ + messagePromise.then((data) => data.message), + new Promise((resolve) => { + t.step_timeout(() => resolve("timeout"), 2000 /* ms */); + }), + ]); + + assert_equals(result, expected); +}, name); + +const WebsocketTestResult = { + SUCCESS: "open", + + // The code is a best guess. It is not yet entirely specified, so it may need + // to be changed in the future based on implementation experience. + FAILURE: "close: code 1006", +}; + +// Runs a websocket test. Attempts to open a websocket from `source` (in an +// iframe) to `target`, then checks that the result is as `expected`. +// +// Argument shape: +// +// { +// // Required. Passed to `sourceResolveOptions()`. +// source, +// +// // Required. +// target: { +// // Required. Target server. +// server, +// } +// +// // Required. Should be one of the values in `WebsocketTestResult`. 
+// expected, +// } +// +async function websocketTest(t, { source, target, expected }) { + const sourceUrl = + resolveUrl("resources/socket-opener.html", sourceResolveOptions(source)); + + const targetUrl = resolveUrl("/echo", target.server); + + const iframe = await appendIframe(t, document, sourceUrl); + + const reply = futureMessage(); + iframe.contentWindow.postMessage(targetUrl.href, "*"); + + assert_equals(await reply, expected); +} + +const WorkerScriptTestResult = { + SUCCESS: { loaded: true }, + FAILURE: { error: "unknown error" }, +}; + +function workerScriptUrl(target) { + const url = preflightUrl(target); + + url.searchParams.append("body", "postMessage({ loaded: true })") + url.searchParams.append("mime-type", "application/javascript") + + return url; +} + +async function workerScriptTest(t, { source, target, expected }) { + const sourceUrl = + resolveUrl("resources/worker-fetcher.html", sourceResolveOptions(source)); + + const targetUrl = workerScriptUrl(target); + + const iframe = await appendIframe(t, document, sourceUrl); + const reply = futureMessage(); + + iframe.contentWindow.postMessage({ url: targetUrl.href }, "*"); + + const { error, loaded } = await reply; + + assert_equals(error, expected.error, "worker error"); + assert_equals(loaded, expected.loaded, "response loaded"); +} + +async function nestedWorkerScriptTest(t, { source, target, expected }) { + const targetUrl = workerScriptUrl(target); + + const sourceUrl = resolveUrl( + "resources/worker-fetcher.js", sourceResolveOptions(source)); + sourceUrl.searchParams.append("url", targetUrl); + + // Iframe must be same-origin with the parent worker. + const iframeUrl = new URL("worker-fetcher.html", sourceUrl); + + const iframe = await appendIframe(t, document, iframeUrl); + const reply = futureMessage(); + + iframe.contentWindow.postMessage({ url: sourceUrl.href }, "*"); + + const { error, loaded } = await reply; + + assert_equals(error, expected.error, "worker error"); + assert_equals(loaded, expected.loaded, "response loaded"); +} + +async function sharedWorkerScriptTest(t, { source, target, expected }) { + const sourceUrl = resolveUrl("resources/shared-worker-fetcher.html", + sourceResolveOptions(source)); + const targetUrl = preflightUrl(target); + targetUrl.searchParams.append( + "body", "onconnect = (e) => e.ports[0].postMessage({ loaded: true })") + targetUrl.searchParams.append("mime-type", "application/javascript") + + const iframe = await appendIframe(t, document, sourceUrl); + const reply = futureMessage(); + + iframe.contentWindow.postMessage({ url: targetUrl.href }, "*"); + + const { error, loaded } = await reply; + + assert_equals(error, expected.error, "worker error"); + assert_equals(loaded, expected.loaded, "response loaded"); +} + +// Results that may be expected in tests. 
+const WorkerFetchTestResult = { + SUCCESS: { status: 200, body: "success" }, + FAILURE: { error: "TypeError" }, +}; + +async function workerFetchTest(t, { source, target, expected }) { + const targetUrl = preflightUrl(target); + + const sourceUrl = + resolveUrl("resources/fetcher.js", sourceResolveOptions(source)); + sourceUrl.searchParams.append("url", targetUrl.href); + + const fetcherUrl = new URL("worker-fetcher.html", sourceUrl); + + const reply = futureMessage(); + const iframe = await appendIframe(t, document, fetcherUrl); + + iframe.contentWindow.postMessage({ url: sourceUrl.href }, "*"); + + const { error, status, body } = await reply; + assert_equals(error, expected.error, "fetch error"); + assert_equals(status, expected.status, "response status"); + assert_equals(body, expected.body, "response body"); +} + +async function workerBlobFetchTest(t, { source, target, expected }) { + const targetUrl = preflightUrl(target); + + const fetcherUrl = resolveUrl( + 'resources/worker-blob-fetcher.html', sourceResolveOptions(source)); + + const reply = futureMessage(); + const iframe = await appendIframe(t, document, fetcherUrl); + + iframe.contentWindow.postMessage({ url: targetUrl.href }, "*"); + + const { error, status, body } = await reply; + assert_equals(error, expected.error, "fetch error"); + assert_equals(status, expected.status, "response status"); + assert_equals(body, expected.body, "response body"); +} + +async function sharedWorkerFetchTest(t, { source, target, expected }) { + const targetUrl = preflightUrl(target); + + const sourceUrl = + resolveUrl("resources/shared-fetcher.js", sourceResolveOptions(source)); + sourceUrl.searchParams.append("url", targetUrl.href); + + const fetcherUrl = new URL("shared-worker-fetcher.html", sourceUrl); + + const reply = futureMessage(); + const iframe = await appendIframe(t, document, fetcherUrl); + + iframe.contentWindow.postMessage({ url: sourceUrl.href }, "*"); + + const { error, status, body } = await reply; + assert_equals(error, expected.error, "fetch error"); + assert_equals(status, expected.status, "response status"); + assert_equals(body, expected.body, "response body"); +} + +async function sharedWorkerBlobFetchTest(t, { source, target, expected }) { + const targetUrl = preflightUrl(target); + + const fetcherUrl = resolveUrl( + 'resources/shared-worker-blob-fetcher.html', + sourceResolveOptions(source)); + + const reply = futureMessage(); + const iframe = await appendIframe(t, document, fetcherUrl); + + iframe.contentWindow.postMessage({ url: targetUrl.href }, "*"); + + const { error, status, body } = await reply; + assert_equals(error, expected.error, "fetch error"); + assert_equals(status, expected.status, "response status"); + assert_equals(body, expected.body, "response body"); +} diff --git a/test/wpt/tests/fetch/private-network-access/resources/worker-blob-fetcher.html b/test/wpt/tests/fetch/private-network-access/resources/worker-blob-fetcher.html new file mode 100644 index 0000000..5a50271 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/worker-blob-fetcher.html @@ -0,0 +1,45 @@ + + +Worker Blob Fetcher + diff --git a/test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.html b/test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.html new file mode 100644 index 0000000..bd155a5 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.html @@ -0,0 +1,18 @@ + + +Worker Fetcher + diff --git 
a/test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.js b/test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.js new file mode 100644 index 0000000..aab49af --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/worker-fetcher.js @@ -0,0 +1,11 @@ +const url = new URL(self.location).searchParams.get("url"); +const worker = new Worker(url); + +// Relay messages from the worker to the parent frame. +worker.addEventListener("message", (evt) => { + self.postMessage(evt.data); +}); + +worker.addEventListener("error", (evt) => { + self.postMessage({ error: evt.message || "unknown error" }); +}); diff --git a/test/wpt/tests/fetch/private-network-access/resources/xhr-sender.html b/test/wpt/tests/fetch/private-network-access/resources/xhr-sender.html new file mode 100644 index 0000000..b131fa4 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/resources/xhr-sender.html @@ -0,0 +1,33 @@ + + +XHR Sender + diff --git a/test/wpt/tests/fetch/private-network-access/service-worker-background-fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/service-worker-background-fetch.tentative.https.window.js new file mode 100644 index 0000000..6369b16 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/service-worker-background-fetch.tentative.https.window.js @@ -0,0 +1,142 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// Spec: https://wicg.github.io/background-fetch/ +// +// These tests check that background fetches from within `ServiceWorker` scripts +// are not subject to Private Network Access checks. + +// Results that may be expected in tests. +const TestResult = { + SUCCESS: { ok: true, body: "success", result: "success", failureReason: "" }, +}; + +async function makeTest(t, { source, target, expected }) { + const scriptUrl = + resolveUrl("resources/service-worker.js", sourceResolveOptions(source)); + + const bridgeUrl = new URL("service-worker-bridge.html", scriptUrl); + + const targetUrl = preflightUrl(target); + + const iframe = await appendIframe(t, document, bridgeUrl); + + const request = (message) => { + const reply = futureMessage(); + iframe.contentWindow.postMessage(message, "*"); + return reply; + }; + + { + const { error, loaded } = await request({ + action: "register", + url: scriptUrl.href, + }); + + assert_equals(error, undefined, "register error"); + assert_true(loaded, "response loaded"); + } + + { + const { error, state } = await request({ + action: "set-permission", + name: "background-fetch", + state: "granted", + }); + + assert_equals(error, undefined, "set permission error"); + assert_equals(state, "granted", "permission state"); + } + + { + const { error, result, failureReason, ok, body } = await request({ + action: "background-fetch", + url: targetUrl.href, + }); + + assert_equals(error, expected.error, "error"); + assert_equals(failureReason, expected.failureReason, "fetch failure reason"); + assert_equals(result, expected.result, "fetch result"); + assert_equals(ok, expected.ok, "response ok"); + assert_equals(body, expected.body, "response body"); + } +} + +promise_test(t => makeTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: TestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => makeTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: 
{ response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.SUCCESS, +}), "private to local: success."); + +promise_test(t => makeTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: TestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.SUCCESS, +}), "public to local: success."); + +promise_test(t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.SUCCESS, +}), "public to private: success."); + +promise_test(t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: TestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: TestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.SUCCESS, +}), "treat-as-public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/service-worker-fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/service-worker-fetch.tentative.https.window.js new file mode 100644 index 0000000..cb6d1f7 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/service-worker-fetch.tentative.https.window.js @@ -0,0 +1,235 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// META: script=/common/subset-tests.js +// META: variant=?1-8 +// META: variant=?9-last +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `ServiceWorker` scripts are +// subject to Private Network Access checks, just like fetches from within +// documents. + +// Results that may be expected in tests. +const TestResult = { + SUCCESS: { ok: true, body: "success" }, + FAILURE: { error: "TypeError" }, +}; + +async function makeTest(t, { source, target, expected }) { + const bridgeUrl = resolveUrl( + "resources/service-worker-bridge.html", + sourceResolveOptions({ server: source.server })); + + const scriptUrl = + resolveUrl("resources/service-worker.js", sourceResolveOptions(source)); + + const realTargetUrl = preflightUrl(target); + + // Fetch a URL within the service worker's scope, but tell it which URL to + // really fetch. 
+ const targetUrl = new URL("service-worker-proxy", scriptUrl); + targetUrl.searchParams.append("proxied-url", realTargetUrl.href); + + const iframe = await appendIframe(t, document, bridgeUrl); + + const request = (message) => { + const reply = futureMessage(); + iframe.contentWindow.postMessage(message, "*"); + return reply; + }; + + { + const { error, loaded } = await request({ + action: "register", + url: scriptUrl.href, + }); + + assert_equals(error, undefined, "register error"); + assert_true(loaded, "response loaded"); + } + + try { + const { controlled, numControllerChanges } = await request({ + action: "wait", + numControllerChanges: 1, + }); + + assert_equals(numControllerChanges, 1, "controller change"); + assert_true(controlled, "bridge script is controlled"); + + const { error, ok, body } = await request({ + action: "fetch", + url: targetUrl.href, + }); + + assert_equals(error, expected.error, "fetch error"); + assert_equals(ok, expected.ok, "response ok"); + assert_equals(body, expected.body, "response body"); + } finally { + // Always unregister the service worker. + const { error, unregistered } = await request({ + action: "unregister", + scope: new URL("./", scriptUrl).href, + }); + + assert_equals(error, undefined, "unregister error"); + assert_true(unregistered, "unregistered"); + } +} + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: TestResult.SUCCESS, +}), "local to local: success."); + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.FAILURE, +}), "private to local: failed preflight."); + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: TestResult.SUCCESS, +}), "private to local: success."); + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: TestResult.SUCCESS, +}), "private to private: success."); + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.FAILURE, +}), "public to local: failed preflight."); + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: TestResult.SUCCESS, +}), "public to local: success."); + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.FAILURE, +}), "public to private: failed preflight."); + +subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: TestResult.SUCCESS, +}), "public to private: success."); + 
+subsetTest(promise_test, t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: TestResult.SUCCESS, +}), "public to public: success."); + +subsetTest(promise_test, t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.FAILURE, +}), "treat-as-public to local: failed preflight."); + +subsetTest(promise_test, t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: TestResult.SUCCESS, +}), "treat-as-public to local: success."); + +subsetTest(promise_test, t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: TestResult.SUCCESS, +}), "treat-as-public to local (same-origin): no preflight required."); + +subsetTest(promise_test, t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.FAILURE, +}), "treat-as-public to private: failed preflight."); + +subsetTest(promise_test, t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: TestResult.SUCCESS, +}), "treat-as-public to private: success."); + +subsetTest(promise_test, t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: TestResult.SUCCESS, +}), "treat-as-public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/service-worker-update.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/service-worker-update.tentative.https.window.js new file mode 100644 index 0000000..4882d23 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/service-worker-update.tentative.https.window.js @@ -0,0 +1,106 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that `ServiceWorker` script update fetches are exempt from +// Private Network Access checks because they are always same-origin and the +// origin is potentially trustworthy. The client of the fetch, for PNA purposes, +// is taken to be the previous script. +// +// The tests are carried out by instantiating a service worker from a resource +// that carries the `Content-Security-Policy: treat-as-public-address` header, +// such that the registration is placed in the public IP address space. When +// the script is fetched for an update, the client is thus considered public, +// yet the same-origin fetch observes that the server's IP endpoint is not +// necessarily in the public IP address space. +// +// See also: worker.https.window.js + +// Results that may be expected in tests. 
+const TestResult = { + SUCCESS: { updated: true }, + FAILURE: { error: "TypeError" }, +}; + +async function makeTest(t, { target, expected }) { + // The bridge must be same-origin with the service worker script. + const bridgeUrl = resolveUrl( + "resources/service-worker-bridge.html", + sourceResolveOptions({ server: target.server })); + + const scriptUrl = preflightUrl(target); + scriptUrl.searchParams.append("treat-as-public-once", token()); + scriptUrl.searchParams.append("mime-type", "application/javascript"); + scriptUrl.searchParams.append("file", "service-worker.js"); + scriptUrl.searchParams.append("random-js-prefix", true); + + const iframe = await appendIframe(t, document, bridgeUrl); + + const request = (message) => { + const reply = futureMessage(); + iframe.contentWindow.postMessage(message, "*"); + return reply; + }; + + { + const { error, loaded } = await request({ + action: "register", + url: scriptUrl.href, + }); + + assert_equals(error, undefined, "register error"); + assert_true(loaded, "response loaded"); + } + + try { + let { controlled, numControllerChanges } = await request({ + action: "wait", + numControllerChanges: 1, + }); + + assert_equals(numControllerChanges, 1, "controller change"); + assert_true(controlled, "bridge script is controlled"); + + const { error, updated } = await request({ action: "update" }); + + assert_equals(error, expected.error, "update error"); + assert_equals(updated, expected.updated, "registration updated"); + + // Stop here if we do not expect the update to succeed. + if (!expected.updated) { + return; + } + + ({ controlled, numControllerChanges } = await request({ + action: "wait", + numControllerChanges: 2, + })); + + assert_equals(numControllerChanges, 2, "controller change"); + assert_true(controlled, "bridge script still controlled"); + } finally { + const { error, unregistered } = await request({ + action: "unregister", + scope: new URL("./", scriptUrl).href, + }); + + assert_equals(error, undefined, "unregister error"); + assert_true(unregistered, "unregistered"); + } +} + +promise_test(t => makeTest(t, { + target: { server: Server.HTTPS_LOCAL }, + expected: TestResult.SUCCESS, +}), "update public to local: success."); + +promise_test(t => makeTest(t, { + target: { server: Server.HTTPS_PRIVATE }, + expected: TestResult.SUCCESS, +}), "update public to private: success."); + +promise_test(t => makeTest(t, { + target: { server: Server.HTTPS_PUBLIC }, + expected: TestResult.SUCCESS, +}), "update public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/service-worker.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/service-worker.tentative.https.window.js new file mode 100644 index 0000000..046f662 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/service-worker.tentative.https.window.js @@ -0,0 +1,84 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that initial `ServiceWorker` script fetches are exempt from +// Private Network Access checks because they are always same-origin and the +// origin is potentially trustworthy. +// +// See also: worker.https.window.js + +// Results that may be expected in tests. 
+const TestResult = { + SUCCESS: { + register: { loaded: true }, + unregister: { unregistered: true }, + }, + FAILURE: { + register: { error: "TypeError" }, + unregister: { unregistered: false, error: "no registration" }, + }, +}; + +async function makeTest(t, { source, target, expected }) { + const sourceUrl = resolveUrl("resources/service-worker-bridge.html", + sourceResolveOptions(source)); + + const targetUrl = preflightUrl(target); + targetUrl.searchParams.append("body", "undefined"); + targetUrl.searchParams.append("mime-type", "application/javascript"); + + const scope = resolveUrl(`resources/${token()}`, {...target.server}).href; + + const iframe = await appendIframe(t, document, sourceUrl); + + { + const reply = futureMessage(); + const message = { + action: "register", + url: targetUrl.href, + options: { scope }, + }; + iframe.contentWindow.postMessage(message, "*"); + + const { error, loaded } = await reply; + + assert_equals(error, expected.register.error, "register error"); + assert_equals(loaded, expected.register.loaded, "response loaded"); + } + + { + const reply = futureMessage(); + iframe.contentWindow.postMessage({ action: "unregister", scope }, "*"); + + const { error, unregistered } = await reply; + assert_equals(error, expected.unregister.error, "unregister error"); + assert_equals( + unregistered, expected.unregister.unregistered, "worker unregistered"); + } +} + +promise_test(t => makeTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: TestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => makeTest(t, { + source: { + server: Server.HTTPS_PRIVATE, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_PRIVATE }, + expected: TestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => makeTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: TestResult.SUCCESS, +}), "public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.https.window.js new file mode 100644 index 0000000..269abb7 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.https.window.js @@ -0,0 +1,168 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `SharedWorker` scripts that are +// loaded from blob URLs are subject to Private Network Access checks, just like +// fetches from within documents. +// +// This file covers only those tests that must execute in a secure context. 
+// Other tests are defined in: shared-worker-blob-fetch.window.js + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private to local: failed preflight."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to local: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to local: failed preflight."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to local: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to private: failed preflight."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to private: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to local: failed preflight."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), 
"treat-as-public to local (same-origin): no preflight required."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to private: failed preflight."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to public: success."); + diff --git a/test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.window.js b/test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.window.js new file mode 100644 index 0000000..d430ea7 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/shared-worker-blob-fetch.tentative.window.js @@ -0,0 +1,173 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `SharedWorker` scripts that are +// loaded from blob URLs are subject to Private Network Access checks, just like +// fetches from within documents. +// +// This file covers only those tests that must execute in a non-secure context. 
+// Other tests are defined in: shared-worker-blob-fetch.https.window.js + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private to local: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PRIVATE }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to local: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to private: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { preflight: PreflightBehavior.optionalSuccess(token()) }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to public: success."); + +// The following tests verify that workers served over HTTPS are not allowed to +// make private network requests because they are not secure contexts. 
+ +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTP_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local https to local: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private https to local: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to local: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local https to local https: success."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private https to local https: failure."); + +promise_test(t => sharedWorkerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to local https: failure."); diff --git a/test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.https.window.js new file mode 100644 index 0000000..e5f2b94 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.https.window.js @@ -0,0 +1,167 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `SharedWorker` scripts are subject +// to Private Network Access checks, just like fetches from within documents. +// +// This file covers only those tests that must execute in a secure context. 
+// Other tests are defined in: shared-worker-fetch.window.js + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private to local: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to local: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to local: failed preflight."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to local: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to private: failed preflight."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to private: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to local: failed preflight."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to local (same-origin): no preflight required."); + 
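The `sharedWorkerFetchTest` helper and the shared worker script it spins up live in `resources/support.sub.js` and are not shown in this hunk. As a rough sketch, assuming the worker reports its result over the connecting port with `ok`/`body` on success and the error name on failure, the worker side could look like:

self.addEventListener("connect", (event) => {
  const port = event.ports[0];
  port.addEventListener("message", async (evt) => {
    try {
      // This fetch originates from the SharedWorker, which is what subjects it
      // to the Private Network Access checks exercised by these tests.
      const response = await fetch(evt.data.url);
      port.postMessage({ ok: response.ok, body: await response.text() });
    } catch (e) {
      port.postMessage({ error: e.name });
    }
  });
  port.start();
});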
+promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to private: failed preflight."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to public: success."); + diff --git a/test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.window.js b/test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.window.js new file mode 100644 index 0000000..9bc1a89 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/shared-worker-fetch.tentative.window.js @@ -0,0 +1,154 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `SharedWorker` scripts are subject +// to Private Network Access checks, just like fetches from within documents. +// +// This file covers only those tests that must execute in a non-secure context. +// Other tests are defined in: shared-worker-fetch.https.window.js + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private to local: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PRIVATE }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to local: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to private: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: 
{ + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { preflight: PreflightBehavior.optionalSuccess(token()) }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to public: success."); + +// The following tests verify that workers served over HTTPS are not allowed to +// make private network requests because they are not secure contexts. + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local https to local: success."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private https to local: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to local: failure."); + +promise_test(t => sharedWorkerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to private: failure."); diff --git a/test/wpt/tests/fetch/private-network-access/shared-worker.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/shared-worker.tentative.https.window.js new file mode 100644 index 0000000..24ae108 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/shared-worker.tentative.https.window.js @@ -0,0 +1,34 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests mirror `Worker` tests, except using `SharedWorker`. +// See also: worker.https.window.js +// +// This file covers only those tests that must execute in a secure context. 
+// Other tests are defined in: shared-worker.window.js + +promise_test(t => sharedWorkerScriptTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerScriptTestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => sharedWorkerScriptTest(t, { + source: { + server: Server.HTTPS_PRIVATE, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_PRIVATE }, + expected: WorkerScriptTestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => sharedWorkerScriptTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: WorkerScriptTestResult.SUCCESS, +}), "public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/shared-worker.tentative.window.js b/test/wpt/tests/fetch/private-network-access/shared-worker.tentative.window.js new file mode 100644 index 0000000..ffa8a36 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/shared-worker.tentative.window.js @@ -0,0 +1,34 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests mirror `Worker` tests, except using `SharedWorker`. +// See also: shared-worker.window.js +// +// This file covers only those tests that must execute in a non secure context. +// Other tests are defined in: shared-worker.https.window.js + +promise_test(t => sharedWorkerScriptTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTP_LOCAL }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => sharedWorkerScriptTest(t, { + source: { + server: Server.HTTP_PRIVATE, + treatAsPublic: true, + }, + target: { server: Server.HTTP_PRIVATE }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => sharedWorkerScriptTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: WorkerScriptTestResult.SUCCESS, +}), "public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/websocket.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/websocket.tentative.https.window.js new file mode 100644 index 0000000..0731896 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/websocket.tentative.https.window.js @@ -0,0 +1,40 @@ +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that websocket connections behave similarly to fetches. +// +// This file covers only those tests that must execute in a secure context. +// Other tests are defined in: websocket.https.window.js + +setup(() => { + // Making sure we are in a secure context, as expected. 
+ assert_true(window.isSecureContext); +}); + +promise_test(t => websocketTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.WSS_LOCAL }, + expected: WebsocketTestResult.SUCCESS, +}), "local to local: websocket success."); + +promise_test(t => websocketTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.WSS_LOCAL }, + expected: WebsocketTestResult.SUCCESS, +}), "private to local: websocket success."); + +promise_test(t => websocketTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.WSS_LOCAL }, + expected: WebsocketTestResult.SUCCESS, +}), "public to local: websocket success."); + +promise_test(t => websocketTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.WSS_LOCAL }, + expected: WebsocketTestResult.SUCCESS, +}), "treat-as-public to local: websocket success."); diff --git a/test/wpt/tests/fetch/private-network-access/websocket.tentative.window.js b/test/wpt/tests/fetch/private-network-access/websocket.tentative.window.js new file mode 100644 index 0000000..a44cfae --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/websocket.tentative.window.js @@ -0,0 +1,40 @@ +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch + +// These tests verify that websocket connections behave similarly to fetches. +// +// This file covers only those tests that must execute in a non secure context. +// Other tests are defined in: websocket.https.window.js + +setup(() => { + // Making sure we are in a non secure context, as expected. + assert_false(window.isSecureContext); +}); + +promise_test(t => websocketTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.WS_LOCAL }, + expected: WebsocketTestResult.SUCCESS, +}), "local to local: websocket success."); + +promise_test(t => websocketTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.WS_LOCAL }, + expected: WebsocketTestResult.FAILURE, +}), "private to local: websocket failure."); + +promise_test(t => websocketTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.WS_LOCAL }, + expected: WebsocketTestResult.FAILURE, +}), "public to local: websocket failure."); + +promise_test(t => websocketTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.WS_LOCAL }, + expected: WebsocketTestResult.FAILURE, +}), "treat-as-public to local: websocket failure."); diff --git a/test/wpt/tests/fetch/private-network-access/worker-blob-fetch.tentative.window.js b/test/wpt/tests/fetch/private-network-access/worker-blob-fetch.tentative.window.js new file mode 100644 index 0000000..e119746 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/worker-blob-fetch.tentative.window.js @@ -0,0 +1,155 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `Worker` scripts loaded from blob +// URLs are subject to Private Network Access checks, just like fetches from +// within documents. +// +// This file covers only those tests that must execute in a non-secure context. 
+// Other tests are defined in: worker-blob-fetch.https.window.js + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private to local: failure."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PRIVATE }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to local: failure."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to private: failure."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => workerBlobFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { preflight: PreflightBehavior.optionalSuccess(token()) }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => workerBlobFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => workerBlobFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to public: success."); + +// The following tests verify that workers served over HTTPS are not allowed to +// make private network requests because they are not secure contexts. 
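The workers in this file are constructed from blob: URLs by the source document itself, so they share that document's origin and secure-context status regardless of which server the document came from. The actual `workerBlobFetchTest` helper lives in `resources/support.sub.js`; a hedged sketch of the shape such a helper might take, with the result fields assumed to mirror the `ok`/`body`/`error` values used elsewhere in these tests, is:

function blobWorkerFetch(targetUrl) {
  return new Promise((resolve) => {
    // The worker script is generated by the source document and loaded from a
    // blob: URL, so its fetch runs with that document's address space.
    const script = `
      fetch(${JSON.stringify(targetUrl)})
        .then(async (r) => self.postMessage({ ok: r.ok, body: await r.text() }))
        .catch((e) => self.postMessage({ error: e.name }));
    `;
    const blob = new Blob([script], { type: "application/javascript" });
    const worker = new Worker(URL.createObjectURL(blob));
    worker.addEventListener("message", (evt) => resolve(evt.data));
  });
}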
+ +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local https to local https: success."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private https to local https: failure."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to private https: failure."); + +promise_test(t => workerBlobFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to local https: failure."); diff --git a/test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.https.window.js new file mode 100644 index 0000000..89e0c3c --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.https.window.js @@ -0,0 +1,151 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `Worker` scripts are subject to +// Private Network Access checks, just like fetches from within documents. +// +// This file covers only those tests that must execute in a secure context. 
+// Other tests are defined in: worker-fetch.window.js + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private to local: failed preflight."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to local: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to local: failed preflight."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to local: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to private: failed preflight."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to private: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to local: failed preflight."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { preflight: PreflightBehavior.optionalSuccess(token()) }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to private: failed preflight."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: 
Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.window.js b/test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.window.js new file mode 100644 index 0000000..4d6b12f --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/worker-fetch.tentative.window.js @@ -0,0 +1,154 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that fetches from within `Worker` scripts are subject to +// Private Network Access checks, just like fetches from within documents. +// +// This file covers only those tests that must execute in a non-secure context. +// Other tests are defined in: worker-fetch.https.window.js + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_LOCAL }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local to local: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private to local: failure."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PRIVATE }, + expected: WorkerFetchTestResult.SUCCESS, +}), "private to private: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to local: failure."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public to private: failure."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: WorkerFetchTestResult.SUCCESS, +}), "public to public: success."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { preflight: PreflightBehavior.optionalSuccess(token()) }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + 
expected: WorkerFetchTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => workerFetchTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "treat-as-public to public: success."); + +// The following tests verify that workers served over HTTPS are not allowed to +// make private network requests because they are not secure contexts. + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.SUCCESS, +}), "local https to local https: success."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "private https to local https: failure."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to private https: failure."); + +promise_test(t => workerFetchTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: WorkerFetchTestResult.FAILURE, +}), "public https to local https: failure."); diff --git a/test/wpt/tests/fetch/private-network-access/worker.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/worker.tentative.https.window.js new file mode 100644 index 0000000..a0f1931 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/worker.tentative.https.window.js @@ -0,0 +1,37 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that initial `Worker` script fetches in secure contexts are +// exempt from Private Network Access checks because workers can only be fetched +// same-origin and the origin is potentially trustworthy. The only way to test +// this is using the `treat-as-public` CSP directive to artificially place the +// parent document in the `public` IP address space. +// +// This file covers only those tests that must execute in a secure context. 
+// Other tests are defined in: worker.window.js + +promise_test(t => workerScriptTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: WorkerScriptTestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => workerScriptTest(t, { + source: { + server: Server.HTTPS_PRIVATE, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_PRIVATE }, + expected: WorkerScriptTestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => workerScriptTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: WorkerScriptTestResult.SUCCESS, +}), "public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/worker.tentative.window.js b/test/wpt/tests/fetch/private-network-access/worker.tentative.window.js new file mode 100644 index 0000000..118c099 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/worker.tentative.window.js @@ -0,0 +1,37 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests check that initial `Worker` script fetches are subject to Private +// Network Access checks, just like a regular `fetch()`. The main difference is +// that workers can only be fetched same-origin, so the only way to test this +// is using the `treat-as-public` CSP directive to artificially place the parent +// document in the `public` IP address space. +// +// This file covers only those tests that must execute in a non-secure context. +// Other tests are defined in: worker.https.window.js + +promise_test(t => workerScriptTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTP_LOCAL }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to local: failure."); + +promise_test(t => workerScriptTest(t, { + source: { + server: Server.HTTP_PRIVATE, + treatAsPublic: true, + }, + target: { server: Server.HTTP_PRIVATE }, + expected: WorkerScriptTestResult.FAILURE, +}), "treat-as-public to private: failure."); + +promise_test(t => workerScriptTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: WorkerScriptTestResult.SUCCESS, +}), "public to public: success."); diff --git a/test/wpt/tests/fetch/private-network-access/xhr-from-treat-as-public.tentative.https.window.js b/test/wpt/tests/fetch/private-network-access/xhr-from-treat-as-public.tentative.https.window.js new file mode 100644 index 0000000..3aae305 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/xhr-from-treat-as-public.tentative.https.window.js @@ -0,0 +1,83 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests verify that documents fetched from the `local` address space yet +// carrying the `treat-as-public-address` CSP directive are treated as if they +// had been fetched from the `public` address space.
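For readers unfamiliar with the mechanism referred to above: a document opts in by carrying the directive in its Content-Security-Policy response header; the tests below configure this through the `treatAsPublic` option handled by `resources/support.sub.js`. The following Node.js sketch is purely illustrative (it is not part of the WPT harness, and the server details are invented); only the directive name comes from the comment above.

// Hypothetical sketch: a server on a local address delivering a page that
// asks the browser to treat it as if it came from the `public` address space.
const http = require('node:http');

http.createServer((req, res) => {
  res.writeHead(200, {
    'Content-Type': 'text/html',
    // The document is served locally, but this directive asks the browser
    // to treat it as public for Private Network Access purposes.
    'Content-Security-Policy': 'treat-as-public-address',
  });
  res.end('<!doctype html><p>test page</p>');
}).listen(8080, '127.0.0.1');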
+ +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.FAILURE, +}), "treat-as-public to local: failed preflight."); + +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.OTHER_HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.SUCCESS, +}), "treat-as-public to local: success."); + +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { server: Server.HTTPS_LOCAL }, + expected: XhrTestResult.SUCCESS, +}), "treat-as-public to local (same-origin): no preflight required."); + +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.FAILURE, +}), "treat-as-public to private: failed preflight."); + +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.SUCCESS, +}), "treat-as-public to private: success."); + +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTPS_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "treat-as-public to public: no preflight required."); diff --git a/test/wpt/tests/fetch/private-network-access/xhr.https.tentative.window.js b/test/wpt/tests/fetch/private-network-access/xhr.https.tentative.window.js new file mode 100644 index 0000000..4dc5da9 --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/xhr.https.tentative.window.js @@ -0,0 +1,142 @@ +// META: script=/common/subset-tests-by-key.js +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// META: variant=?include=from-local +// META: variant=?include=from-private +// META: variant=?include=from-public +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests mirror fetch.https.window.js, but use `XmlHttpRequest` instead of +// `fetch()` to perform subresource fetches. Preflights are tested less +// extensively due to coverage being already provided by `fetch()`. +// +// This file covers only those tests that must execute in a secure context. +// Other tests are defined in: xhr.window.js + +setup(() => { + // Making sure we are in a secure context, as expected. + assert_true(window.isSecureContext); +}); + +// Source: secure local context. +// +// All fetches unaffected by Private Network Access. 
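One way to read the expectations in this file: address spaces are ordered from most to least private as local, private, public, and a fetch only receives the extra Private Network Access treatment when its target is less public than the fetching context. The helper below is a rough sketch of that reading, not the spec algorithm, and the names are invented for illustration.

// Illustrative only; not used by the tests.
const ADDRESS_SPACE_RANK = { local: 0, private: 1, public: 2 };

function needsPnaPreflight(sourceSpace, targetSpace) {
  // A request targets a "less public" address space when the target ranks
  // strictly lower than the source; only those requests need the extra
  // preflight described in the sections of this file.
  return ADDRESS_SPACE_RANK[targetSpace] < ADDRESS_SPACE_RANK[sourceSpace];
}

// Matches the groupings below:
//   needsPnaPreflight('local', 'public')   -> false  ("no preflight required")
//   needsPnaPreflight('private', 'local')  -> true   ("failed preflight" / "success")
//   needsPnaPreflight('public', 'private') -> true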
+ +subsetTestByKey("from-local", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { server: Server.HTTPS_LOCAL }, + expected: XhrTestResult.SUCCESS, +}), "local to local: no preflight required."); + +subsetTestByKey("from-local", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "local to private: no preflight required."); + +subsetTestByKey("from-local", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "local to public: no preflight required."); + +// Source: private secure context. +// +// Fetches to the local address space require a successful preflight response +// carrying a PNA-specific header. + +subsetTestByKey("from-private", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.FAILURE, +}), "private to local: failed preflight."); + +subsetTestByKey("from-private", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.SUCCESS, +}), "private to local: success."); + +subsetTestByKey("from-private", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { server: Server.HTTPS_PRIVATE }, + expected: XhrTestResult.SUCCESS, +}), "private to private: no preflight required."); + +subsetTestByKey("from-private", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "private to public: no preflight required."); + +// Source: public secure context. +// +// Fetches to the local and private address spaces require a successful +// preflight response carrying a PNA-specific header. 
+ +subsetTestByKey("from-public", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.FAILURE, +}), "public to local: failed preflight."); + +subsetTestByKey("from-public", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.SUCCESS, +}), "public to local: success."); + +subsetTestByKey("from-public", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.FAILURE, +}), "public to private: failed preflight."); + +subsetTestByKey("from-public", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.success(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.SUCCESS, +}), "public to private: success."); + +subsetTestByKey("from-public", promise_test, t => xhrTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { server: Server.HTTPS_PUBLIC }, + expected: XhrTestResult.SUCCESS, +}), "public to public: no preflight required."); diff --git a/test/wpt/tests/fetch/private-network-access/xhr.tentative.window.js b/test/wpt/tests/fetch/private-network-access/xhr.tentative.window.js new file mode 100644 index 0000000..fa307dc --- /dev/null +++ b/test/wpt/tests/fetch/private-network-access/xhr.tentative.window.js @@ -0,0 +1,195 @@ +// META: script=/common/utils.js +// META: script=resources/support.sub.js +// +// Spec: https://wicg.github.io/private-network-access/#integration-fetch +// +// These tests mirror fetch.window.js, but use `XmlHttpRequest` instead of +// `fetch()` to perform subresource fetches. +// +// This file covers only those tests that must execute in a non secure context. +// Other tests are defined in: xhr.https.window.js + +setup(() => { + // Making sure we are in a non secure context, as expected. 
+ assert_false(window.isSecureContext); +}); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { server: Server.HTTP_LOCAL }, + expected: XhrTestResult.SUCCESS, +}), "local to local: no preflight required."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "local to private: no preflight required."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_LOCAL }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "local to public: no preflight required."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "private to local: failure."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { server: Server.HTTP_PRIVATE }, + expected: XhrTestResult.SUCCESS, +}), "private to private: no preflight required."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_PRIVATE }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "private to public: no preflight required."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "public to local: failure."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "public to private: failure."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTP_PUBLIC }, + target: { server: Server.HTTP_PUBLIC }, + expected: XhrTestResult.SUCCESS, +}), "public to public: no preflight required."); + +// These tests verify that documents fetched from the `local` address space yet +// carrying the `treat-as-public-address` CSP directive are treated as if they +// had been fetched from the `public` address space. 
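Unlike the matching cases in xhr-from-treat-as-public.tentative.https.window.js, the requests below fail even when the target would grant a preflight, because this page is served over plain HTTP and is therefore not a secure context. The sketch below is one rough way to read the expectations, assuming the target's CORS response is otherwise acceptable; the function and names are invented for illustration.

// Sketch of how the expectations below combine the two conditions
// (illustrative only; CORS is assumed to pass).
function treatAsPublicExpectation(targetSpace, { secureContext, preflightGranted }) {
  if (targetSpace === 'public') {
    return 'SUCCESS';            // public is not "less public" than public
  }
  if (!secureContext) {
    return 'FAILURE';            // this file: plain-HTTP page, always blocked
  }
  return preflightGranted ? 'SUCCESS' : 'FAILURE';  // the .https variant
}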
+ +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "treat-as-public-address to local: failure."); + +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "treat-as-public-address to private: failure."); + +promise_test(t => xhrTest(t, { + source: { + server: Server.HTTP_LOCAL, + treatAsPublic: true, + }, + target: { + server: Server.HTTP_PUBLIC, + behavior: { response: ResponseBehavior.allowCrossOrigin() }, + }, + expected: XhrTestResult.SUCCESS, +}), "treat-as-public-address to public: no preflight required."); + +// These tests verify that HTTPS iframes embedded in an HTTP top-level document +// cannot fetch subresources from less-public address spaces. Indeed, even +// though the iframes have HTTPS origins, they are non-secure contexts because +// their parent is a non-secure context. + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTPS_LOCAL }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.SUCCESS, +}), "local https to local: success."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTPS_PRIVATE }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "private https to local: failure."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_LOCAL, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "public https to local: failure."); + +promise_test(t => xhrTest(t, { + source: { server: Server.HTTPS_PUBLIC }, + target: { + server: Server.HTTPS_PRIVATE, + behavior: { + preflight: PreflightBehavior.optionalSuccess(token()), + response: ResponseBehavior.allowCrossOrigin(), + }, + }, + expected: XhrTestResult.FAILURE, +}), "public https to private: failure."); diff --git a/test/wpt/tests/fetch/range/blob.any.js b/test/wpt/tests/fetch/range/blob.any.js new file mode 100644 index 0000000..7bcd4b9 --- /dev/null +++ b/test/wpt/tests/fetch/range/blob.any.js @@ -0,0 +1,233 @@ +// META: script=/common/utils.js + +const supportedBlobRange = [ + { + name: "A simple blob range request.", + data: ["A simple Hello, World! example"], + type: "text/plain", + range: "bytes=9-21", + content_length: 13, + content_range: "bytes 9-21/30", + result: "Hello, World!", + }, + { + name: "A blob range request with no type.", + data: ["A simple Hello, World! 
example"], + type: undefined, + range: "bytes=9-21", + content_length: 13, + content_range: "bytes 9-21/30", + result: "Hello, World!", + }, + { + name: "A blob range request with no end.", + data: ["Range with no end"], + type: "text/plain", + range: "bytes=11-", + content_length: 6, + content_range: "bytes 11-16/17", + result: "no end", + }, + { + name: "A blob range request with no start.", + data: ["Range with no start"], + type: "text/plain", + range: "bytes=-8", + content_length: 8, + content_range: "bytes 11-18/19", + result: "no start", + }, + { + name: "A simple blob range request with whitespace.", + data: ["A simple Hello, World! example"], + type: "text/plain", + range: "bytes= \t9-21", + content_length: 13, + content_range: "bytes 9-21/30", + result: "Hello, World!", + }, + { + name: "Blob content with short content and a large range end", + data: ["Not much here"], + type: "text/plain", + range: "bytes=4-100000000000", + content_length: 9, + content_range: "bytes 4-12/13", + result: "much here", + }, + { + name: "Blob content with short content and a range end matching content length", + data: ["Not much here"], + type: "text/plain", + range: "bytes=4-13", + content_length: 9, + content_range: "bytes 4-12/13", + result: "much here", + }, + { + name: "Blob range with whitespace before and after hyphen", + data: ["Valid whitespace #1"], + type: "text/plain", + range: "bytes=5 - 10", + content_length: 6, + content_range: "bytes 5-10/19", + result: " white", + }, + { + name: "Blob range with whitespace after hyphen", + data: ["Valid whitespace #2"], + type: "text/plain", + range: "bytes=-\t 5", + content_length: 5, + content_range: "bytes 14-18/19", + result: "ce #2", + }, + { + name: "Blob range with whitespace around equals sign", + data: ["Valid whitespace #3"], + type: "text/plain", + range: "bytes \t =\t 6-", + content_length: 13, + content_range: "bytes 6-18/19", + result: "whitespace #3", + }, +]; + +const unsupportedBlobRange = [ + { + name: "Blob range with no value", + data: ["Blob range should have a value"], + type: "text/plain", + range: "", + }, + { + name: "Blob range with incorrect range header", + data: ["A"], + type: "text/plain", + range: "byte=0-" + }, + { + name: "Blob range with incorrect range header #2", + data: ["A"], + type: "text/plain", + range: "bytes" + }, + { + name: "Blob range with incorrect range header #3", + data: ["A"], + type: "text/plain", + range: "bytes\t \t" + }, + { + name: "Blob range request with multiple range values", + data: ["Multiple ranges are not currently supported"], + type: "text/plain", + range: "bytes=0-5,15-", + }, + { + name: "Blob range request with multiple range values and whitespace", + data: ["Multiple ranges are not currently supported"], + type: "text/plain", + range: "bytes=0-5, 15-", + }, + { + name: "Blob range request with trailing comma", + data: ["Range with invalid trailing comma"], + type: "text/plain", + range: "bytes=0-5,", + }, + { + name: "Blob range with no start or end", + data: ["Range with no start or end"], + type: "text/plain", + range: "bytes=-", + }, + { + name: "Blob range request with short range end", + data: ["Range end should be greater than range start"], + type: "text/plain", + range: "bytes=10-5", + }, + { + name: "Blob range start should be an ASCII digit", + data: ["Range start must be an ASCII digit"], + type: "text/plain", + range: "bytes=x-5", + }, + { + name: "Blob range should have a dash", + data: ["Blob range should have a dash"], + type: "text/plain", + range: "bytes=5", + }, + 
{ + name: "Blob range end should be an ASCII digit", + data: ["Range end must be an ASCII digit"], + type: "text/plain", + range: "bytes=5-x", + }, + { + name: "Blob range should include '-'", + data: ["Range end must include '-'"], + type: "text/plain", + range: "bytes=x", + }, + { + name: "Blob range should include '='", + data: ["Range end must include '='"], + type: "text/plain", + range: "bytes 5-", + }, + { + name: "Blob range should include 'bytes='", + data: ["Range end must include 'bytes='"], + type: "text/plain", + range: "5-", + }, + { + name: "Blob content with short content and a large range start", + data: ["Not much here"], + type: "text/plain", + range: "bytes=100000-", + }, + { + name: "Blob content with short content and a range start matching the content length", + data: ["Not much here"], + type: "text/plain", + range: "bytes=13-", + }, +]; + +supportedBlobRange.forEach(({ name, data, type, range, content_length, content_range, result }) => { + promise_test(async t => { + const blob = new Blob(data, { "type" : type }); + const blobURL = URL.createObjectURL(blob); + t.add_cleanup(() => URL.revokeObjectURL(blobURL)); + const resp = await fetch(blobURL, { + "headers": { + "Range": range + } + }); + assert_equals(resp.status, 206, "HTTP status is 206"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), type || "", "Content-Type is " + resp.headers.get("Content-Type")); + assert_equals(resp.headers.get("Content-Length"), content_length.toString(), "Content-Length is " + resp.headers.get("Content-Length")); + assert_equals(resp.headers.get("Content-Range"), content_range, "Content-Range is " + resp.headers.get("Content-Range")); + const text = await resp.text(); + assert_equals(text, result, "Response's body is correct"); + }, name); +}); + +unsupportedBlobRange.forEach(({ name, data, type, range }) => { + promise_test(t => { + const blob = new Blob(data, { "type" : type }); + const blobURL = URL.createObjectURL(blob); + t.add_cleanup(() => URL.revokeObjectURL(blobURL)); + const promise = fetch(blobURL, { + "headers": { + "Range": range + } + }); + return promise_rejects_js(t, TypeError, promise); + }, name); +}); diff --git a/test/wpt/tests/fetch/range/data.any.js b/test/wpt/tests/fetch/range/data.any.js new file mode 100644 index 0000000..22ef11e --- /dev/null +++ b/test/wpt/tests/fetch/range/data.any.js @@ -0,0 +1,29 @@ +// META: script=/common/utils.js + +promise_test(async () => { + return fetch("data:text/plain;charset=US-ASCII,paddingHello%2C%20World%21padding", { + "method": "GET", + "Range": "bytes=13-26" + }).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), "text/plain;charset=US-ASCII", "Content-Type is " + resp.headers.get("Content-Type")); + return resp.text(); + }).then(function(text) { + assert_equals(text, 'paddingHello, World!padding', "Response's body ignores range"); + }); +}, "data: URL and Range header"); + +promise_test(async () => { + return fetch("data:text/plain;charset=US-ASCII,paddingHello%2C%20paddingWorld%21padding", { + "method": "GET", + "Range": "bytes=7-14,21-27" + }).then(function(resp) { + assert_equals(resp.status, 200, "HTTP status is 200"); + assert_equals(resp.type, "basic", "response type is basic"); + assert_equals(resp.headers.get("Content-Type"), "text/plain;charset=US-ASCII", "Content-Type is " + 
resp.headers.get("Content-Type")); + return resp.text(); + }).then(function(text) { + assert_equals(text, 'paddingHello, paddingWorld!padding', "Response's body ignores range"); + }); +}, "data: URL and Range header with multiple ranges"); diff --git a/test/wpt/tests/fetch/range/general.any.js b/test/wpt/tests/fetch/range/general.any.js new file mode 100644 index 0000000..64b225a --- /dev/null +++ b/test/wpt/tests/fetch/range/general.any.js @@ -0,0 +1,140 @@ +// META: timeout=long +// META: global=window,worker +// META: script=/common/get-host-info.sub.js +// META: script=/common/utils.js + +// Helpers that return headers objects with a particular guard +function headersGuardNone(fill) { + if (fill) return new Headers(fill); + return new Headers(); +} + +function headersGuardResponse(fill) { + const opts = {}; + if (fill) opts.headers = fill; + return new Response('', opts).headers; +} + +function headersGuardRequest(fill) { + const opts = {}; + if (fill) opts.headers = fill; + return new Request('./', opts).headers; +} + +function headersGuardRequestNoCors(fill) { + const opts = { mode: 'no-cors' }; + if (fill) opts.headers = fill; + return new Request('./', opts).headers; +} + +const headerGuardTypes = [ + ['none', headersGuardNone], + ['response', headersGuardResponse], + ['request', headersGuardRequest] +]; + +for (const [guardType, createHeaders] of headerGuardTypes) { + test(() => { + // There are three ways to set headers. + // Filling, appending, and setting. Test each: + let headers = createHeaders({ Range: 'foo' }); + assert_equals(headers.get('Range'), 'foo'); + + headers = createHeaders(); + headers.append('Range', 'foo'); + assert_equals(headers.get('Range'), 'foo'); + + headers = createHeaders(); + headers.set('Range', 'foo'); + assert_equals(headers.get('Range'), 'foo'); + }, `Range header setting allowed for guard type: ${guardType}`); +} + +test(() => { + let headers = headersGuardRequestNoCors({ Range: 'foo' }); + assert_false(headers.has('Range')); + + headers = headersGuardRequestNoCors(); + headers.append('Range', 'foo'); + assert_false(headers.has('Range')); + + headers = headersGuardRequestNoCors(); + headers.set('Range', 'foo'); + assert_false(headers.has('Range')); +}, `Privileged header not allowed for guard type: request-no-cors`); + +promise_test(async () => { + const wavURL = new URL('resources/long-wav.py', location); + const stashTakeURL = new URL('resources/stash-take.py', location); + + function changeToken() { + const stashToken = token(); + wavURL.searchParams.set('accept-encoding-key', stashToken); + stashTakeURL.searchParams.set('key', stashToken); + } + + const rangeHeaders = [ + 'bytes=0-10', + 'foo=0-10', + 'foo', + '' + ]; + + for (const rangeHeader of rangeHeaders) { + changeToken(); + + await fetch(wavURL, { + headers: { Range: rangeHeader } + }); + + const response = await fetch(stashTakeURL); + + assert_regexp_match(await response.json(), + /.*\bidentity\b.*/, + `Expect identity accept-encoding if range header is ${JSON.stringify(rangeHeader)}`); + } +}, `Fetch with range header will be sent with Accept-Encoding: identity`); + +promise_test(async () => { + const wavURL = new URL(get_host_info().HTTP_REMOTE_ORIGIN + '/fetch/range/resources/long-wav.py'); + const stashTakeURL = new URL('resources/stash-take.py', location); + + function changeToken() { + const stashToken = token(); + wavURL.searchParams.set('accept-encoding-key', stashToken); + stashTakeURL.searchParams.set('key', stashToken); + } + + const rangeHeaders = [ + 'bytes=10-9', + 
'bytes=-0', + 'bytes=0000000000000000000000000000000000000000000000000000000000011-0000000000000000000000000000000000000000000000000000000000111', + ]; + + for (const rangeHeader of rangeHeaders) { + changeToken(); + await fetch(wavURL, { headers: { Range : rangeHeader} }).then(() => { throw "loaded with range header " + rangeHeader }, () => { }); + } +}, `Cross Origin Fetch with non safe range header`); + +promise_test(async () => { + const wavURL = new URL(get_host_info().HTTP_REMOTE_ORIGIN + '/fetch/range/resources/long-wav.py'); + const stashTakeURL = new URL('resources/stash-take.py', location); + + function changeToken() { + const stashToken = token(); + wavURL.searchParams.set('accept-encoding-key', stashToken); + stashTakeURL.searchParams.set('key', stashToken); + } + + const rangeHeaders = [ + 'bytes=0-10', + 'bytes=0-', + 'bytes=00000000000000000000000000000000000000000000000000000000011-00000000000000000000000000000000000000000000000000000000000111', + ]; + + for (const rangeHeader of rangeHeaders) { + changeToken(); + await fetch(wavURL, { headers: { Range: rangeHeader } }).then(() => { }, () => { throw "failed load with range header " + rangeHeader }); + } +}, `Cross Origin Fetch with safe range header`); diff --git a/test/wpt/tests/fetch/range/general.window.js b/test/wpt/tests/fetch/range/general.window.js new file mode 100644 index 0000000..afe80d6 --- /dev/null +++ b/test/wpt/tests/fetch/range/general.window.js @@ -0,0 +1,29 @@ +// META: script=resources/utils.js +// META: script=/common/utils.js + +const onload = new Promise(r => window.addEventListener('load', r)); + +// It's weird that browsers do this, but it should continue to work. +promise_test(async t => { + await loadScript('resources/partial-script.py?pretend-offset=90000'); + assert_true(self.scriptExecuted); +}, `Script executed from partial response`); + +promise_test(async () => { + const wavURL = new URL('resources/long-wav.py', location); + const stashTakeURL = new URL('resources/stash-take.py', location); + const stashToken = token(); + wavURL.searchParams.set('accept-encoding-key', stashToken); + stashTakeURL.searchParams.set('key', stashToken); + + // The testing framework waits for window onload. If the audio element + // is appended before onload, it extends it, and the test times out. 
+ await onload; + + const audio = appendAudio(document, wavURL); + await new Promise(r => audio.addEventListener('progress', r)); + audio.remove(); + + const response = await fetch(stashTakeURL); + assert_equals(await response.json(), 'identity', `Expect identity accept-encoding on media request`); +}, `Fetch with range header will be sent with Accept-Encoding: identity`); diff --git a/test/wpt/tests/fetch/range/non-matching-range-response.html b/test/wpt/tests/fetch/range/non-matching-range-response.html new file mode 100644 index 0000000..ba76c36 --- /dev/null +++ b/test/wpt/tests/fetch/range/non-matching-range-response.html @@ -0,0 +1,34 @@ + + + + + + + diff --git a/test/wpt/tests/fetch/range/resources/basic.html b/test/wpt/tests/fetch/range/resources/basic.html new file mode 100644 index 0000000..0e76edd --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/basic.html @@ -0,0 +1 @@ + diff --git a/test/wpt/tests/fetch/range/resources/long-wav.py b/test/wpt/tests/fetch/range/resources/long-wav.py new file mode 100644 index 0000000..acfc81a --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/long-wav.py @@ -0,0 +1,134 @@ +""" +This generates a 30 minute silent wav, and is capable of +responding to Range requests. +""" +import time +import re +import struct + +from wptserve.utils import isomorphic_decode + +def create_wav_header(sample_rate, bit_depth, channels, duration): + bytes_per_sample = int(bit_depth / 8) + block_align = bytes_per_sample * channels + byte_rate = sample_rate * block_align + sub_chunk_2_size = duration * byte_rate + + data = b'' + # ChunkID + data += b'RIFF' + # ChunkSize + data += struct.pack(' 0: + to_send = b'\x00' * min(bytes_remaining_to_send, sample_rate) + bytes_remaining_to_send -= len(to_send) + + if not response.writer.write(to_send): + break + + # Throttle the stream + time.sleep(0.5) diff --git a/test/wpt/tests/fetch/range/resources/partial-script.py b/test/wpt/tests/fetch/range/resources/partial-script.py new file mode 100644 index 0000000..a9570ec --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/partial-script.py @@ -0,0 +1,29 @@ +""" +This generates a partial response containing valid JavaScript. +""" + +def main(request, response): + require_range = request.GET.first(b'require-range', b'') + pretend_offset = int(request.GET.first(b'pretend-offset', b'0')) + range_header = request.headers.get(b'Range', b'') + + if require_range and not range_header: + response.set_error(412, u"Range header required") + response.write() + return + + response.headers.set(b"Content-Type", b"text/plain") + response.headers.set(b"Accept-Ranges", b"bytes") + response.headers.set(b"Cache-Control", b"no-cache") + response.status = 206 + + to_send = b'self.scriptExecuted = true;' + length = len(to_send) + + content_range = b"bytes %d-%d/%d" % ( + pretend_offset, pretend_offset + length - 1, pretend_offset + length) + + response.headers.set(b"Content-Range", content_range) + response.headers.set(b"Content-Length", length) + + response.content = to_send diff --git a/test/wpt/tests/fetch/range/resources/partial-text.py b/test/wpt/tests/fetch/range/resources/partial-text.py new file mode 100644 index 0000000..fa3d117 --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/partial-text.py @@ -0,0 +1,53 @@ +""" +This generates a partial response for a 100-byte text file. 
+""" +import re + +from wptserve.utils import isomorphic_decode + +def main(request, response): + total_length = int(request.GET.first(b'length', b'100')) + partial_code = int(request.GET.first(b'partial', b'206')) + content_type = request.GET.first(b'type', b'text/plain') + range_header = request.headers.get(b'Range', b'') + + # Send a 200 if there is no range request + if not range_header: + to_send = ''.zfill(total_length) + response.headers.set(b"Content-Type", content_type) + response.headers.set(b"Cache-Control", b"no-cache") + response.headers.set(b"Content-Length", total_length) + response.content = to_send + return + + # Simple range parsing, requires specifically "bytes=xxx-xxxx" + range_header_match = re.search(r'^bytes=(\d*)-(\d*)$', isomorphic_decode(range_header)) + start, end = range_header_match.groups() + start = int(start) + end = int(end) if end else total_length + length = end - start + + # Error the request if the range goes beyond the length + if length <= 0 or end > total_length: + response.set_error(416, u"Range Not Satisfiable") + # set_error sets the MIME type to application/json, which - for a + # no-cors media request - will be blocked by ORB. We'll just force + # the expected MIME type here, whichfixes the test, but doesn't make + # sense in general. + response.headers = [(b"Content-Type", content_type)] + response.write() + return + + # Generate a partial response of the requested length + to_send = ''.zfill(length) + response.headers.set(b"Content-Type", content_type) + response.headers.set(b"Accept-Ranges", b"bytes") + response.headers.set(b"Cache-Control", b"no-cache") + response.status = partial_code + + content_range = b"bytes %d-%d/%d" % (start, end, total_length) + + response.headers.set(b"Content-Range", content_range) + response.headers.set(b"Content-Length", length) + + response.content = to_send diff --git a/test/wpt/tests/fetch/range/resources/range-sw.js b/test/wpt/tests/fetch/range/resources/range-sw.js new file mode 100644 index 0000000..b47823f --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/range-sw.js @@ -0,0 +1,218 @@ +importScripts('/resources/testharness.js'); + +setup({ explicit_done: true }); + +function assert_range_request(request, expectedRangeHeader, name) { + assert_equals(request.headers.get('Range'), expectedRangeHeader, name); +} + +async function broadcast(msg) { + for (const client of await clients.matchAll()) { + client.postMessage(msg); + } +} + +addEventListener('fetch', async event => { + /** @type Request */ + const request = event.request; + const url = new URL(request.url); + const action = url.searchParams.get('action'); + + switch (action) { + case 'range-header-filter-test': + rangeHeaderFilterTest(request); + return; + case 'range-header-passthrough-test': + rangeHeaderPassthroughTest(event); + return; + case 'store-ranged-response': + storeRangedResponse(event); + return; + case 'use-stored-ranged-response': + useStoredRangeResponse(event); + return; + case 'broadcast-accept-encoding': + broadcastAcceptEncoding(event); + return; + case 'record-media-range-request': + return recordMediaRangeRequest(event); + case 'use-media-range-request': + useMediaRangeRequest(event); + return; + } +}); + +/** + * @param {Request} request + */ +function rangeHeaderFilterTest(request) { + const rangeValue = request.headers.get('Range'); + + test(() => { + assert_range_request(new Request(request), rangeValue, `Untampered`); + assert_range_request(new Request(request, {}), rangeValue, `Untampered (no init props set)`); + 
assert_range_request(new Request(request, { __foo: 'bar' }), rangeValue, `Untampered (only invalid props set)`); + assert_range_request(new Request(request, { mode: 'cors' }), rangeValue, `More permissive mode`); + assert_range_request(request.clone(), rangeValue, `Clone`); + }, "Range headers correctly preserved"); + + test(() => { + assert_range_request(new Request(request, { headers: { Range: 'foo' } }), null, `Tampered - range header set`); + assert_range_request(new Request(request, { headers: {} }), null, `Tampered - empty headers set`); + assert_range_request(new Request(request, { mode: 'no-cors' }), null, `Tampered – mode set`); + assert_range_request(new Request(request, { cache: 'no-cache' }), null, `Tampered – cache mode set`); + }, "Range headers correctly removed"); + + test(() => { + let headers; + + headers = new Request(request).headers; + headers.delete('does-not-exist'); + assert_equals(headers.get('Range'), rangeValue, `Preserved if no header actually removed`); + + headers = new Request(request).headers; + headers.append('foo', 'bar'); + assert_equals(headers.get('Range'), rangeValue, `Preserved if silent-failure on append (due to request-no-cors guard)`); + + headers = new Request(request).headers; + headers.set('foo', 'bar'); + assert_equals(headers.get('Range'), rangeValue, `Preserved if silent-failure on set (due to request-no-cors guard)`); + + headers = new Request(request).headers; + headers.append('Range', 'foo'); + assert_equals(headers.get('Range'), rangeValue, `Preserved if silent-failure on append (due to request-no-cors guard)`); + + headers = new Request(request).headers; + headers.set('Range', 'foo'); + assert_equals(headers.get('Range'), rangeValue, `Preserved if silent-failure on set (due to request-no-cors guard)`); + + headers = new Request(request).headers; + headers.append('Accept', 'whatever'); + assert_equals(headers.get('Range'), null, `Stripped if header successfully appended`); + + headers = new Request(request).headers; + headers.set('Accept', 'whatever'); + assert_equals(headers.get('Range'), null, `Stripped if header successfully set`); + + headers = new Request(request).headers; + headers.delete('Accept'); + assert_equals(headers.get('Range'), null, `Stripped if header successfully deleted`); + + headers = new Request(request).headers; + headers.delete('Range'); + assert_equals(headers.get('Range'), null, `Stripped if range header successfully deleted`); + }, "Headers correctly filtered"); + + done(); +} + +function rangeHeaderPassthroughTest(event) { + /** @type Request */ + const request = event.request; + const url = new URL(request.url); + const key = url.searchParams.get('range-received-key'); + + event.waitUntil(new Promise(resolve => { + promise_test(async () => { + await fetch(event.request); + const response = await fetch('stash-take.py?key=' + key); + assert_equals(await response.json(), 'range-header-received'); + resolve(); + }, `Include range header in network request`); + + done(); + })); + + // Just send back any response, it isn't important for the test. + event.respondWith(new Response('')); +} + +let storedRangeResponseP; + +function storeRangedResponse(event) { + /** @type Request */ + const request = event.request; + const id = new URL(request.url).searchParams.get('id'); + + storedRangeResponseP = fetch(event.request); + broadcast({ id }); + + // Just send back any response, it isn't important for the test. 
+ event.respondWith(new Response('')); +} + +function useStoredRangeResponse(event) { + event.respondWith(async function() { + const response = await storedRangeResponseP; + if (!response) throw Error("Expected stored range response"); + return response.clone(); + }()); +} + +function broadcastAcceptEncoding(event) { + /** @type Request */ + const request = event.request; + const id = new URL(request.url).searchParams.get('id'); + + broadcast({ + id, + acceptEncoding: request.headers.get('Accept-Encoding') + }); + + // Just send back any response, it isn't important for the test. + event.respondWith(new Response('')); +} + +let rangeResponse = {}; + +async function recordMediaRangeRequest(event) { + /** @type Request */ + const request = event.request; + const url = new URL(request.url); + const urlParams = new URLSearchParams(url.search); + const size = urlParams.get("size"); + const id = urlParams.get('id'); + const key = 'size' + size; + + if (key in rangeResponse) { + // Don't re-fetch ranges we already have. + const clonedResponse = rangeResponse[key].clone(); + event.respondWith(clonedResponse); + } else if (event.request.headers.get("range") === "bytes=0-") { + // Generate a bogus 206 response to trigger subsequent range requests + // of the desired size. + const length = urlParams.get("length") + 100; + const body = "A".repeat(Number(size)); + event.respondWith(new Response(body, {status: 206, headers: { + "Content-Type": "audio/mp4", + "Content-Range": `bytes 0-1/${length}` + }})); + } else if (event.request.headers.get("range") === `bytes=${Number(size)}-`) { + // Pass through actual range requests which will attempt to fetch up to the + // length in the original response which is bigger than the actual resource + // to make sure 206 and 416 responses are treated the same. + rangeResponse[key] = await fetch(event.request); + + // Let the client know we have the range response for the given ID + broadcast({id}); + } else { + event.respondWith(Promise.reject(Error("Invalid Request"))); + } +} + +function useMediaRangeRequest(event) { + /** @type Request */ + const request = event.request; + const url = new URL(request.url); + const urlParams = new URLSearchParams(url.search); + const size = urlParams.get("size"); + const key = 'size' + size; + + // Send a clone of the range response to preload. 
+ if (key in rangeResponse) { + const clonedResponse = rangeResponse[key].clone(); + event.respondWith(clonedResponse); + } else { + event.respondWith(Promise.reject(Error("Invalid Request"))); + } +} diff --git a/test/wpt/tests/fetch/range/resources/stash-take.py b/test/wpt/tests/fetch/range/resources/stash-take.py new file mode 100644 index 0000000..6cf6ff5 --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/stash-take.py @@ -0,0 +1,7 @@ +from wptserve.handlers import json_handler + + +@json_handler +def main(request, response): + key = request.GET.first(b"key") + return request.server.stash.take(key, b'/fetch/range/') diff --git a/test/wpt/tests/fetch/range/resources/utils.js b/test/wpt/tests/fetch/range/resources/utils.js new file mode 100644 index 0000000..ad2853b --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/utils.js @@ -0,0 +1,36 @@ +function loadScript(url, { doc = document }={}) { + return new Promise((resolve, reject) => { + const script = doc.createElement('script'); + script.onload = () => resolve(); + script.onerror = () => reject(Error("Script load failed")); + script.src = url; + doc.body.appendChild(script); + }) +} + +function preloadImage(url, { doc = document }={}) { + return new Promise((resolve, reject) => { + const preload = doc.createElement('link'); + preload.rel = 'preload'; + preload.as = 'image'; + preload.onload = () => resolve(); + preload.onerror = () => resolve(); + preload.href = url; + doc.body.appendChild(preload); + }) +} + +/** + * + * @param {Document} document + * @param {string|URL} url + * @returns {HTMLAudioElement} + */ +function appendAudio(document, url) { + const audio = document.createElement('audio'); + audio.muted = true; + audio.src = url; + audio.preload = true; + document.body.appendChild(audio); + return audio; +} diff --git a/test/wpt/tests/fetch/range/resources/video-with-range.py b/test/wpt/tests/fetch/range/resources/video-with-range.py new file mode 100644 index 0000000..2d15ccf --- /dev/null +++ b/test/wpt/tests/fetch/range/resources/video-with-range.py @@ -0,0 +1,43 @@ +import re +import os +import json +from wptserve.utils import isomorphic_decode + +def main(request, response): + path = os.path.join(request.doc_root, u"media", "sine440.mp3") + total_size = os.path.getsize(path) + rewrites = json.loads(request.GET.first(b'rewrites', '[]')) + range_header = request.headers.get(b'Range') + range_header_match = range_header and re.search(r'^bytes=(\d*)-(\d*)$', isomorphic_decode(range_header)) + start = None + end = None + if range_header_match: + response.status = 206 + start, end = range_header_match.groups() + if range_header: + status = 206 + else: + status = 200 + for rewrite in rewrites: + req_start, req_end = rewrite['request'] + if start == req_start or req_start == '*': + if end == req_end or req_end == '*': + if 'response' in rewrite: + start, end = rewrite['response'] + if 'status' in rewrite: + status = rewrite['status'] + + start = int(start or 0) + end = int(end or total_size) + headers = [] + if status == 206: + headers.append((b"Content-Range", b"bytes %d-%d/%d" % (start, end - 1, total_size))) + headers.append((b"Accept-Ranges", b"bytes")) + + headers.append((b"Content-Type", b"audio/mp3")) + headers.append((b"Content-Length", str(end - start))) + headers.append((b"Cache-Control", b"no-cache")) + video_file = open(path, "rb") + video_file.seek(start) + content = video_file.read(end) + return status, headers, content diff --git a/test/wpt/tests/fetch/range/sw.https.window.js 
b/test/wpt/tests/fetch/range/sw.https.window.js new file mode 100644 index 0000000..62ad894 --- /dev/null +++ b/test/wpt/tests/fetch/range/sw.https.window.js @@ -0,0 +1,228 @@ +// META: script=../../../service-workers/service-worker/resources/test-helpers.sub.js +// META: script=/common/utils.js +// META: script=/common/get-host-info.sub.js +// META: script=resources/utils.js + +const { REMOTE_HOST } = get_host_info(); +const BASE_SCOPE = 'resources/basic.html?'; + +async function cleanup() { + for (const iframe of document.querySelectorAll('.test-iframe')) { + iframe.parentNode.removeChild(iframe); + } + + for (const reg of await navigator.serviceWorker.getRegistrations()) { + await reg.unregister(); + } +} + +async function setupRegistration(t, scope) { + await cleanup(); + const reg = await navigator.serviceWorker.register('resources/range-sw.js', { scope }); + await wait_for_state(t, reg.installing, 'activated'); + return reg; +} + +function awaitMessage(obj, id) { + return new Promise(resolve => { + obj.addEventListener('message', function listener(event) { + if (event.data.id !== id) return; + obj.removeEventListener('message', listener); + resolve(event.data); + }); + }); +} + +promise_test(async t => { + const scope = BASE_SCOPE + Math.random(); + const reg = await setupRegistration(t, scope); + const iframe = await with_iframe(scope); + const w = iframe.contentWindow; + + // Trigger a cross-origin range request using media + const url = new URL('long-wav.py?action=range-header-filter-test', w.location); + url.hostname = REMOTE_HOST; + appendAudio(w.document, url); + + // See rangeHeaderFilterTest in resources/range-sw.js + await fetch_tests_from_worker(reg.active); +}, `Defer range header filter tests to service worker`); + +promise_test(async t => { + const scope = BASE_SCOPE + Math.random(); + const reg = await setupRegistration(t, scope); + const iframe = await with_iframe(scope); + const w = iframe.contentWindow; + + // Trigger a cross-origin range request using media + const url = new URL('long-wav.py', w.location); + url.searchParams.set('action', 'range-header-passthrough-test'); + url.searchParams.set('range-received-key', token()); + url.hostname = REMOTE_HOST; + appendAudio(w.document, url); + + // See rangeHeaderPassthroughTest in resources/range-sw.js + await fetch_tests_from_worker(reg.active); +}, `Defer range header passthrough tests to service worker`); + +promise_test(async t => { + const scope = BASE_SCOPE + Math.random(); + await setupRegistration(t, scope); + const iframe = await with_iframe(scope); + const w = iframe.contentWindow; + const id = Math.random() + ''; + const storedRangeResponse = awaitMessage(w.navigator.serviceWorker, id); + + // Trigger a cross-origin range request using media + const url = new URL('partial-script.py', w.location); + url.searchParams.set('require-range', '1'); + url.searchParams.set('action', 'store-ranged-response'); + url.searchParams.set('id', id); + url.hostname = REMOTE_HOST; + + appendAudio(w.document, url); + + await storedRangeResponse; + + // Fetching should reject + const fetchPromise = w.fetch('?action=use-stored-ranged-response', { mode: 'no-cors' }); + await promise_rejects_js(t, w.TypeError, fetchPromise); + + // Script loading should error too + const loadScriptPromise = loadScript('?action=use-stored-ranged-response', { doc: w.document }); + await promise_rejects_js(t, Error, loadScriptPromise); + + await loadScriptPromise.catch(() => {}); + + assert_false(!!w.scriptExecuted, `Partial response shouldn't be 
executed`); +}, `Ranged response not allowed following no-cors ranged request`); + +promise_test(async t => { + const scope = BASE_SCOPE + Math.random(); + await setupRegistration(t, scope); + const iframe = await with_iframe(scope); + const w = iframe.contentWindow; + const id = Math.random() + ''; + const storedRangeResponse = awaitMessage(w.navigator.serviceWorker, id); + + // Trigger a range request using media + const url = new URL('partial-script.py', w.location); + url.searchParams.set('require-range', '1'); + url.searchParams.set('action', 'store-ranged-response'); + url.searchParams.set('id', id); + + appendAudio(w.document, url); + + await storedRangeResponse; + + // This should not throw + await w.fetch('?action=use-stored-ranged-response'); + + // This shouldn't throw either + await loadScript('?action=use-stored-ranged-response', { doc: w.document }); + + assert_true(w.scriptExecuted, `Partial response should be executed`); +}, `Non-opaque ranged response executed`); + +promise_test(async t => { + const scope = BASE_SCOPE + Math.random(); + await setupRegistration(t, scope); + const iframe = await with_iframe(scope); + const w = iframe.contentWindow; + const fetchId = Math.random() + ''; + const fetchBroadcast = awaitMessage(w.navigator.serviceWorker, fetchId); + const audioId = Math.random() + ''; + const audioBroadcast = awaitMessage(w.navigator.serviceWorker, audioId); + + const url = new URL('long-wav.py', w.location); + url.searchParams.set('action', 'broadcast-accept-encoding'); + url.searchParams.set('id', fetchId); + + await w.fetch(url, { + headers: { Range: 'bytes=0-10' } + }); + + assert_equals((await fetchBroadcast).acceptEncoding, null, "Accept-Encoding should not be set for fetch"); + + url.searchParams.set('id', audioId); + appendAudio(w.document, url); + + assert_equals((await audioBroadcast).acceptEncoding, null, "Accept-Encoding should not be set for media"); +}, `Accept-Encoding should not appear in a service worker`); + +promise_test(async t => { + const scope = BASE_SCOPE + Math.random(); + await setupRegistration(t, scope); + const iframe = await with_iframe(scope); + const w = iframe.contentWindow; + const length = 100; + const count = 3; + const counts = {}; + + // test a single range request size + async function testSizedRange(size, partialResponseCode) { + const rangeId = Math.random() + ''; + const rangeBroadcast = awaitMessage(w.navigator.serviceWorker, rangeId); + + // Create a bogus audio element to trick the browser into sending + // cross-origin range requests that can be manipulated by the service worker. + const sound_url = new URL('partial-text.py', w.location); + sound_url.hostname = REMOTE_HOST; + sound_url.searchParams.set('action', 'record-media-range-request'); + sound_url.searchParams.set('length', length); + sound_url.searchParams.set('size', size); + sound_url.searchParams.set('partial', partialResponseCode); + sound_url.searchParams.set('id', rangeId); + sound_url.searchParams.set('type', 'audio/mp4'); + appendAudio(w.document, sound_url); + + // wait for the range requests to happen + await rangeBroadcast; + + // Create multiple preload requests and count the number of resource timing + // entries that get created to make sure 206 and 416 range responses are treated + // the same. 
+ const url = new URL('partial-text.py', w.location); + url.searchParams.set('action', 'use-media-range-request'); + url.searchParams.set('size', size); + url.searchParams.set('type', 'audio/mp4'); + counts['size' + size] = 0; + for (let i = 0; i < count; i++) { + await preloadImage(url, { doc: w.document }); + } + } + + // Test range requests from 1 smaller than the correct size to 1 larger than + // the correct size to exercise the various permutations using the default 206 + // response code for successful range requests. + for (let size = length - 1; size <= length + 1; size++) { + await testSizedRange(size, '206'); + } + + // Test a successful range request using a 200 response. + await testSizedRange(length - 2, '200'); + + // Check the resource timing entries and count the reported number of fetches of each type + const resources = w.performance.getEntriesByType("resource"); + for (const entry of resources) { + const url = new URL(entry.name); + if (url.searchParams.has('action') && + url.searchParams.get('action') == 'use-media-range-request' && + url.searchParams.has('size')) { + counts['size' + url.searchParams.get('size')]++; + } + } + + // Make sure there are a non-zero number of preload requests and they are all the same + let counts_valid = true; + const first = 'size' + (length - 2); + for (let size = length - 2; size <= length + 1; size++) { + let key = 'size' + size; + if (!(key in counts) || counts[key] <= 0 || counts[key] != counts[first]) { + counts_valid = false; + break; + } + } + + assert_true(counts_valid, `Opaque range request preloads were different for error and success`); +}, `Opaque range preload successes and failures should be indistinguishable`); diff --git a/test/wpt/tests/fetch/redirect-navigate/302-found-post-handler.py b/test/wpt/tests/fetch/redirect-navigate/302-found-post-handler.py new file mode 100644 index 0000000..40a224f --- /dev/null +++ b/test/wpt/tests/fetch/redirect-navigate/302-found-post-handler.py @@ -0,0 +1,15 @@ +from wptserve.utils import isomorphic_encode + +def main(request, response): + if request.method == u"POST": + response.add_required_headers = False + response.writer.write_status(302) + response.writer.write_header(b"Location", isomorphic_encode(request.url)) + response.writer.end_headers() + response.writer.write(b"") + elif request.method == u"GET": + return ([(b"Content-Type", b"text/plain")], + b"OK") + else: + return ([(b"Content-Type", b"text/plain")], + b"FAIL") \ No newline at end of file diff --git a/test/wpt/tests/fetch/redirect-navigate/302-found-post.html b/test/wpt/tests/fetch/redirect-navigate/302-found-post.html new file mode 100644 index 0000000..854cd32 --- /dev/null +++ b/test/wpt/tests/fetch/redirect-navigate/302-found-post.html @@ -0,0 +1,20 @@ + + +HTTP 302 Found POST Navigation Test + + + + + diff --git a/test/wpt/tests/fetch/redirect-navigate/preserve-fragment.html b/test/wpt/tests/fetch/redirect-navigate/preserve-fragment.html new file mode 100644 index 0000000..682539a --- /dev/null +++ b/test/wpt/tests/fetch/redirect-navigate/preserve-fragment.html @@ -0,0 +1,202 @@ + + + + + Ensure fragment is kept across redirects + + + + + + + + + + + + + + diff --git a/test/wpt/tests/fetch/redirect-navigate/resources/destination.html b/test/wpt/tests/fetch/redirect-navigate/resources/destination.html new file mode 100644 index 0000000..f98c5a8 --- /dev/null +++ b/test/wpt/tests/fetch/redirect-navigate/resources/destination.html @@ -0,0 +1,28 @@ + + + + + + + + +

Target

+

Target

+ + diff --git a/test/wpt/tests/fetch/redirects/data.window.js b/test/wpt/tests/fetch/redirects/data.window.js new file mode 100644 index 0000000..eeb4196 --- /dev/null +++ b/test/wpt/tests/fetch/redirects/data.window.js @@ -0,0 +1,25 @@ +// See ../api/redirect/redirect-to-dataurl.any.js for fetch() tests + +async_test(t => { + const img = document.createElement("img"); + img.onload = t.unreached_func(); + img.onerror = t.step_func_done(); + img.src = "../api/resources/redirect.py?location=data:image/png%3Bbase64,iVBORw0KGgoAAAANSUhEUgAAAIUAAABqCAIAAAAdqgU8AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAF6SURBVHhe7dNBDQAADIPA%2Bje92eBxSQUQSLedlQzo0TLQonFWPVoGWjT%2BoUfLQIvGP/RoGWjR%2BIceLQMtGv/Qo2WgReMferQMtGj8Q4%2BWgRaNf%2BjRMtCi8Q89WgZaNP6hR8tAi8Y/9GgZaNH4hx4tAy0a/9CjZaBF4x96tAy0aPxDj5aBFo1/6NEy0KLxDz1aBlo0/qFHy0CLxj/0aBlo0fiHHi0DLRr/0KNloEXjH3q0DLRo/EOPloEWjX/o0TLQovEPPVoGWjT%2BoUfLQIvGP/RoGWjR%2BIceLQMtGv/Qo2WgReMferQMtGj8Q4%2BWgRaNf%2BjRMtCi8Q89WgZaNP6hR8tAi8Y/9GgZaNH4hx4tAy0a/9CjZaBF4x96tAy0aPxDj5aBFo1/6NEy0KLxDz1aBlo0/qFHy0CLxj/0aBlo0fiHHi0DLRr/0KNloEXjH3q0DLRo/EOPloEWjX/o0TLQovEPPVoGWjT%2BoUfLQIvGP/RoGWjR%2BIceJQMPIOzeGc0PIDEAAAAASUVORK5CYII"; +}, " fetch that redirects to data: URL"); + +globalThis.globalTest = null; +async_test(t => { + globalThis.globalTest = t; + const script = document.createElement("script"); + script.src = "../api/resources/redirect.py?location=data:text/javascript,(globalThis.globalTest.unreached_func())()"; + script.onerror = t.step_func_done(); + document.body.append(script); +}, " + + +
+ + + + + + + diff --git a/test/wpt/tests/fetch/security/1xx-response.any.js b/test/wpt/tests/fetch/security/1xx-response.any.js new file mode 100644 index 0000000..df4dafc --- /dev/null +++ b/test/wpt/tests/fetch/security/1xx-response.any.js @@ -0,0 +1,28 @@ +promise_test(async (t) => { + // The 100 response should be ignored, then the transaction ends, which + // should lead to an error. + await promise_rejects_js( + t, TypeError, fetch('/common/text-plain.txt?pipe=status(100)')); +}, 'Status(100) should be ignored.'); + +// This behavior is being discussed at https://github.com/whatwg/fetch/issues/1397. +promise_test(async (t) => { + const res = await fetch('/common/text-plain.txt?pipe=status(101)'); + assert_equals(res.status, 101); + const body = await res.text(); + assert_equals(body, ''); +}, 'Status(101) should be accepted, with removing body.'); + +promise_test(async (t) => { + // The 103 response should be ignored, then the transaction ends, which + // should lead to an error. + await promise_rejects_js( + t, TypeError, fetch('/common/text-plain.txt?pipe=status(103)')); +}, 'Status(103) should be ignored.'); + +promise_test(async (t) => { + // The 199 response should be ignored, then the transaction ends, which + // should lead to an error. + await promise_rejects_js( + t, TypeError, fetch('/common/text-plain.txt?pipe=status(199)')); +}, 'Status(199) should be ignored.'); diff --git a/test/wpt/tests/fetch/security/dangling-markup-mitigation-data-url.tentative.sub.html b/test/wpt/tests/fetch/security/dangling-markup-mitigation-data-url.tentative.sub.html new file mode 100644 index 0000000..f27735d --- /dev/null +++ b/test/wpt/tests/fetch/security/dangling-markup-mitigation-data-url.tentative.sub.html @@ -0,0 +1,229 @@ + + + + + diff --git a/test/wpt/tests/fetch/security/dangling-markup-mitigation.tentative.html b/test/wpt/tests/fetch/security/dangling-markup-mitigation.tentative.html new file mode 100644 index 0000000..61a9316 --- /dev/null +++ b/test/wpt/tests/fetch/security/dangling-markup-mitigation.tentative.html @@ -0,0 +1,147 @@ + + + + + diff --git a/test/wpt/tests/fetch/security/embedded-credentials.tentative.sub.html b/test/wpt/tests/fetch/security/embedded-credentials.tentative.sub.html new file mode 100644 index 0000000..ca5ee1c --- /dev/null +++ b/test/wpt/tests/fetch/security/embedded-credentials.tentative.sub.html @@ -0,0 +1,89 @@ + + + + + diff --git a/test/wpt/tests/fetch/security/redirect-to-url-with-credentials.https.html b/test/wpt/tests/fetch/security/redirect-to-url-with-credentials.https.html new file mode 100644 index 0000000..b064648 --- /dev/null +++ b/test/wpt/tests/fetch/security/redirect-to-url-with-credentials.https.html @@ -0,0 +1,68 @@ + +
+ + + +
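As a related illustration of how the platform treats credentials embedded in URLs, fetch() itself refuses request URLs that carry a username or password. A hedged sketch, assuming testharness.js; the host below is illustrative:

// Hedged sketch: constructing a request from a URL with embedded credentials
// throws a TypeError, so the fetch() promise rejects.
promise_test(async (t) => {
  await promise_rejects_js(
      t, TypeError, fetch("https://user:pass@example.com/"));
}, "fetch() with credentials embedded in the URL rejects");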
+ + + + diff --git a/test/wpt/tests/fetch/security/support/embedded-credential-window.sub.html b/test/wpt/tests/fetch/security/support/embedded-credential-window.sub.html new file mode 100644 index 0000000..20d307e --- /dev/null +++ b/test/wpt/tests/fetch/security/support/embedded-credential-window.sub.html @@ -0,0 +1,19 @@ + + diff --git a/test/wpt/tests/fetch/stale-while-revalidate/fetch-sw.https.html b/test/wpt/tests/fetch/stale-while-revalidate/fetch-sw.https.html new file mode 100644 index 0000000..efcebc2 --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/fetch-sw.https.html @@ -0,0 +1,65 @@ + + + + + Stale Revalidation Requests don't get sent to service worker + + + + + + + + + diff --git a/test/wpt/tests/fetch/stale-while-revalidate/fetch.any.js b/test/wpt/tests/fetch/stale-while-revalidate/fetch.any.js new file mode 100644 index 0000000..3682b9d --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/fetch.any.js @@ -0,0 +1,32 @@ +// META: global=window,worker +// META: title=Tests Stale While Revalidate is executed for fetch API +// META: script=/common/utils.js + +function wait25ms(test) { + return new Promise(resolve => { + test.step_timeout(() => { + resolve(); + }, 25); + }); +} + +promise_test(async (test) => { + var request_token = token(); + + const response = await fetch(`resources/stale-script.py?token=` + request_token); + // Wait until resource is completely fetched to allow caching before next fetch. + const body = await response.text(); + const response2 = await fetch(`resources/stale-script.py?token=` + request_token); + + assert_equals(response.headers.get('Unique-Id'), response2.headers.get('Unique-Id')); + const body2 = await response2.text(); + assert_equals(body, body2); + + while(true) { + const revalidation_check = await fetch(`resources/stale-script.py?query&token=` + request_token); + if (revalidation_check.headers.get('Count') == '2') { + break; + } + await wait25ms(test); + } +}, 'Second fetch returns same response'); diff --git a/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-css.py b/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-css.py new file mode 100644 index 0000000..b876683 --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-css.py @@ -0,0 +1,28 @@ +def main(request, response): + + token = request.GET.first(b"token", None) + is_query = request.GET.first(b"query", None) != None + with request.server.stash.lock: + value = request.server.stash.take(token) + count = 0 + if value != None: + count = int(value) + if is_query: + if count < 2: + request.server.stash.put(token, count) + else: + count = count + 1 + request.server.stash.put(token, count) + if is_query: + headers = [(b"Count", count)] + content = b"" + return 200, headers, content + else: + content = b"body { background: rgb(0, 128, 0); }" + if count > 1: + content = b"body { background: rgb(255, 0, 0); }" + + headers = [(b"Content-Type", b"text/css"), + (b"Cache-Control", b"private, max-age=0, stale-while-revalidate=60")] + + return 200, headers, content diff --git a/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-image.py b/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-image.py new file mode 100644 index 0000000..36e6fc0 --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-image.py @@ -0,0 +1,40 @@ +import os.path + +from wptserve.utils import isomorphic_decode + +def main(request, response): + + token = request.GET.first(b"token", None) + is_query = 
request.GET.first(b"query", None) != None + with request.server.stash.lock: + value = request.server.stash.take(token) + count = 0 + if value != None: + count = int(value) + if is_query: + if count < 2: + request.server.stash.put(token, count) + else: + count = count + 1 + request.server.stash.put(token, count) + + if is_query: + headers = [(b"Count", count)] + content = b"" + return 200, headers, content + else: + filename = u"green-16x16.png" + if count > 1: + filename = u"green-256x256.png" + + path = os.path.join(os.path.dirname(isomorphic_decode(__file__)), u"../../../images", filename) + body = open(path, "rb").read() + + response.add_required_headers = False + response.writer.write_status(200) + response.writer.write_header(b"content-length", len(body)) + response.writer.write_header(b"Cache-Control", b"private, max-age=0, stale-while-revalidate=60") + response.writer.write_header(b"content-type", b"image/png") + response.writer.end_headers() + + response.writer.write(body) diff --git a/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-script.py b/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-script.py new file mode 100644 index 0000000..731cd80 --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/resources/stale-script.py @@ -0,0 +1,32 @@ +import random, string + +def id_token(): + letters = string.ascii_lowercase + return b''.join(random.choice(letters).encode("utf-8") for i in range(20)) + +def main(request, response): + token = request.GET.first(b"token", None) + is_query = request.GET.first(b"query", None) != None + with request.server.stash.lock: + value = request.server.stash.take(token) + count = 0 + if value != None: + count = int(value) + if is_query: + if count < 2: + request.server.stash.put(token, count) + else: + count = count + 1 + request.server.stash.put(token, count) + + if is_query: + headers = [(b"Count", count)] + content = u"" + return 200, headers, content + else: + unique_id = id_token() + headers = [(b"Content-Type", b"text/javascript"), + (b"Cache-Control", b"private, max-age=0, stale-while-revalidate=60"), + (b"Unique-Id", unique_id)] + content = b"report('%s')" % unique_id + return 200, headers, content diff --git a/test/wpt/tests/fetch/stale-while-revalidate/revalidate-not-blocked-by-csp.html b/test/wpt/tests/fetch/stale-while-revalidate/revalidate-not-blocked-by-csp.html new file mode 100644 index 0000000..ea70b9a --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/revalidate-not-blocked-by-csp.html @@ -0,0 +1,69 @@ + + +Test revalidations requests aren't blocked by CSP. 
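The stale-*.py handlers above share one pattern: called with only a token they serve the resource and bump a per-token counter in the wptserve stash, and called with query they report that counter in a Count header. A hedged helper sketch for driving the query mode from a test page, mirroring the polling loop in fetch.any.js (token() comes from /common/utils.js; the helper name is illustrative):

// Hypothetical helper: poll the handler's query mode until the server has
// also seen the background revalidation request (Count reaches 2).
async function waitForRevalidation(test, resource, requestToken) {
  while (true) {
    const check = await fetch(`${resource}?query&token=${requestToken}`);
    if (check.headers.get('Count') === '2') {
      return;
    }
    await new Promise(resolve => test.step_timeout(resolve, 25));
  }
}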
+ + + + + + diff --git a/test/wpt/tests/fetch/stale-while-revalidate/stale-css.html b/test/wpt/tests/fetch/stale-while-revalidate/stale-css.html new file mode 100644 index 0000000..603a60c --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/stale-css.html @@ -0,0 +1,51 @@ + + +Tests Stale While Revalidate works for css + + + + + + diff --git a/test/wpt/tests/fetch/stale-while-revalidate/stale-image.html b/test/wpt/tests/fetch/stale-while-revalidate/stale-image.html new file mode 100644 index 0000000..d86bdfb --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/stale-image.html @@ -0,0 +1,55 @@ + + +Tests Stale While Revalidate works for images + + + + + + + + + diff --git a/test/wpt/tests/fetch/stale-while-revalidate/stale-script.html b/test/wpt/tests/fetch/stale-while-revalidate/stale-script.html new file mode 100644 index 0000000..f531748 --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/stale-script.html @@ -0,0 +1,59 @@ + + +Tests Stale While Revalidate works for scripts + + + + + + diff --git a/test/wpt/tests/fetch/stale-while-revalidate/sw-intercept.js b/test/wpt/tests/fetch/stale-while-revalidate/sw-intercept.js new file mode 100644 index 0000000..dca7de5 --- /dev/null +++ b/test/wpt/tests/fetch/stale-while-revalidate/sw-intercept.js @@ -0,0 +1,14 @@ +async function broadcast(msg) { + for (const client of await clients.matchAll()) { + client.postMessage(msg); + } +} + +self.addEventListener('fetch', event => { + event.waitUntil(broadcast(event.request.url)); + event.respondWith(fetch(event.request)); +}); + +self.addEventListener('activate', event => { + self.clients.claim(); +}); diff --git a/test/wpt/tests/interfaces/ANGLE_instanced_arrays.idl b/test/wpt/tests/interfaces/ANGLE_instanced_arrays.idl new file mode 100644 index 0000000..557a416 --- /dev/null +++ b/test/wpt/tests/interfaces/ANGLE_instanced_arrays.idl @@ -0,0 +1,12 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL ANGLE_instanced_arrays Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/ANGLE_instanced_arrays/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface ANGLE_instanced_arrays { + const GLenum VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE = 0x88FE; + undefined drawArraysInstancedANGLE(GLenum mode, GLint first, GLsizei count, GLsizei primcount); + undefined drawElementsInstancedANGLE(GLenum mode, GLsizei count, GLenum type, GLintptr offset, GLsizei primcount); + undefined vertexAttribDivisorANGLE(GLuint index, GLuint divisor); +}; diff --git a/test/wpt/tests/interfaces/CSP.idl b/test/wpt/tests/interfaces/CSP.idl new file mode 100644 index 0000000..ac0a6ff --- /dev/null +++ b/test/wpt/tests/interfaces/CSP.idl @@ -0,0 +1,56 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Content Security Policy Level 3 (https://w3c.github.io/webappsec-csp/) + +[Exposed=Window] +interface CSPViolationReportBody : ReportBody { + [Default] object toJSON(); + readonly attribute USVString documentURL; + readonly attribute USVString? referrer; + readonly attribute USVString? blockedURL; + readonly attribute DOMString effectiveDirective; + readonly attribute DOMString originalPolicy; + readonly attribute USVString? sourceFile; + readonly attribute DOMString? 
sample; + readonly attribute SecurityPolicyViolationEventDisposition disposition; + readonly attribute unsigned short statusCode; + readonly attribute unsigned long? lineNumber; + readonly attribute unsigned long? columnNumber; +}; + +enum SecurityPolicyViolationEventDisposition { + "enforce", "report" +}; + +[Exposed=(Window,Worker)] +interface SecurityPolicyViolationEvent : Event { + constructor(DOMString type, optional SecurityPolicyViolationEventInit eventInitDict = {}); + readonly attribute USVString documentURI; + readonly attribute USVString referrer; + readonly attribute USVString blockedURI; + readonly attribute DOMString effectiveDirective; + readonly attribute DOMString violatedDirective; // historical alias of effectiveDirective + readonly attribute DOMString originalPolicy; + readonly attribute USVString sourceFile; + readonly attribute DOMString sample; + readonly attribute SecurityPolicyViolationEventDisposition disposition; + readonly attribute unsigned short statusCode; + readonly attribute unsigned long lineNumber; + readonly attribute unsigned long columnNumber; +}; + +dictionary SecurityPolicyViolationEventInit : EventInit { + required USVString documentURI; + USVString referrer = ""; + USVString blockedURI = ""; + required DOMString violatedDirective; + required DOMString effectiveDirective; + required DOMString originalPolicy; + USVString sourceFile = ""; + DOMString sample = ""; + required SecurityPolicyViolationEventDisposition disposition; + required unsigned short statusCode; + unsigned long lineNumber = 0; + unsigned long columnNumber = 0; +}; diff --git a/test/wpt/tests/interfaces/DOM-Parsing.idl b/test/wpt/tests/interfaces/DOM-Parsing.idl new file mode 100644 index 0000000..d0d84ab --- /dev/null +++ b/test/wpt/tests/interfaces/DOM-Parsing.idl @@ -0,0 +1,26 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: DOM Parsing and Serialization (https://w3c.github.io/DOM-Parsing/) + +[Exposed=Window] +interface XMLSerializer { + constructor(); + DOMString serializeToString(Node root); +}; + +interface mixin InnerHTML { + [CEReactions] attribute [LegacyNullToEmptyString] DOMString innerHTML; +}; + +Element includes InnerHTML; +ShadowRoot includes InnerHTML; + +partial interface Element { + [CEReactions] attribute [LegacyNullToEmptyString] DOMString outerHTML; + [CEReactions] undefined insertAdjacentHTML(DOMString position, DOMString text); +}; + +partial interface Range { + [CEReactions, NewObject] DocumentFragment createContextualFragment(DOMString fragment); +}; diff --git a/test/wpt/tests/interfaces/EXT_blend_minmax.idl b/test/wpt/tests/interfaces/EXT_blend_minmax.idl new file mode 100644 index 0000000..fd7d26e --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_blend_minmax.idl @@ -0,0 +1,10 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_blend_minmax Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_blend_minmax/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_blend_minmax { + const GLenum MIN_EXT = 0x8007; + const GLenum MAX_EXT = 0x8008; +}; diff --git a/test/wpt/tests/interfaces/EXT_color_buffer_float.idl b/test/wpt/tests/interfaces/EXT_color_buffer_float.idl new file mode 100644 index 0000000..09bd397 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_color_buffer_float.idl @@ -0,0 +1,8 @@ +// 
GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_color_buffer_float Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_color_buffer_float/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_color_buffer_float { +}; // interface EXT_color_buffer_float diff --git a/test/wpt/tests/interfaces/EXT_color_buffer_half_float.idl b/test/wpt/tests/interfaces/EXT_color_buffer_half_float.idl new file mode 100644 index 0000000..7197e44 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_color_buffer_half_float.idl @@ -0,0 +1,12 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_color_buffer_half_float Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_color_buffer_half_float/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_color_buffer_half_float { + const GLenum RGBA16F_EXT = 0x881A; + const GLenum RGB16F_EXT = 0x881B; + const GLenum FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE_EXT = 0x8211; + const GLenum UNSIGNED_NORMALIZED_EXT = 0x8C17; +}; // interface EXT_color_buffer_half_float diff --git a/test/wpt/tests/interfaces/EXT_disjoint_timer_query.idl b/test/wpt/tests/interfaces/EXT_disjoint_timer_query.idl new file mode 100644 index 0000000..cf0c8d9 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_disjoint_timer_query.idl @@ -0,0 +1,30 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_disjoint_timer_query Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_disjoint_timer_query/) + +typedef unsigned long long GLuint64EXT; + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WebGLTimerQueryEXT : WebGLObject { +}; + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_disjoint_timer_query { + const GLenum QUERY_COUNTER_BITS_EXT = 0x8864; + const GLenum CURRENT_QUERY_EXT = 0x8865; + const GLenum QUERY_RESULT_EXT = 0x8866; + const GLenum QUERY_RESULT_AVAILABLE_EXT = 0x8867; + const GLenum TIME_ELAPSED_EXT = 0x88BF; + const GLenum TIMESTAMP_EXT = 0x8E28; + const GLenum GPU_DISJOINT_EXT = 0x8FBB; + + WebGLTimerQueryEXT? createQueryEXT(); + undefined deleteQueryEXT(WebGLTimerQueryEXT? query); + [WebGLHandlesContextLoss] boolean isQueryEXT(WebGLTimerQueryEXT? 
query); + undefined beginQueryEXT(GLenum target, WebGLTimerQueryEXT query); + undefined endQueryEXT(GLenum target); + undefined queryCounterEXT(WebGLTimerQueryEXT query, GLenum target); + any getQueryEXT(GLenum target, GLenum pname); + any getQueryObjectEXT(WebGLTimerQueryEXT query, GLenum pname); +}; diff --git a/test/wpt/tests/interfaces/EXT_disjoint_timer_query_webgl2.idl b/test/wpt/tests/interfaces/EXT_disjoint_timer_query_webgl2.idl new file mode 100644 index 0000000..689203c --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_disjoint_timer_query_webgl2.idl @@ -0,0 +1,14 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_disjoint_timer_query_webgl2 Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_disjoint_timer_query_webgl2/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_disjoint_timer_query_webgl2 { + const GLenum QUERY_COUNTER_BITS_EXT = 0x8864; + const GLenum TIME_ELAPSED_EXT = 0x88BF; + const GLenum TIMESTAMP_EXT = 0x8E28; + const GLenum GPU_DISJOINT_EXT = 0x8FBB; + + undefined queryCounterEXT(WebGLQuery query, GLenum target); +}; diff --git a/test/wpt/tests/interfaces/EXT_float_blend.idl b/test/wpt/tests/interfaces/EXT_float_blend.idl new file mode 100644 index 0000000..58ec47e --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_float_blend.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_float_blend Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_float_blend/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_float_blend { +}; // interface EXT_float_blend diff --git a/test/wpt/tests/interfaces/EXT_frag_depth.idl b/test/wpt/tests/interfaces/EXT_frag_depth.idl new file mode 100644 index 0000000..1ae6896 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_frag_depth.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_frag_depth Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_frag_depth/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_frag_depth { +}; diff --git a/test/wpt/tests/interfaces/EXT_sRGB.idl b/test/wpt/tests/interfaces/EXT_sRGB.idl new file mode 100644 index 0000000..3c03c33 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_sRGB.idl @@ -0,0 +1,12 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_sRGB Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_sRGB/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_sRGB { + const GLenum SRGB_EXT = 0x8C40; + const GLenum SRGB_ALPHA_EXT = 0x8C42; + const GLenum SRGB8_ALPHA8_EXT = 0x8C43; + const GLenum FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING_EXT = 0x8210; +}; diff --git a/test/wpt/tests/interfaces/EXT_shader_texture_lod.idl b/test/wpt/tests/interfaces/EXT_shader_texture_lod.idl new file mode 100644 index 0000000..13df26c --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_shader_texture_lod.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_shader_texture_lod 
Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_shader_texture_lod/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_shader_texture_lod { +}; diff --git a/test/wpt/tests/interfaces/EXT_texture_compression_bptc.idl b/test/wpt/tests/interfaces/EXT_texture_compression_bptc.idl new file mode 100644 index 0000000..2772980 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_texture_compression_bptc.idl @@ -0,0 +1,12 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_texture_compression_bptc Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_texture_compression_bptc/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_texture_compression_bptc { + const GLenum COMPRESSED_RGBA_BPTC_UNORM_EXT = 0x8E8C; + const GLenum COMPRESSED_SRGB_ALPHA_BPTC_UNORM_EXT = 0x8E8D; + const GLenum COMPRESSED_RGB_BPTC_SIGNED_FLOAT_EXT = 0x8E8E; + const GLenum COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_EXT = 0x8E8F; +}; diff --git a/test/wpt/tests/interfaces/EXT_texture_compression_rgtc.idl b/test/wpt/tests/interfaces/EXT_texture_compression_rgtc.idl new file mode 100644 index 0000000..f12b962 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_texture_compression_rgtc.idl @@ -0,0 +1,12 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_texture_compression_rgtc Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_texture_compression_rgtc/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_texture_compression_rgtc { + const GLenum COMPRESSED_RED_RGTC1_EXT = 0x8DBB; + const GLenum COMPRESSED_SIGNED_RED_RGTC1_EXT = 0x8DBC; + const GLenum COMPRESSED_RED_GREEN_RGTC2_EXT = 0x8DBD; + const GLenum COMPRESSED_SIGNED_RED_GREEN_RGTC2_EXT = 0x8DBE; +}; diff --git a/test/wpt/tests/interfaces/EXT_texture_filter_anisotropic.idl b/test/wpt/tests/interfaces/EXT_texture_filter_anisotropic.idl new file mode 100644 index 0000000..5c78bfa --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_texture_filter_anisotropic.idl @@ -0,0 +1,10 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_texture_filter_anisotropic Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_texture_filter_anisotropic/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_texture_filter_anisotropic { + const GLenum TEXTURE_MAX_ANISOTROPY_EXT = 0x84FE; + const GLenum MAX_TEXTURE_MAX_ANISOTROPY_EXT = 0x84FF; +}; diff --git a/test/wpt/tests/interfaces/EXT_texture_norm16.idl b/test/wpt/tests/interfaces/EXT_texture_norm16.idl new file mode 100644 index 0000000..1fe5ed8 --- /dev/null +++ b/test/wpt/tests/interfaces/EXT_texture_norm16.idl @@ -0,0 +1,16 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL EXT_texture_norm16 Extension Specification (https://registry.khronos.org/webgl/extensions/EXT_texture_norm16/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface EXT_texture_norm16 { + const GLenum R16_EXT = 0x822A; + const GLenum RG16_EXT = 0x822C; + const GLenum RGB16_EXT = 0x8054; + const GLenum RGBA16_EXT = 0x805B; + const GLenum R16_SNORM_EXT = 0x8F98; + const 
GLenum RG16_SNORM_EXT = 0x8F99; + const GLenum RGB16_SNORM_EXT = 0x8F9A; + const GLenum RGBA16_SNORM_EXT = 0x8F9B; +}; diff --git a/test/wpt/tests/interfaces/FedCM.idl b/test/wpt/tests/interfaces/FedCM.idl new file mode 100644 index 0000000..8de87e8 --- /dev/null +++ b/test/wpt/tests/interfaces/FedCM.idl @@ -0,0 +1,67 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Federated Credential Management API (https://fedidcg.github.io/FedCM/) + +[Exposed=Window, SecureContext] +interface IdentityCredential : Credential { + readonly attribute USVString? token; +}; + +partial dictionary CredentialRequestOptions { + IdentityCredentialRequestOptions identity; +}; + +dictionary IdentityCredentialRequestOptions { + sequence providers; +}; + +dictionary IdentityProviderConfig { + required USVString configURL; + required USVString clientId; + USVString nonce; +}; + +dictionary IdentityProviderWellKnown { + required sequence provider_urls; +}; + +dictionary IdentityProviderIcon { + required USVString url; + unsigned long size; +}; + +dictionary IdentityProviderBranding { + USVString background_color; + USVString color; + sequence icons; + USVString name; +}; + +dictionary IdentityProviderAPIConfig { + required USVString accounts_endpoint; + required USVString client_metadata_endpoint; + required USVString id_assertion_endpoint; + IdentityProviderBranding branding; +}; + +dictionary IdentityProviderAccount { + required USVString id; + required USVString name; + required USVString email; + USVString given_name; + USVString picture; + sequence approved_clients; +}; +dictionary IdentityProviderAccountList { + sequence accounts; +}; + +dictionary IdentityProviderToken { + required USVString token; +}; + +dictionary IdentityProviderClientMetadata { + USVString privacy_policy_url; + USVString terms_of_service_url; +}; diff --git a/test/wpt/tests/interfaces/FileAPI.idl b/test/wpt/tests/interfaces/FileAPI.idl new file mode 100644 index 0000000..aee0e65 --- /dev/null +++ b/test/wpt/tests/interfaces/FileAPI.idl @@ -0,0 +1,100 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: File API (https://w3c.github.io/FileAPI/) + +[Exposed=(Window,Worker), Serializable] +interface Blob { + constructor(optional sequence blobParts, + optional BlobPropertyBag options = {}); + + readonly attribute unsigned long long size; + readonly attribute DOMString type; + + // slice Blob into byte-ranged chunks + Blob slice(optional [Clamp] long long start, + optional [Clamp] long long end, + optional DOMString contentType); + + // read from the Blob. + [NewObject] ReadableStream stream(); + [NewObject] Promise text(); + [NewObject] Promise arrayBuffer(); +}; + +enum EndingType { "transparent", "native" }; + +dictionary BlobPropertyBag { + DOMString type = ""; + EndingType endings = "transparent"; +}; + +typedef (BufferSource or Blob or USVString) BlobPart; + +[Exposed=(Window,Worker), Serializable] +interface File : Blob { + constructor(sequence fileBits, + USVString fileName, + optional FilePropertyBag options = {}); + readonly attribute DOMString name; + readonly attribute long long lastModified; +}; + +dictionary FilePropertyBag : BlobPropertyBag { + long long lastModified; +}; + +[Exposed=(Window,Worker), Serializable] +interface FileList { + getter File? 
item(unsigned long index); + readonly attribute unsigned long length; +}; + +[Exposed=(Window,Worker)] +interface FileReader: EventTarget { + constructor(); + // async read methods + undefined readAsArrayBuffer(Blob blob); + undefined readAsBinaryString(Blob blob); + undefined readAsText(Blob blob, optional DOMString encoding); + undefined readAsDataURL(Blob blob); + + undefined abort(); + + // states + const unsigned short EMPTY = 0; + const unsigned short LOADING = 1; + const unsigned short DONE = 2; + + readonly attribute unsigned short readyState; + + // File or Blob data + readonly attribute (DOMString or ArrayBuffer)? result; + + readonly attribute DOMException? error; + + // event handler content attributes + attribute EventHandler onloadstart; + attribute EventHandler onprogress; + attribute EventHandler onload; + attribute EventHandler onabort; + attribute EventHandler onerror; + attribute EventHandler onloadend; +}; + +[Exposed=(DedicatedWorker,SharedWorker)] +interface FileReaderSync { + constructor(); + // Synchronously return strings + + ArrayBuffer readAsArrayBuffer(Blob blob); + DOMString readAsBinaryString(Blob blob); + DOMString readAsText(Blob blob, optional DOMString encoding); + DOMString readAsDataURL(Blob blob); +}; + +[Exposed=(Window,DedicatedWorker,SharedWorker)] +partial interface URL { + static DOMString createObjectURL((Blob or MediaSource) obj); + static undefined revokeObjectURL(DOMString url); +}; diff --git a/test/wpt/tests/interfaces/IndexedDB.idl b/test/wpt/tests/interfaces/IndexedDB.idl new file mode 100644 index 0000000..d82391d --- /dev/null +++ b/test/wpt/tests/interfaces/IndexedDB.idl @@ -0,0 +1,226 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Indexed Database API 3.0 (https://w3c.github.io/IndexedDB/) + +[Exposed=(Window,Worker)] +interface IDBRequest : EventTarget { + readonly attribute any result; + readonly attribute DOMException? error; + readonly attribute (IDBObjectStore or IDBIndex or IDBCursor)? source; + readonly attribute IDBTransaction? transaction; + readonly attribute IDBRequestReadyState readyState; + + // Event handlers: + attribute EventHandler onsuccess; + attribute EventHandler onerror; +}; + +enum IDBRequestReadyState { + "pending", + "done" +}; + +[Exposed=(Window,Worker)] +interface IDBOpenDBRequest : IDBRequest { + // Event handlers: + attribute EventHandler onblocked; + attribute EventHandler onupgradeneeded; +}; + +[Exposed=(Window,Worker)] +interface IDBVersionChangeEvent : Event { + constructor(DOMString type, optional IDBVersionChangeEventInit eventInitDict = {}); + readonly attribute unsigned long long oldVersion; + readonly attribute unsigned long long? newVersion; +}; + +dictionary IDBVersionChangeEventInit : EventInit { + unsigned long long oldVersion = 0; + unsigned long long? 
newVersion = null; +}; + +partial interface mixin WindowOrWorkerGlobalScope { + [SameObject] readonly attribute IDBFactory indexedDB; +}; + +[Exposed=(Window,Worker)] +interface IDBFactory { + [NewObject] IDBOpenDBRequest open(DOMString name, + optional [EnforceRange] unsigned long long version); + [NewObject] IDBOpenDBRequest deleteDatabase(DOMString name); + + Promise> databases(); + + short cmp(any first, any second); +}; + +dictionary IDBDatabaseInfo { + DOMString name; + unsigned long long version; +}; + +[Exposed=(Window,Worker)] +interface IDBDatabase : EventTarget { + readonly attribute DOMString name; + readonly attribute unsigned long long version; + readonly attribute DOMStringList objectStoreNames; + + [NewObject] IDBTransaction transaction((DOMString or sequence) storeNames, + optional IDBTransactionMode mode = "readonly", + optional IDBTransactionOptions options = {}); + undefined close(); + + [NewObject] IDBObjectStore createObjectStore( + DOMString name, + optional IDBObjectStoreParameters options = {}); + undefined deleteObjectStore(DOMString name); + + // Event handlers: + attribute EventHandler onabort; + attribute EventHandler onclose; + attribute EventHandler onerror; + attribute EventHandler onversionchange; +}; + +enum IDBTransactionDurability { "default", "strict", "relaxed" }; + +dictionary IDBTransactionOptions { + IDBTransactionDurability durability = "default"; +}; + +dictionary IDBObjectStoreParameters { + (DOMString or sequence)? keyPath = null; + boolean autoIncrement = false; +}; + +[Exposed=(Window,Worker)] +interface IDBObjectStore { + attribute DOMString name; + readonly attribute any keyPath; + readonly attribute DOMStringList indexNames; + [SameObject] readonly attribute IDBTransaction transaction; + readonly attribute boolean autoIncrement; + + [NewObject] IDBRequest put(any value, optional any key); + [NewObject] IDBRequest add(any value, optional any key); + [NewObject] IDBRequest delete(any query); + [NewObject] IDBRequest clear(); + [NewObject] IDBRequest get(any query); + [NewObject] IDBRequest getKey(any query); + [NewObject] IDBRequest getAll(optional any query, + optional [EnforceRange] unsigned long count); + [NewObject] IDBRequest getAllKeys(optional any query, + optional [EnforceRange] unsigned long count); + [NewObject] IDBRequest count(optional any query); + + [NewObject] IDBRequest openCursor(optional any query, + optional IDBCursorDirection direction = "next"); + [NewObject] IDBRequest openKeyCursor(optional any query, + optional IDBCursorDirection direction = "next"); + + IDBIndex index(DOMString name); + + [NewObject] IDBIndex createIndex(DOMString name, + (DOMString or sequence) keyPath, + optional IDBIndexParameters options = {}); + undefined deleteIndex(DOMString name); +}; + +dictionary IDBIndexParameters { + boolean unique = false; + boolean multiEntry = false; +}; + +[Exposed=(Window,Worker)] +interface IDBIndex { + attribute DOMString name; + [SameObject] readonly attribute IDBObjectStore objectStore; + readonly attribute any keyPath; + readonly attribute boolean multiEntry; + readonly attribute boolean unique; + + [NewObject] IDBRequest get(any query); + [NewObject] IDBRequest getKey(any query); + [NewObject] IDBRequest getAll(optional any query, + optional [EnforceRange] unsigned long count); + [NewObject] IDBRequest getAllKeys(optional any query, + optional [EnforceRange] unsigned long count); + [NewObject] IDBRequest count(optional any query); + + [NewObject] IDBRequest openCursor(optional any query, + optional 
IDBCursorDirection direction = "next"); + [NewObject] IDBRequest openKeyCursor(optional any query, + optional IDBCursorDirection direction = "next"); +}; + +[Exposed=(Window,Worker)] +interface IDBKeyRange { + readonly attribute any lower; + readonly attribute any upper; + readonly attribute boolean lowerOpen; + readonly attribute boolean upperOpen; + + // Static construction methods: + [NewObject] static IDBKeyRange only(any value); + [NewObject] static IDBKeyRange lowerBound(any lower, optional boolean open = false); + [NewObject] static IDBKeyRange upperBound(any upper, optional boolean open = false); + [NewObject] static IDBKeyRange bound(any lower, + any upper, + optional boolean lowerOpen = false, + optional boolean upperOpen = false); + + boolean includes(any key); +}; + +[Exposed=(Window,Worker)] +interface IDBCursor { + readonly attribute (IDBObjectStore or IDBIndex) source; + readonly attribute IDBCursorDirection direction; + readonly attribute any key; + readonly attribute any primaryKey; + [SameObject] readonly attribute IDBRequest request; + + undefined advance([EnforceRange] unsigned long count); + undefined continue(optional any key); + undefined continuePrimaryKey(any key, any primaryKey); + + [NewObject] IDBRequest update(any value); + [NewObject] IDBRequest delete(); +}; + +enum IDBCursorDirection { + "next", + "nextunique", + "prev", + "prevunique" +}; + +[Exposed=(Window,Worker)] +interface IDBCursorWithValue : IDBCursor { + readonly attribute any value; +}; + +[Exposed=(Window,Worker)] +interface IDBTransaction : EventTarget { + readonly attribute DOMStringList objectStoreNames; + readonly attribute IDBTransactionMode mode; + readonly attribute IDBTransactionDurability durability; + [SameObject] readonly attribute IDBDatabase db; + readonly attribute DOMException? 
error; + + IDBObjectStore objectStore(DOMString name); + undefined commit(); + undefined abort(); + + // Event handlers: + attribute EventHandler onabort; + attribute EventHandler oncomplete; + attribute EventHandler onerror; +}; + +enum IDBTransactionMode { + "readonly", + "readwrite", + "versionchange" +}; diff --git a/test/wpt/tests/interfaces/KHR_parallel_shader_compile.idl b/test/wpt/tests/interfaces/KHR_parallel_shader_compile.idl new file mode 100644 index 0000000..1470965 --- /dev/null +++ b/test/wpt/tests/interfaces/KHR_parallel_shader_compile.idl @@ -0,0 +1,9 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL KHR_parallel_shader_compile Extension Specification (https://registry.khronos.org/webgl/extensions/KHR_parallel_shader_compile/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface KHR_parallel_shader_compile { + const GLenum COMPLETION_STATUS_KHR = 0x91B1; +}; diff --git a/test/wpt/tests/interfaces/META.yml b/test/wpt/tests/interfaces/META.yml new file mode 100644 index 0000000..c1dd8dd --- /dev/null +++ b/test/wpt/tests/interfaces/META.yml @@ -0,0 +1,2 @@ +suggested_reviewers: + - foolip diff --git a/test/wpt/tests/interfaces/OES_draw_buffers_indexed.idl b/test/wpt/tests/interfaces/OES_draw_buffers_indexed.idl new file mode 100644 index 0000000..ea1e217 --- /dev/null +++ b/test/wpt/tests/interfaces/OES_draw_buffers_indexed.idl @@ -0,0 +1,26 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_draw_buffers_indexed Extension Specification (https://registry.khronos.org/webgl/extensions/OES_draw_buffers_indexed/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_draw_buffers_indexed { + undefined enableiOES(GLenum target, GLuint index); + + undefined disableiOES(GLenum target, GLuint index); + + undefined blendEquationiOES(GLuint buf, GLenum mode); + + undefined blendEquationSeparateiOES(GLuint buf, + GLenum modeRGB, GLenum modeAlpha); + + undefined blendFunciOES(GLuint buf, + GLenum src, GLenum dst); + + undefined blendFuncSeparateiOES(GLuint buf, + GLenum srcRGB, GLenum dstRGB, + GLenum srcAlpha, GLenum dstAlpha); + + undefined colorMaskiOES(GLuint buf, + GLboolean r, GLboolean g, GLboolean b, GLboolean a); +}; diff --git a/test/wpt/tests/interfaces/OES_element_index_uint.idl b/test/wpt/tests/interfaces/OES_element_index_uint.idl new file mode 100644 index 0000000..df43a57 --- /dev/null +++ b/test/wpt/tests/interfaces/OES_element_index_uint.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_element_index_uint Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/OES_element_index_uint/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_element_index_uint { +}; diff --git a/test/wpt/tests/interfaces/OES_fbo_render_mipmap.idl b/test/wpt/tests/interfaces/OES_fbo_render_mipmap.idl new file mode 100644 index 0000000..608c392 --- /dev/null +++ b/test/wpt/tests/interfaces/OES_fbo_render_mipmap.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_fbo_render_mipmap Extension Specification 
(https://registry.khronos.org/webgl/extensions/OES_fbo_render_mipmap/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_fbo_render_mipmap { +}; diff --git a/test/wpt/tests/interfaces/OES_standard_derivatives.idl b/test/wpt/tests/interfaces/OES_standard_derivatives.idl new file mode 100644 index 0000000..7bf073a --- /dev/null +++ b/test/wpt/tests/interfaces/OES_standard_derivatives.idl @@ -0,0 +1,9 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_standard_derivatives Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/OES_standard_derivatives/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_standard_derivatives { + const GLenum FRAGMENT_SHADER_DERIVATIVE_HINT_OES = 0x8B8B; +}; diff --git a/test/wpt/tests/interfaces/OES_texture_float.idl b/test/wpt/tests/interfaces/OES_texture_float.idl new file mode 100644 index 0000000..a1bb79c --- /dev/null +++ b/test/wpt/tests/interfaces/OES_texture_float.idl @@ -0,0 +1,7 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_texture_float Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/OES_texture_float/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_texture_float { }; diff --git a/test/wpt/tests/interfaces/OES_texture_float_linear.idl b/test/wpt/tests/interfaces/OES_texture_float_linear.idl new file mode 100644 index 0000000..4626297 --- /dev/null +++ b/test/wpt/tests/interfaces/OES_texture_float_linear.idl @@ -0,0 +1,7 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_texture_float_linear Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/OES_texture_float_linear/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_texture_float_linear { }; diff --git a/test/wpt/tests/interfaces/OES_texture_half_float.idl b/test/wpt/tests/interfaces/OES_texture_half_float.idl new file mode 100644 index 0000000..be41454 --- /dev/null +++ b/test/wpt/tests/interfaces/OES_texture_half_float.idl @@ -0,0 +1,9 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_texture_half_float Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/OES_texture_half_float/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_texture_half_float { + const GLenum HALF_FLOAT_OES = 0x8D61; +}; diff --git a/test/wpt/tests/interfaces/OES_texture_half_float_linear.idl b/test/wpt/tests/interfaces/OES_texture_half_float_linear.idl new file mode 100644 index 0000000..2f1a999 --- /dev/null +++ b/test/wpt/tests/interfaces/OES_texture_half_float_linear.idl @@ -0,0 +1,7 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_texture_half_float_linear Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/OES_texture_half_float_linear/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_texture_half_float_linear { }; diff --git a/test/wpt/tests/interfaces/OES_vertex_array_object.idl 
b/test/wpt/tests/interfaces/OES_vertex_array_object.idl new file mode 100644 index 0000000..8aeb745 --- /dev/null +++ b/test/wpt/tests/interfaces/OES_vertex_array_object.idl @@ -0,0 +1,18 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OES_vertex_array_object Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/OES_vertex_array_object/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WebGLVertexArrayObjectOES : WebGLObject { +}; + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OES_vertex_array_object { + const GLenum VERTEX_ARRAY_BINDING_OES = 0x85B5; + + WebGLVertexArrayObjectOES? createVertexArrayOES(); + undefined deleteVertexArrayOES(WebGLVertexArrayObjectOES? arrayObject); + [WebGLHandlesContextLoss] GLboolean isVertexArrayOES(WebGLVertexArrayObjectOES? arrayObject); + undefined bindVertexArrayOES(WebGLVertexArrayObjectOES? arrayObject); +}; diff --git a/test/wpt/tests/interfaces/OVR_multiview2.idl b/test/wpt/tests/interfaces/OVR_multiview2.idl new file mode 100644 index 0000000..9c1ecc4 --- /dev/null +++ b/test/wpt/tests/interfaces/OVR_multiview2.idl @@ -0,0 +1,14 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL OVR_multiview2 Extension Specification (https://registry.khronos.org/webgl/extensions/OVR_multiview2/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface OVR_multiview2 { + const GLenum FRAMEBUFFER_ATTACHMENT_TEXTURE_NUM_VIEWS_OVR = 0x9630; + const GLenum FRAMEBUFFER_ATTACHMENT_TEXTURE_BASE_VIEW_INDEX_OVR = 0x9632; + const GLenum MAX_VIEWS_OVR = 0x9631; + const GLenum FRAMEBUFFER_INCOMPLETE_VIEW_TARGETS_OVR = 0x9633; + + undefined framebufferTextureMultiviewOVR(GLenum target, GLenum attachment, WebGLTexture? texture, GLint level, GLint baseViewIndex, GLsizei numViews); +}; diff --git a/test/wpt/tests/interfaces/README.md b/test/wpt/tests/interfaces/README.md new file mode 100644 index 0000000..5e948ad --- /dev/null +++ b/test/wpt/tests/interfaces/README.md @@ -0,0 +1,3 @@ +This directory contains [Web IDL](https://webidl.spec.whatwg.org/) interface definitions for use in idlharness.js tests. + +The `.idl` files (except `*.tentative.idl`) are copied from [@webref/idl](https://www.npmjs.com/package/@webref/idl) by a [workflow](https://github.com/web-platform-tests/wpt/blob/master/.github/workflows/interfaces.yml) that tries to sync the files daily. The resulting pull requests require manual review but can be approved/merged by anyone with write access. diff --git a/test/wpt/tests/interfaces/SVG.idl b/test/wpt/tests/interfaces/SVG.idl new file mode 100644 index 0000000..3a0b861 --- /dev/null +++ b/test/wpt/tests/interfaces/SVG.idl @@ -0,0 +1,693 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Scalable Vector Graphics (SVG) 2 (https://svgwg.org/svg2-draft/) + +[Exposed=Window] +interface SVGElement : Element { + + [SameObject] readonly attribute SVGAnimatedString className; + + readonly attribute SVGSVGElement? ownerSVGElement; + readonly attribute SVGElement? 
viewportElement; +}; + +SVGElement includes GlobalEventHandlers; +SVGElement includes SVGElementInstance; +SVGElement includes HTMLOrSVGElement; + +dictionary SVGBoundingBoxOptions { + boolean fill = true; + boolean stroke = false; + boolean markers = false; + boolean clipped = false; +}; + +[Exposed=Window] +interface SVGGraphicsElement : SVGElement { + [SameObject] readonly attribute SVGAnimatedTransformList transform; + + DOMRect getBBox(optional SVGBoundingBoxOptions options = {}); + DOMMatrix? getCTM(); + DOMMatrix? getScreenCTM(); +}; + +SVGGraphicsElement includes SVGTests; + +[Exposed=Window] +interface SVGGeometryElement : SVGGraphicsElement { + [SameObject] readonly attribute SVGAnimatedNumber pathLength; + + boolean isPointInFill(optional DOMPointInit point = {}); + boolean isPointInStroke(optional DOMPointInit point = {}); + float getTotalLength(); + DOMPoint getPointAtLength(float distance); +}; + +[Exposed=Window] +interface SVGNumber { + attribute float value; +}; + +[Exposed=Window] +interface SVGLength { + + // Length Unit Types + const unsigned short SVG_LENGTHTYPE_UNKNOWN = 0; + const unsigned short SVG_LENGTHTYPE_NUMBER = 1; + const unsigned short SVG_LENGTHTYPE_PERCENTAGE = 2; + const unsigned short SVG_LENGTHTYPE_EMS = 3; + const unsigned short SVG_LENGTHTYPE_EXS = 4; + const unsigned short SVG_LENGTHTYPE_PX = 5; + const unsigned short SVG_LENGTHTYPE_CM = 6; + const unsigned short SVG_LENGTHTYPE_MM = 7; + const unsigned short SVG_LENGTHTYPE_IN = 8; + const unsigned short SVG_LENGTHTYPE_PT = 9; + const unsigned short SVG_LENGTHTYPE_PC = 10; + + readonly attribute unsigned short unitType; + attribute float value; + attribute float valueInSpecifiedUnits; + attribute DOMString valueAsString; + + undefined newValueSpecifiedUnits(unsigned short unitType, float valueInSpecifiedUnits); + undefined convertToSpecifiedUnits(unsigned short unitType); +}; + +[Exposed=Window] +interface SVGAngle { + + // Angle Unit Types + const unsigned short SVG_ANGLETYPE_UNKNOWN = 0; + const unsigned short SVG_ANGLETYPE_UNSPECIFIED = 1; + const unsigned short SVG_ANGLETYPE_DEG = 2; + const unsigned short SVG_ANGLETYPE_RAD = 3; + const unsigned short SVG_ANGLETYPE_GRAD = 4; + + readonly attribute unsigned short unitType; + attribute float value; + attribute float valueInSpecifiedUnits; + attribute DOMString valueAsString; + + undefined newValueSpecifiedUnits(unsigned short unitType, float valueInSpecifiedUnits); + undefined convertToSpecifiedUnits(unsigned short unitType); +}; + +[Exposed=Window] +interface SVGNumberList { + + readonly attribute unsigned long length; + readonly attribute unsigned long numberOfItems; + + undefined clear(); + SVGNumber initialize(SVGNumber newItem); + getter SVGNumber getItem(unsigned long index); + SVGNumber insertItemBefore(SVGNumber newItem, unsigned long index); + SVGNumber replaceItem(SVGNumber newItem, unsigned long index); + SVGNumber removeItem(unsigned long index); + SVGNumber appendItem(SVGNumber newItem); + setter undefined (unsigned long index, SVGNumber newItem); +}; + +[Exposed=Window] +interface SVGLengthList { + + readonly attribute unsigned long length; + readonly attribute unsigned long numberOfItems; + + undefined clear(); + SVGLength initialize(SVGLength newItem); + getter SVGLength getItem(unsigned long index); + SVGLength insertItemBefore(SVGLength newItem, unsigned long index); + SVGLength replaceItem(SVGLength newItem, unsigned long index); + SVGLength removeItem(unsigned long index); + SVGLength appendItem(SVGLength newItem); + setter 
undefined (unsigned long index, SVGLength newItem); +}; + +[Exposed=Window] +interface SVGStringList { + + readonly attribute unsigned long length; + readonly attribute unsigned long numberOfItems; + + undefined clear(); + DOMString initialize(DOMString newItem); + getter DOMString getItem(unsigned long index); + DOMString insertItemBefore(DOMString newItem, unsigned long index); + DOMString replaceItem(DOMString newItem, unsigned long index); + DOMString removeItem(unsigned long index); + DOMString appendItem(DOMString newItem); + setter undefined (unsigned long index, DOMString newItem); +}; + +[Exposed=Window] +interface SVGAnimatedBoolean { + attribute boolean baseVal; + readonly attribute boolean animVal; +}; + +[Exposed=Window] +interface SVGAnimatedEnumeration { + attribute unsigned short baseVal; + readonly attribute unsigned short animVal; +}; + +[Exposed=Window] +interface SVGAnimatedInteger { + attribute long baseVal; + readonly attribute long animVal; +}; + +[Exposed=Window] +interface SVGAnimatedNumber { + attribute float baseVal; + readonly attribute float animVal; +}; + +[Exposed=Window] +interface SVGAnimatedLength { + [SameObject] readonly attribute SVGLength baseVal; + [SameObject] readonly attribute SVGLength animVal; +}; + +[Exposed=Window] +interface SVGAnimatedAngle { + [SameObject] readonly attribute SVGAngle baseVal; + [SameObject] readonly attribute SVGAngle animVal; +}; + +[Exposed=Window] +interface SVGAnimatedString { + attribute DOMString baseVal; + readonly attribute DOMString animVal; +}; + +[Exposed=Window] +interface SVGAnimatedRect { + [SameObject] readonly attribute DOMRect baseVal; + [SameObject] readonly attribute DOMRectReadOnly animVal; +}; + +[Exposed=Window] +interface SVGAnimatedNumberList { + [SameObject] readonly attribute SVGNumberList baseVal; + [SameObject] readonly attribute SVGNumberList animVal; +}; + +[Exposed=Window] +interface SVGAnimatedLengthList { + [SameObject] readonly attribute SVGLengthList baseVal; + [SameObject] readonly attribute SVGLengthList animVal; +}; + +[Exposed=Window] +interface SVGUnitTypes { + // Unit Types + const unsigned short SVG_UNIT_TYPE_UNKNOWN = 0; + const unsigned short SVG_UNIT_TYPE_USERSPACEONUSE = 1; + const unsigned short SVG_UNIT_TYPE_OBJECTBOUNDINGBOX = 2; +}; + +interface mixin SVGTests { + [SameObject] readonly attribute SVGStringList requiredExtensions; + [SameObject] readonly attribute SVGStringList systemLanguage; +}; + +interface mixin SVGFitToViewBox { + [SameObject] readonly attribute SVGAnimatedRect viewBox; + [SameObject] readonly attribute SVGAnimatedPreserveAspectRatio preserveAspectRatio; +}; + +interface mixin SVGURIReference { + [SameObject] readonly attribute SVGAnimatedString href; +}; + +partial interface Document { + readonly attribute SVGSVGElement? rootElement; +}; + +[Exposed=Window] +interface SVGSVGElement : SVGGraphicsElement { + + [SameObject] readonly attribute SVGAnimatedLength x; + [SameObject] readonly attribute SVGAnimatedLength y; + [SameObject] readonly attribute SVGAnimatedLength width; + [SameObject] readonly attribute SVGAnimatedLength height; + + attribute float currentScale; + [SameObject] readonly attribute DOMPointReadOnly currentTranslate; + + NodeList getIntersectionList(DOMRectReadOnly rect, SVGElement? referenceElement); + NodeList getEnclosureList(DOMRectReadOnly rect, SVGElement? 
referenceElement); + boolean checkIntersection(SVGElement element, DOMRectReadOnly rect); + boolean checkEnclosure(SVGElement element, DOMRectReadOnly rect); + + undefined deselectAll(); + + SVGNumber createSVGNumber(); + SVGLength createSVGLength(); + SVGAngle createSVGAngle(); + DOMPoint createSVGPoint(); + DOMMatrix createSVGMatrix(); + DOMRect createSVGRect(); + SVGTransform createSVGTransform(); + SVGTransform createSVGTransformFromMatrix(optional DOMMatrix2DInit matrix = {}); + + Element getElementById(DOMString elementId); + + // Deprecated methods that have no effect when called, + // but which are kept for compatibility reasons. + unsigned long suspendRedraw(unsigned long maxWaitMilliseconds); + undefined unsuspendRedraw(unsigned long suspendHandleID); + undefined unsuspendRedrawAll(); + undefined forceRedraw(); +}; + +SVGSVGElement includes SVGFitToViewBox; +SVGSVGElement includes WindowEventHandlers; + +[Exposed=Window] +interface SVGGElement : SVGGraphicsElement { +}; + +[Exposed=Window] +interface SVGDefsElement : SVGGraphicsElement { +}; + +[Exposed=Window] +interface SVGDescElement : SVGElement { +}; + +[Exposed=Window] +interface SVGMetadataElement : SVGElement { +}; + +[Exposed=Window] +interface SVGTitleElement : SVGElement { +}; + +[Exposed=Window] +interface SVGSymbolElement : SVGGraphicsElement { +}; + +SVGSymbolElement includes SVGFitToViewBox; + +[Exposed=Window] +interface SVGUseElement : SVGGraphicsElement { + [SameObject] readonly attribute SVGAnimatedLength x; + [SameObject] readonly attribute SVGAnimatedLength y; + [SameObject] readonly attribute SVGAnimatedLength width; + [SameObject] readonly attribute SVGAnimatedLength height; + [SameObject] readonly attribute SVGElement? instanceRoot; + [SameObject] readonly attribute SVGElement? animatedInstanceRoot; +}; + +SVGUseElement includes SVGURIReference; + +[Exposed=Window] +interface SVGUseElementShadowRoot : ShadowRoot { +}; + +interface mixin SVGElementInstance { + [SameObject] readonly attribute SVGElement? correspondingElement; + [SameObject] readonly attribute SVGUseElement? 
correspondingUseElement; +}; + +[Exposed=Window] +interface ShadowAnimation : Animation { + constructor(Animation source, (Element or CSSPseudoElement) newTarget); + [SameObject] readonly attribute Animation sourceAnimation; +}; + +[Exposed=Window] +interface SVGSwitchElement : SVGGraphicsElement { +}; + +interface mixin GetSVGDocument { + Document getSVGDocument(); +}; + +[Exposed=Window] +interface SVGStyleElement : SVGElement { + attribute DOMString type; + attribute DOMString media; + attribute DOMString title; +}; + +SVGStyleElement includes LinkStyle; + +[Exposed=Window] +interface SVGTransform { + + // Transform Types + const unsigned short SVG_TRANSFORM_UNKNOWN = 0; + const unsigned short SVG_TRANSFORM_MATRIX = 1; + const unsigned short SVG_TRANSFORM_TRANSLATE = 2; + const unsigned short SVG_TRANSFORM_SCALE = 3; + const unsigned short SVG_TRANSFORM_ROTATE = 4; + const unsigned short SVG_TRANSFORM_SKEWX = 5; + const unsigned short SVG_TRANSFORM_SKEWY = 6; + + readonly attribute unsigned short type; + [SameObject] readonly attribute DOMMatrix matrix; + readonly attribute float angle; + + undefined setMatrix(optional DOMMatrix2DInit matrix = {}); + undefined setTranslate(float tx, float ty); + undefined setScale(float sx, float sy); + undefined setRotate(float angle, float cx, float cy); + undefined setSkewX(float angle); + undefined setSkewY(float angle); +}; + +[Exposed=Window] +interface SVGTransformList { + + readonly attribute unsigned long length; + readonly attribute unsigned long numberOfItems; + + undefined clear(); + SVGTransform initialize(SVGTransform newItem); + getter SVGTransform getItem(unsigned long index); + SVGTransform insertItemBefore(SVGTransform newItem, unsigned long index); + SVGTransform replaceItem(SVGTransform newItem, unsigned long index); + SVGTransform removeItem(unsigned long index); + SVGTransform appendItem(SVGTransform newItem); + setter undefined (unsigned long index, SVGTransform newItem); + + // Additional methods not common to other list interfaces. + SVGTransform createSVGTransformFromMatrix(optional DOMMatrix2DInit matrix = {}); + SVGTransform? 
consolidate(); +}; + +[Exposed=Window] +interface SVGAnimatedTransformList { + [SameObject] readonly attribute SVGTransformList baseVal; + [SameObject] readonly attribute SVGTransformList animVal; +}; + +[Exposed=Window] +interface SVGPreserveAspectRatio { + + // Alignment Types + const unsigned short SVG_PRESERVEASPECTRATIO_UNKNOWN = 0; + const unsigned short SVG_PRESERVEASPECTRATIO_NONE = 1; + const unsigned short SVG_PRESERVEASPECTRATIO_XMINYMIN = 2; + const unsigned short SVG_PRESERVEASPECTRATIO_XMIDYMIN = 3; + const unsigned short SVG_PRESERVEASPECTRATIO_XMAXYMIN = 4; + const unsigned short SVG_PRESERVEASPECTRATIO_XMINYMID = 5; + const unsigned short SVG_PRESERVEASPECTRATIO_XMIDYMID = 6; + const unsigned short SVG_PRESERVEASPECTRATIO_XMAXYMID = 7; + const unsigned short SVG_PRESERVEASPECTRATIO_XMINYMAX = 8; + const unsigned short SVG_PRESERVEASPECTRATIO_XMIDYMAX = 9; + const unsigned short SVG_PRESERVEASPECTRATIO_XMAXYMAX = 10; + + // Meet-or-slice Types + const unsigned short SVG_MEETORSLICE_UNKNOWN = 0; + const unsigned short SVG_MEETORSLICE_MEET = 1; + const unsigned short SVG_MEETORSLICE_SLICE = 2; + + attribute unsigned short align; + attribute unsigned short meetOrSlice; +}; + +[Exposed=Window] +interface SVGAnimatedPreserveAspectRatio { + [SameObject] readonly attribute SVGPreserveAspectRatio baseVal; + [SameObject] readonly attribute SVGPreserveAspectRatio animVal; +}; + +[Exposed=Window] +interface SVGPathElement : SVGGeometryElement { +}; + +[Exposed=Window] +interface SVGRectElement : SVGGeometryElement { + [SameObject] readonly attribute SVGAnimatedLength x; + [SameObject] readonly attribute SVGAnimatedLength y; + [SameObject] readonly attribute SVGAnimatedLength width; + [SameObject] readonly attribute SVGAnimatedLength height; + [SameObject] readonly attribute SVGAnimatedLength rx; + [SameObject] readonly attribute SVGAnimatedLength ry; +}; + +[Exposed=Window] +interface SVGCircleElement : SVGGeometryElement { + [SameObject] readonly attribute SVGAnimatedLength cx; + [SameObject] readonly attribute SVGAnimatedLength cy; + [SameObject] readonly attribute SVGAnimatedLength r; +}; + +[Exposed=Window] +interface SVGEllipseElement : SVGGeometryElement { + [SameObject] readonly attribute SVGAnimatedLength cx; + [SameObject] readonly attribute SVGAnimatedLength cy; + [SameObject] readonly attribute SVGAnimatedLength rx; + [SameObject] readonly attribute SVGAnimatedLength ry; +}; + +[Exposed=Window] +interface SVGLineElement : SVGGeometryElement { + [SameObject] readonly attribute SVGAnimatedLength x1; + [SameObject] readonly attribute SVGAnimatedLength y1; + [SameObject] readonly attribute SVGAnimatedLength x2; + [SameObject] readonly attribute SVGAnimatedLength y2; +}; + +interface mixin SVGAnimatedPoints { + [SameObject] readonly attribute SVGPointList points; + [SameObject] readonly attribute SVGPointList animatedPoints; +}; + +[Exposed=Window] +interface SVGPointList { + + readonly attribute unsigned long length; + readonly attribute unsigned long numberOfItems; + + undefined clear(); + DOMPoint initialize(DOMPoint newItem); + getter DOMPoint getItem(unsigned long index); + DOMPoint insertItemBefore(DOMPoint newItem, unsigned long index); + DOMPoint replaceItem(DOMPoint newItem, unsigned long index); + DOMPoint removeItem(unsigned long index); + DOMPoint appendItem(DOMPoint newItem); + setter undefined (unsigned long index, DOMPoint newItem); +}; + +[Exposed=Window] +interface SVGPolylineElement : SVGGeometryElement { +}; + +SVGPolylineElement includes SVGAnimatedPoints; + 
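The SVGTransform and SVGTransformList interfaces above are normally driven from script rather than constructed directly. A minimal, non-normative JavaScript sketch (the element id "box" and the markup it implies are hypothetical, assuming a browser SVG DOM):

  // Assumes an SVG document containing <rect id="box" width="20" height="20"/> (hypothetical markup).
  const rect = document.getElementById('box');
  const svg = rect.ownerSVGElement;
  const t = svg.createSVGTransform();           // starts as SVG_TRANSFORM_MATRIX (identity)
  t.setRotate(45, 10, 10);                      // now SVG_TRANSFORM_ROTATE about (10, 10)
  rect.transform.baseVal.appendItem(t);         // SVGAnimatedTransformList.baseVal is an SVGTransformList
  const merged = rect.transform.baseVal.consolidate(); // collapses the list to one matrix transform, or null if empty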
+[Exposed=Window] +interface SVGPolygonElement : SVGGeometryElement { +}; + +SVGPolygonElement includes SVGAnimatedPoints; + +[Exposed=Window] +interface SVGTextContentElement : SVGGraphicsElement { + + // lengthAdjust Types + const unsigned short LENGTHADJUST_UNKNOWN = 0; + const unsigned short LENGTHADJUST_SPACING = 1; + const unsigned short LENGTHADJUST_SPACINGANDGLYPHS = 2; + + [SameObject] readonly attribute SVGAnimatedLength textLength; + [SameObject] readonly attribute SVGAnimatedEnumeration lengthAdjust; + + long getNumberOfChars(); + float getComputedTextLength(); + float getSubStringLength(unsigned long charnum, unsigned long nchars); + DOMPoint getStartPositionOfChar(unsigned long charnum); + DOMPoint getEndPositionOfChar(unsigned long charnum); + DOMRect getExtentOfChar(unsigned long charnum); + float getRotationOfChar(unsigned long charnum); + long getCharNumAtPosition(optional DOMPointInit point = {}); + undefined selectSubString(unsigned long charnum, unsigned long nchars); +}; + +[Exposed=Window] +interface SVGTextPositioningElement : SVGTextContentElement { + [SameObject] readonly attribute SVGAnimatedLengthList x; + [SameObject] readonly attribute SVGAnimatedLengthList y; + [SameObject] readonly attribute SVGAnimatedLengthList dx; + [SameObject] readonly attribute SVGAnimatedLengthList dy; + [SameObject] readonly attribute SVGAnimatedNumberList rotate; +}; + +[Exposed=Window] +interface SVGTextElement : SVGTextPositioningElement { +}; + +[Exposed=Window] +interface SVGTSpanElement : SVGTextPositioningElement { +}; + +[Exposed=Window] +interface SVGTextPathElement : SVGTextContentElement { + + // textPath Method Types + const unsigned short TEXTPATH_METHODTYPE_UNKNOWN = 0; + const unsigned short TEXTPATH_METHODTYPE_ALIGN = 1; + const unsigned short TEXTPATH_METHODTYPE_STRETCH = 2; + + // textPath Spacing Types + const unsigned short TEXTPATH_SPACINGTYPE_UNKNOWN = 0; + const unsigned short TEXTPATH_SPACINGTYPE_AUTO = 1; + const unsigned short TEXTPATH_SPACINGTYPE_EXACT = 2; + + [SameObject] readonly attribute SVGAnimatedLength startOffset; + [SameObject] readonly attribute SVGAnimatedEnumeration method; + [SameObject] readonly attribute SVGAnimatedEnumeration spacing; +}; + +SVGTextPathElement includes SVGURIReference; + +[Exposed=Window] +interface SVGImageElement : SVGGraphicsElement { + [SameObject] readonly attribute SVGAnimatedLength x; + [SameObject] readonly attribute SVGAnimatedLength y; + [SameObject] readonly attribute SVGAnimatedLength width; + [SameObject] readonly attribute SVGAnimatedLength height; + [SameObject] readonly attribute SVGAnimatedPreserveAspectRatio preserveAspectRatio; + attribute DOMString? 
crossOrigin; +}; + +SVGImageElement includes SVGURIReference; + +[Exposed=Window] +interface SVGForeignObjectElement : SVGGraphicsElement { + [SameObject] readonly attribute SVGAnimatedLength x; + [SameObject] readonly attribute SVGAnimatedLength y; + [SameObject] readonly attribute SVGAnimatedLength width; + [SameObject] readonly attribute SVGAnimatedLength height; +}; + +[Exposed=Window] +interface SVGMarkerElement : SVGElement { + + // Marker Unit Types + const unsigned short SVG_MARKERUNITS_UNKNOWN = 0; + const unsigned short SVG_MARKERUNITS_USERSPACEONUSE = 1; + const unsigned short SVG_MARKERUNITS_STROKEWIDTH = 2; + + // Marker Orientation Types + const unsigned short SVG_MARKER_ORIENT_UNKNOWN = 0; + const unsigned short SVG_MARKER_ORIENT_AUTO = 1; + const unsigned short SVG_MARKER_ORIENT_ANGLE = 2; + + [SameObject] readonly attribute SVGAnimatedLength refX; + [SameObject] readonly attribute SVGAnimatedLength refY; + [SameObject] readonly attribute SVGAnimatedEnumeration markerUnits; + [SameObject] readonly attribute SVGAnimatedLength markerWidth; + [SameObject] readonly attribute SVGAnimatedLength markerHeight; + [SameObject] readonly attribute SVGAnimatedEnumeration orientType; + [SameObject] readonly attribute SVGAnimatedAngle orientAngle; + attribute DOMString orient; + + undefined setOrientToAuto(); + undefined setOrientToAngle(SVGAngle angle); +}; + +SVGMarkerElement includes SVGFitToViewBox; + +[Exposed=Window] +interface SVGGradientElement : SVGElement { + + // Spread Method Types + const unsigned short SVG_SPREADMETHOD_UNKNOWN = 0; + const unsigned short SVG_SPREADMETHOD_PAD = 1; + const unsigned short SVG_SPREADMETHOD_REFLECT = 2; + const unsigned short SVG_SPREADMETHOD_REPEAT = 3; + + [SameObject] readonly attribute SVGAnimatedEnumeration gradientUnits; + [SameObject] readonly attribute SVGAnimatedTransformList gradientTransform; + [SameObject] readonly attribute SVGAnimatedEnumeration spreadMethod; +}; + +SVGGradientElement includes SVGURIReference; + +[Exposed=Window] +interface SVGLinearGradientElement : SVGGradientElement { + [SameObject] readonly attribute SVGAnimatedLength x1; + [SameObject] readonly attribute SVGAnimatedLength y1; + [SameObject] readonly attribute SVGAnimatedLength x2; + [SameObject] readonly attribute SVGAnimatedLength y2; +}; + +[Exposed=Window] +interface SVGRadialGradientElement : SVGGradientElement { + [SameObject] readonly attribute SVGAnimatedLength cx; + [SameObject] readonly attribute SVGAnimatedLength cy; + [SameObject] readonly attribute SVGAnimatedLength r; + [SameObject] readonly attribute SVGAnimatedLength fx; + [SameObject] readonly attribute SVGAnimatedLength fy; + [SameObject] readonly attribute SVGAnimatedLength fr; +}; + +[Exposed=Window] +interface SVGStopElement : SVGElement { + [SameObject] readonly attribute SVGAnimatedNumber offset; +}; + +[Exposed=Window] +interface SVGPatternElement : SVGElement { + [SameObject] readonly attribute SVGAnimatedEnumeration patternUnits; + [SameObject] readonly attribute SVGAnimatedEnumeration patternContentUnits; + [SameObject] readonly attribute SVGAnimatedTransformList patternTransform; + [SameObject] readonly attribute SVGAnimatedLength x; + [SameObject] readonly attribute SVGAnimatedLength y; + [SameObject] readonly attribute SVGAnimatedLength width; + [SameObject] readonly attribute SVGAnimatedLength height; +}; + +SVGPatternElement includes SVGFitToViewBox; +SVGPatternElement includes SVGURIReference; + +[Exposed=Window] +interface SVGScriptElement : SVGElement { + attribute DOMString 
type; + attribute DOMString? crossOrigin; +}; + +SVGScriptElement includes SVGURIReference; + +[Exposed=Window] +interface SVGAElement : SVGGraphicsElement { + [SameObject] readonly attribute SVGAnimatedString target; + attribute DOMString download; + attribute USVString ping; + attribute DOMString rel; + [SameObject, PutForwards=value] readonly attribute DOMTokenList relList; + attribute DOMString hreflang; + attribute DOMString type; + + attribute DOMString text; + + attribute DOMString referrerPolicy; +}; + +SVGAElement includes SVGURIReference; + +// Inline HTMLHyperlinkElementUtils except href, which conflicts. +partial interface SVGAElement { + readonly attribute USVString origin; + [CEReactions] attribute USVString protocol; + [CEReactions] attribute USVString username; + [CEReactions] attribute USVString password; + [CEReactions] attribute USVString host; + [CEReactions] attribute USVString hostname; + [CEReactions] attribute USVString port; + [CEReactions] attribute USVString pathname; + [CEReactions] attribute USVString search; + [CEReactions] attribute USVString hash; +}; + +[Exposed=Window] +interface SVGViewElement : SVGElement {}; + +SVGViewElement includes SVGFitToViewBox; diff --git a/test/wpt/tests/interfaces/WEBGL_blend_equation_advanced_coherent.idl b/test/wpt/tests/interfaces/WEBGL_blend_equation_advanced_coherent.idl new file mode 100644 index 0000000..2208329 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_blend_equation_advanced_coherent.idl @@ -0,0 +1,23 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_blend_equation_advanced_coherent Extension Draft Specification (https://registry.khronos.org/webgl/extensions/WEBGL_blend_equation_advanced_coherent/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_blend_equation_advanced_coherent { + const GLenum MULTIPLY = 0x9294; + const GLenum SCREEN = 0x9295; + const GLenum OVERLAY = 0x9296; + const GLenum DARKEN = 0x9297; + const GLenum LIGHTEN = 0x9298; + const GLenum COLORDODGE = 0x9299; + const GLenum COLORBURN = 0x929A; + const GLenum HARDLIGHT = 0x929B; + const GLenum SOFTLIGHT = 0x929C; + const GLenum DIFFERENCE = 0x929E; + const GLenum EXCLUSION = 0x92A0; + const GLenum HSL_HUE = 0x92AD; + const GLenum HSL_SATURATION = 0x92AE; + const GLenum HSL_COLOR = 0x92AF; + const GLenum HSL_LUMINOSITY = 0x92B0; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_clip_cull_distance.idl b/test/wpt/tests/interfaces/WEBGL_clip_cull_distance.idl new file mode 100644 index 0000000..46fa921 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_clip_cull_distance.idl @@ -0,0 +1,20 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_clip_cull_distance Extension Draft Specification (https://registry.khronos.org/webgl/extensions/WEBGL_clip_cull_distance/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_clip_cull_distance { + const GLenum MAX_CLIP_DISTANCES_WEBGL = 0x0D32; + const GLenum MAX_CULL_DISTANCES_WEBGL = 0x82F9; + const GLenum MAX_COMBINED_CLIP_AND_CULL_DISTANCES_WEBGL = 0x82FA; + + const GLenum CLIP_DISTANCE0_WEBGL = 0x3000; + const GLenum CLIP_DISTANCE1_WEBGL = 0x3001; + const GLenum CLIP_DISTANCE2_WEBGL = 0x3002; + const GLenum CLIP_DISTANCE3_WEBGL = 0x3003; + const GLenum CLIP_DISTANCE4_WEBGL = 0x3004; + const GLenum CLIP_DISTANCE5_WEBGL = 0x3005; + const GLenum 
CLIP_DISTANCE6_WEBGL = 0x3006; + const GLenum CLIP_DISTANCE7_WEBGL = 0x3007; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_color_buffer_float.idl b/test/wpt/tests/interfaces/WEBGL_color_buffer_float.idl new file mode 100644 index 0000000..b73f631 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_color_buffer_float.idl @@ -0,0 +1,11 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_color_buffer_float Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_color_buffer_float/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_color_buffer_float { + const GLenum RGBA32F_EXT = 0x8814; + const GLenum FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE_EXT = 0x8211; + const GLenum UNSIGNED_NORMALIZED_EXT = 0x8C17; +}; // interface WEBGL_color_buffer_float diff --git a/test/wpt/tests/interfaces/WEBGL_compressed_texture_astc.idl b/test/wpt/tests/interfaces/WEBGL_compressed_texture_astc.idl new file mode 100644 index 0000000..9e4632f --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_compressed_texture_astc.idl @@ -0,0 +1,41 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_compressed_texture_astc Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_astc/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_compressed_texture_astc { + /* Compressed Texture Format */ + const GLenum COMPRESSED_RGBA_ASTC_4x4_KHR = 0x93B0; + const GLenum COMPRESSED_RGBA_ASTC_5x4_KHR = 0x93B1; + const GLenum COMPRESSED_RGBA_ASTC_5x5_KHR = 0x93B2; + const GLenum COMPRESSED_RGBA_ASTC_6x5_KHR = 0x93B3; + const GLenum COMPRESSED_RGBA_ASTC_6x6_KHR = 0x93B4; + const GLenum COMPRESSED_RGBA_ASTC_8x5_KHR = 0x93B5; + const GLenum COMPRESSED_RGBA_ASTC_8x6_KHR = 0x93B6; + const GLenum COMPRESSED_RGBA_ASTC_8x8_KHR = 0x93B7; + const GLenum COMPRESSED_RGBA_ASTC_10x5_KHR = 0x93B8; + const GLenum COMPRESSED_RGBA_ASTC_10x6_KHR = 0x93B9; + const GLenum COMPRESSED_RGBA_ASTC_10x8_KHR = 0x93BA; + const GLenum COMPRESSED_RGBA_ASTC_10x10_KHR = 0x93BB; + const GLenum COMPRESSED_RGBA_ASTC_12x10_KHR = 0x93BC; + const GLenum COMPRESSED_RGBA_ASTC_12x12_KHR = 0x93BD; + + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR = 0x93D0; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR = 0x93D1; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR = 0x93D2; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR = 0x93D3; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR = 0x93D4; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR = 0x93D5; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR = 0x93D6; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR = 0x93D7; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR = 0x93D8; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR = 0x93D9; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR = 0x93DA; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR = 0x93DB; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR = 0x93DC; + const GLenum COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR = 0x93DD; + + // Profile query support. 
+ sequence getSupportedProfiles(); +}; diff --git a/test/wpt/tests/interfaces/WEBGL_compressed_texture_etc.idl b/test/wpt/tests/interfaces/WEBGL_compressed_texture_etc.idl new file mode 100644 index 0000000..5174a08 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_compressed_texture_etc.idl @@ -0,0 +1,19 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_compressed_texture_etc Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_etc/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_compressed_texture_etc { + /* Compressed Texture Formats */ + const GLenum COMPRESSED_R11_EAC = 0x9270; + const GLenum COMPRESSED_SIGNED_R11_EAC = 0x9271; + const GLenum COMPRESSED_RG11_EAC = 0x9272; + const GLenum COMPRESSED_SIGNED_RG11_EAC = 0x9273; + const GLenum COMPRESSED_RGB8_ETC2 = 0x9274; + const GLenum COMPRESSED_SRGB8_ETC2 = 0x9275; + const GLenum COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 0x9276; + const GLenum COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2 = 0x9277; + const GLenum COMPRESSED_RGBA8_ETC2_EAC = 0x9278; + const GLenum COMPRESSED_SRGB8_ALPHA8_ETC2_EAC = 0x9279; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_compressed_texture_etc1.idl b/test/wpt/tests/interfaces/WEBGL_compressed_texture_etc1.idl new file mode 100644 index 0000000..773697e --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_compressed_texture_etc1.idl @@ -0,0 +1,10 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_compressed_texture_etc1 Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_etc1/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_compressed_texture_etc1 { + /* Compressed Texture Format */ + const GLenum COMPRESSED_RGB_ETC1_WEBGL = 0x8D64; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_compressed_texture_pvrtc.idl b/test/wpt/tests/interfaces/WEBGL_compressed_texture_pvrtc.idl new file mode 100644 index 0000000..5aa004a --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_compressed_texture_pvrtc.idl @@ -0,0 +1,13 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_compressed_texture_pvrtc Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_pvrtc/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_compressed_texture_pvrtc { + /* Compressed Texture Formats */ + const GLenum COMPRESSED_RGB_PVRTC_4BPPV1_IMG = 0x8C00; + const GLenum COMPRESSED_RGB_PVRTC_2BPPV1_IMG = 0x8C01; + const GLenum COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = 0x8C02; + const GLenum COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = 0x8C03; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc.idl b/test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc.idl new file mode 100644 index 0000000..6e7c4bd --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc.idl @@ -0,0 +1,13 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_compressed_texture_s3tc Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_s3tc/) + +[Exposed=(Window,Worker), 
LegacyNoInterfaceObject] +interface WEBGL_compressed_texture_s3tc { + /* Compressed Texture Formats */ + const GLenum COMPRESSED_RGB_S3TC_DXT1_EXT = 0x83F0; + const GLenum COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1; + const GLenum COMPRESSED_RGBA_S3TC_DXT3_EXT = 0x83F2; + const GLenum COMPRESSED_RGBA_S3TC_DXT5_EXT = 0x83F3; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc_srgb.idl b/test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc_srgb.idl new file mode 100644 index 0000000..809265e --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_compressed_texture_s3tc_srgb.idl @@ -0,0 +1,13 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_compressed_texture_s3tc_srgb Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_compressed_texture_s3tc_srgb/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_compressed_texture_s3tc_srgb { + /* Compressed Texture Formats */ + const GLenum COMPRESSED_SRGB_S3TC_DXT1_EXT = 0x8C4C; + const GLenum COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT = 0x8C4D; + const GLenum COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT = 0x8C4E; + const GLenum COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT = 0x8C4F; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_debug_renderer_info.idl b/test/wpt/tests/interfaces/WEBGL_debug_renderer_info.idl new file mode 100644 index 0000000..7694061 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_debug_renderer_info.idl @@ -0,0 +1,12 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_debug_renderer_info Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_debug_renderer_info/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_debug_renderer_info { + + const GLenum UNMASKED_VENDOR_WEBGL = 0x9245; + const GLenum UNMASKED_RENDERER_WEBGL = 0x9246; + +}; diff --git a/test/wpt/tests/interfaces/WEBGL_debug_shaders.idl b/test/wpt/tests/interfaces/WEBGL_debug_shaders.idl new file mode 100644 index 0000000..ecb48d0 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_debug_shaders.idl @@ -0,0 +1,11 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_debug_shaders Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_debug_shaders/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_debug_shaders { + + DOMString getTranslatedShaderSource(WebGLShader shader); + +}; diff --git a/test/wpt/tests/interfaces/WEBGL_depth_texture.idl b/test/wpt/tests/interfaces/WEBGL_depth_texture.idl new file mode 100644 index 0000000..a9ec791 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_depth_texture.idl @@ -0,0 +1,9 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_depth_texture Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_depth_texture/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_depth_texture { + const GLenum UNSIGNED_INT_24_8_WEBGL = 0x84FA; +}; diff --git a/test/wpt/tests/interfaces/WEBGL_draw_buffers.idl b/test/wpt/tests/interfaces/WEBGL_draw_buffers.idl new file mode 100644 index 0000000..3310388 --- 
/dev/null +++ b/test/wpt/tests/interfaces/WEBGL_draw_buffers.idl @@ -0,0 +1,46 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_draw_buffers Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_draw_buffers/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_draw_buffers { + const GLenum COLOR_ATTACHMENT0_WEBGL = 0x8CE0; + const GLenum COLOR_ATTACHMENT1_WEBGL = 0x8CE1; + const GLenum COLOR_ATTACHMENT2_WEBGL = 0x8CE2; + const GLenum COLOR_ATTACHMENT3_WEBGL = 0x8CE3; + const GLenum COLOR_ATTACHMENT4_WEBGL = 0x8CE4; + const GLenum COLOR_ATTACHMENT5_WEBGL = 0x8CE5; + const GLenum COLOR_ATTACHMENT6_WEBGL = 0x8CE6; + const GLenum COLOR_ATTACHMENT7_WEBGL = 0x8CE7; + const GLenum COLOR_ATTACHMENT8_WEBGL = 0x8CE8; + const GLenum COLOR_ATTACHMENT9_WEBGL = 0x8CE9; + const GLenum COLOR_ATTACHMENT10_WEBGL = 0x8CEA; + const GLenum COLOR_ATTACHMENT11_WEBGL = 0x8CEB; + const GLenum COLOR_ATTACHMENT12_WEBGL = 0x8CEC; + const GLenum COLOR_ATTACHMENT13_WEBGL = 0x8CED; + const GLenum COLOR_ATTACHMENT14_WEBGL = 0x8CEE; + const GLenum COLOR_ATTACHMENT15_WEBGL = 0x8CEF; + + const GLenum DRAW_BUFFER0_WEBGL = 0x8825; + const GLenum DRAW_BUFFER1_WEBGL = 0x8826; + const GLenum DRAW_BUFFER2_WEBGL = 0x8827; + const GLenum DRAW_BUFFER3_WEBGL = 0x8828; + const GLenum DRAW_BUFFER4_WEBGL = 0x8829; + const GLenum DRAW_BUFFER5_WEBGL = 0x882A; + const GLenum DRAW_BUFFER6_WEBGL = 0x882B; + const GLenum DRAW_BUFFER7_WEBGL = 0x882C; + const GLenum DRAW_BUFFER8_WEBGL = 0x882D; + const GLenum DRAW_BUFFER9_WEBGL = 0x882E; + const GLenum DRAW_BUFFER10_WEBGL = 0x882F; + const GLenum DRAW_BUFFER11_WEBGL = 0x8830; + const GLenum DRAW_BUFFER12_WEBGL = 0x8831; + const GLenum DRAW_BUFFER13_WEBGL = 0x8832; + const GLenum DRAW_BUFFER14_WEBGL = 0x8833; + const GLenum DRAW_BUFFER15_WEBGL = 0x8834; + + const GLenum MAX_COLOR_ATTACHMENTS_WEBGL = 0x8CDF; + const GLenum MAX_DRAW_BUFFERS_WEBGL = 0x8824; + + undefined drawBuffersWEBGL(sequence buffers); +}; diff --git a/test/wpt/tests/interfaces/WEBGL_draw_instanced_base_vertex_base_instance.idl b/test/wpt/tests/interfaces/WEBGL_draw_instanced_base_vertex_base_instance.idl new file mode 100644 index 0000000..38f7a42 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_draw_instanced_base_vertex_base_instance.idl @@ -0,0 +1,14 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_draw_instanced_base_vertex_base_instance Extension Draft Specification (https://registry.khronos.org/webgl/extensions/WEBGL_draw_instanced_base_vertex_base_instance/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_draw_instanced_base_vertex_base_instance { + undefined drawArraysInstancedBaseInstanceWEBGL( + GLenum mode, GLint first, GLsizei count, + GLsizei instanceCount, GLuint baseInstance); + undefined drawElementsInstancedBaseVertexBaseInstanceWEBGL( + GLenum mode, GLsizei count, GLenum type, GLintptr offset, + GLsizei instanceCount, GLint baseVertex, GLuint baseInstance); +}; diff --git a/test/wpt/tests/interfaces/WEBGL_lose_context.idl b/test/wpt/tests/interfaces/WEBGL_lose_context.idl new file mode 100644 index 0000000..ee68fb5 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_lose_context.idl @@ -0,0 +1,10 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// 
(https://github.com/w3c/webref) +// Source: WebGL WEBGL_lose_context Khronos Ratified Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_lose_context/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_lose_context { + undefined loseContext(); + undefined restoreContext(); +}; diff --git a/test/wpt/tests/interfaces/WEBGL_multi_draw.idl b/test/wpt/tests/interfaces/WEBGL_multi_draw.idl new file mode 100644 index 0000000..ee8c044 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_multi_draw.idl @@ -0,0 +1,32 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_multi_draw Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_multi_draw/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_multi_draw { + undefined multiDrawArraysWEBGL( + GLenum mode, + ([AllowShared] Int32Array or sequence) firstsList, GLuint firstsOffset, + ([AllowShared] Int32Array or sequence) countsList, GLuint countsOffset, + GLsizei drawcount); + undefined multiDrawElementsWEBGL( + GLenum mode, + ([AllowShared] Int32Array or sequence) countsList, GLuint countsOffset, + GLenum type, + ([AllowShared] Int32Array or sequence) offsetsList, GLuint offsetsOffset, + GLsizei drawcount); + undefined multiDrawArraysInstancedWEBGL( + GLenum mode, + ([AllowShared] Int32Array or sequence) firstsList, GLuint firstsOffset, + ([AllowShared] Int32Array or sequence) countsList, GLuint countsOffset, + ([AllowShared] Int32Array or sequence) instanceCountsList, GLuint instanceCountsOffset, + GLsizei drawcount); + undefined multiDrawElementsInstancedWEBGL( + GLenum mode, + ([AllowShared] Int32Array or sequence) countsList, GLuint countsOffset, + GLenum type, + ([AllowShared] Int32Array or sequence) offsetsList, GLuint offsetsOffset, + ([AllowShared] Int32Array or sequence) instanceCountsList, GLuint instanceCountsOffset, + GLsizei drawcount); +}; diff --git a/test/wpt/tests/interfaces/WEBGL_multi_draw_instanced_base_vertex_base_instance.idl b/test/wpt/tests/interfaces/WEBGL_multi_draw_instanced_base_vertex_base_instance.idl new file mode 100644 index 0000000..2258fa9 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_multi_draw_instanced_base_vertex_base_instance.idl @@ -0,0 +1,26 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_multi_draw_instanced_base_vertex_base_instance Extension Draft Specification (https://registry.khronos.org/webgl/extensions/WEBGL_multi_draw_instanced_base_vertex_base_instance/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_multi_draw_instanced_base_vertex_base_instance { + undefined multiDrawArraysInstancedBaseInstanceWEBGL( + GLenum mode, + ([AllowShared] Int32Array or sequence) firstsList, GLuint firstsOffset, + ([AllowShared] Int32Array or sequence) countsList, GLuint countsOffset, + ([AllowShared] Int32Array or sequence) instanceCountsList, GLuint instanceCountsOffset, + ([AllowShared] Uint32Array or sequence) baseInstancesList, GLuint baseInstancesOffset, + GLsizei drawcount + ); + undefined multiDrawElementsInstancedBaseVertexBaseInstanceWEBGL( + GLenum mode, + ([AllowShared] Int32Array or sequence) countsList, GLuint countsOffset, + GLenum type, + ([AllowShared] Int32Array or sequence) offsetsList, GLuint offsetsOffset, + ([AllowShared] Int32Array or sequence) instanceCountsList, 
GLuint instanceCountsOffset, + ([AllowShared] Int32Array or sequence) baseVerticesList, GLuint baseVerticesOffset, + ([AllowShared] Uint32Array or sequence) baseInstancesList, GLuint baseInstancesOffset, + GLsizei drawcount + ); +}; diff --git a/test/wpt/tests/interfaces/WEBGL_provoking_vertex.idl b/test/wpt/tests/interfaces/WEBGL_provoking_vertex.idl new file mode 100644 index 0000000..035e1d2 --- /dev/null +++ b/test/wpt/tests/interfaces/WEBGL_provoking_vertex.idl @@ -0,0 +1,13 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebGL WEBGL_provoking_vertex Extension Specification (https://registry.khronos.org/webgl/extensions/WEBGL_provoking_vertex/) + +[Exposed=(Window,Worker), LegacyNoInterfaceObject] +interface WEBGL_provoking_vertex { + const GLenum FIRST_VERTEX_CONVENTION_WEBGL = 0x8E4D; + const GLenum LAST_VERTEX_CONVENTION_WEBGL = 0x8E4E; // default + const GLenum PROVOKING_VERTEX_WEBGL = 0x8E4F; + + undefined provokingVertexWEBGL(GLenum provokeMode); +}; diff --git a/test/wpt/tests/interfaces/WebCryptoAPI.idl b/test/wpt/tests/interfaces/WebCryptoAPI.idl new file mode 100644 index 0000000..0e68ea8 --- /dev/null +++ b/test/wpt/tests/interfaces/WebCryptoAPI.idl @@ -0,0 +1,237 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Web Cryptography API (https://w3c.github.io/webcrypto/) + +partial interface mixin WindowOrWorkerGlobalScope { + [SameObject] readonly attribute Crypto crypto; +}; + +[Exposed=(Window,Worker)] +interface Crypto { + [SecureContext] readonly attribute SubtleCrypto subtle; + ArrayBufferView getRandomValues(ArrayBufferView array); + [SecureContext] DOMString randomUUID(); +}; + +typedef (object or DOMString) AlgorithmIdentifier; + +typedef AlgorithmIdentifier HashAlgorithmIdentifier; + +dictionary Algorithm { + required DOMString name; +}; + +dictionary KeyAlgorithm { + required DOMString name; +}; + +enum KeyType { "public", "private", "secret" }; + +enum KeyUsage { "encrypt", "decrypt", "sign", "verify", "deriveKey", "deriveBits", "wrapKey", "unwrapKey" }; + +[SecureContext,Exposed=(Window,Worker),Serializable] +interface CryptoKey { + readonly attribute KeyType type; + readonly attribute boolean extractable; + readonly attribute object algorithm; + readonly attribute object usages; +}; + +enum KeyFormat { "raw", "spki", "pkcs8", "jwk" }; + +[SecureContext,Exposed=(Window,Worker)] +interface SubtleCrypto { + Promise encrypt(AlgorithmIdentifier algorithm, + CryptoKey key, + BufferSource data); + Promise decrypt(AlgorithmIdentifier algorithm, + CryptoKey key, + BufferSource data); + Promise sign(AlgorithmIdentifier algorithm, + CryptoKey key, + BufferSource data); + Promise verify(AlgorithmIdentifier algorithm, + CryptoKey key, + BufferSource signature, + BufferSource data); + Promise digest(AlgorithmIdentifier algorithm, + BufferSource data); + + Promise generateKey(AlgorithmIdentifier algorithm, + boolean extractable, + sequence keyUsages ); + Promise deriveKey(AlgorithmIdentifier algorithm, + CryptoKey baseKey, + AlgorithmIdentifier derivedKeyType, + boolean extractable, + sequence keyUsages ); + Promise deriveBits(AlgorithmIdentifier algorithm, + CryptoKey baseKey, + unsigned long length); + + Promise importKey(KeyFormat format, + (BufferSource or JsonWebKey) keyData, + AlgorithmIdentifier algorithm, + boolean extractable, + sequence keyUsages ); + Promise 
exportKey(KeyFormat format, CryptoKey key); + + Promise wrapKey(KeyFormat format, + CryptoKey key, + CryptoKey wrappingKey, + AlgorithmIdentifier wrapAlgorithm); + Promise unwrapKey(KeyFormat format, + BufferSource wrappedKey, + CryptoKey unwrappingKey, + AlgorithmIdentifier unwrapAlgorithm, + AlgorithmIdentifier unwrappedKeyAlgorithm, + boolean extractable, + sequence keyUsages ); +}; + +dictionary RsaOtherPrimesInfo { + // The following fields are defined in Section 6.3.2.7 of JSON Web Algorithms + DOMString r; + DOMString d; + DOMString t; +}; + +dictionary JsonWebKey { + // The following fields are defined in Section 3.1 of JSON Web Key + DOMString kty; + DOMString use; + sequence key_ops; + DOMString alg; + + // The following fields are defined in JSON Web Key Parameters Registration + boolean ext; + + // The following fields are defined in Section 6 of JSON Web Algorithms + DOMString crv; + DOMString x; + DOMString y; + DOMString d; + DOMString n; + DOMString e; + DOMString p; + DOMString q; + DOMString dp; + DOMString dq; + DOMString qi; + sequence oth; + DOMString k; +}; + +typedef Uint8Array BigInteger; + +dictionary CryptoKeyPair { + CryptoKey publicKey; + CryptoKey privateKey; +}; + +dictionary RsaKeyGenParams : Algorithm { + required [EnforceRange] unsigned long modulusLength; + required BigInteger publicExponent; +}; + +dictionary RsaHashedKeyGenParams : RsaKeyGenParams { + required HashAlgorithmIdentifier hash; +}; + +dictionary RsaKeyAlgorithm : KeyAlgorithm { + required unsigned long modulusLength; + required BigInteger publicExponent; +}; + +dictionary RsaHashedKeyAlgorithm : RsaKeyAlgorithm { + required KeyAlgorithm hash; +}; + +dictionary RsaHashedImportParams : Algorithm { + required HashAlgorithmIdentifier hash; +}; + +dictionary RsaPssParams : Algorithm { + required [EnforceRange] unsigned long saltLength; +}; + +dictionary RsaOaepParams : Algorithm { + BufferSource label; +}; + +dictionary EcdsaParams : Algorithm { + required HashAlgorithmIdentifier hash; +}; + +typedef DOMString NamedCurve; + +dictionary EcKeyGenParams : Algorithm { + required NamedCurve namedCurve; +}; + +dictionary EcKeyAlgorithm : KeyAlgorithm { + required NamedCurve namedCurve; +}; + +dictionary EcKeyImportParams : Algorithm { + required NamedCurve namedCurve; +}; + +dictionary EcdhKeyDeriveParams : Algorithm { + required CryptoKey public; +}; + +dictionary AesCtrParams : Algorithm { + required BufferSource counter; + required [EnforceRange] octet length; +}; + +dictionary AesKeyAlgorithm : KeyAlgorithm { + required unsigned short length; +}; + +dictionary AesKeyGenParams : Algorithm { + required [EnforceRange] unsigned short length; +}; + +dictionary AesDerivedKeyParams : Algorithm { + required [EnforceRange] unsigned short length; +}; + +dictionary AesCbcParams : Algorithm { + required BufferSource iv; +}; + +dictionary AesGcmParams : Algorithm { + required BufferSource iv; + BufferSource additionalData; + [EnforceRange] octet tagLength; +}; + +dictionary HmacImportParams : Algorithm { + required HashAlgorithmIdentifier hash; + [EnforceRange] unsigned long length; +}; + +dictionary HmacKeyAlgorithm : KeyAlgorithm { + required KeyAlgorithm hash; + required unsigned long length; +}; + +dictionary HmacKeyGenParams : Algorithm { + required HashAlgorithmIdentifier hash; + [EnforceRange] unsigned long length; +}; + +dictionary HkdfParams : Algorithm { + required HashAlgorithmIdentifier hash; + required BufferSource salt; + required BufferSource info; +}; + +dictionary Pbkdf2Params : Algorithm { + 
required BufferSource salt; + required [EnforceRange] unsigned long iterations; + required HashAlgorithmIdentifier hash; +}; diff --git a/test/wpt/tests/interfaces/accelerometer.idl b/test/wpt/tests/interfaces/accelerometer.idl new file mode 100644 index 0000000..fc8fc07 --- /dev/null +++ b/test/wpt/tests/interfaces/accelerometer.idl @@ -0,0 +1,40 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Accelerometer (https://w3c.github.io/accelerometer/) + +[SecureContext, Exposed=Window] +interface Accelerometer : Sensor { + constructor(optional AccelerometerSensorOptions options = {}); + readonly attribute double? x; + readonly attribute double? y; + readonly attribute double? z; +}; + +enum AccelerometerLocalCoordinateSystem { "device", "screen" }; + +dictionary AccelerometerSensorOptions : SensorOptions { + AccelerometerLocalCoordinateSystem referenceFrame = "device"; +}; + +[SecureContext, Exposed=Window] +interface LinearAccelerationSensor : Accelerometer { + constructor(optional AccelerometerSensorOptions options = {}); +}; + +[SecureContext, Exposed=Window] +interface GravitySensor : Accelerometer { + constructor(optional AccelerometerSensorOptions options = {}); +}; + +dictionary AccelerometerReadingValues { + required double? x; + required double? y; + required double? z; +}; + +dictionary LinearAccelerationReadingValues : AccelerometerReadingValues { +}; + +dictionary GravityReadingValues : AccelerometerReadingValues { +}; diff --git a/test/wpt/tests/interfaces/ambient-light.idl b/test/wpt/tests/interfaces/ambient-light.idl new file mode 100644 index 0000000..6d9c8e0 --- /dev/null +++ b/test/wpt/tests/interfaces/ambient-light.idl @@ -0,0 +1,14 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Ambient Light Sensor (https://w3c.github.io/ambient-light/) + +[SecureContext, Exposed=Window] +interface AmbientLightSensor : Sensor { + constructor(optional SensorOptions sensorOptions = {}); + readonly attribute double? illuminance; +}; + +dictionary AmbientLightReadingValues { + required double? 
illuminance; +}; diff --git a/test/wpt/tests/interfaces/anchors.idl b/test/wpt/tests/interfaces/anchors.idl new file mode 100644 index 0000000..d8c5aa6 --- /dev/null +++ b/test/wpt/tests/interfaces/anchors.idl @@ -0,0 +1,37 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: WebXR Anchors Module (https://immersive-web.github.io/anchors/) + +[SecureContext, Exposed=Window] +interface XRAnchor { + readonly attribute XRSpace anchorSpace; + + Promise requestPersistentHandle(); + + undefined delete(); +}; + +partial interface XRFrame { + Promise createAnchor(XRRigidTransform pose, XRSpace space); +}; + +partial interface XRSession { + readonly attribute FrozenArray persistentAnchors; + + Promise restorePersistentAnchor(DOMString uuid); + Promise deletePersistentAnchor(DOMString uuid); +}; + +partial interface XRHitTestResult { + Promise createAnchor(); +}; + +[Exposed=Window] +interface XRAnchorSet { + readonly setlike; +}; + +partial interface XRFrame { + [SameObject] readonly attribute XRAnchorSet trackedAnchors; +}; diff --git a/test/wpt/tests/interfaces/attribution-reporting-api.idl b/test/wpt/tests/interfaces/attribution-reporting-api.idl new file mode 100644 index 0000000..ed4497b --- /dev/null +++ b/test/wpt/tests/interfaces/attribution-reporting-api.idl @@ -0,0 +1,26 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Attribution Reporting (https://wicg.github.io/attribution-reporting-api/) + +interface mixin HTMLAttributionSrcElementUtils { + [CEReactions, SecureContext] attribute USVString attributionSrc; +}; + +HTMLAnchorElement includes HTMLAttributionSrcElementUtils; +HTMLImageElement includes HTMLAttributionSrcElementUtils; +HTMLScriptElement includes HTMLAttributionSrcElementUtils; + +dictionary AttributionReportingRequestOptions { + required boolean eventSourceEligible; + required boolean triggerEligible; +}; + +partial dictionary RequestInit { + AttributionReportingRequestOptions attributionReporting; +}; + +partial interface XMLHttpRequest { + [SecureContext] + undefined setAttributionReporting(AttributionReportingRequestOptions options); +}; diff --git a/test/wpt/tests/interfaces/audio-output.idl b/test/wpt/tests/interfaces/audio-output.idl new file mode 100644 index 0000000..80ceb22 --- /dev/null +++ b/test/wpt/tests/interfaces/audio-output.idl @@ -0,0 +1,17 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Audio Output Devices API (https://w3c.github.io/mediacapture-output/) + +partial interface HTMLMediaElement { + [SecureContext] readonly attribute DOMString sinkId; + [SecureContext] Promise setSinkId (DOMString sinkId); +}; + +partial interface MediaDevices { + Promise selectAudioOutput(optional AudioOutputOptions options = {}); +}; + +dictionary AudioOutputOptions { + DOMString deviceId = ""; +}; diff --git a/test/wpt/tests/interfaces/autoplay-detection.idl b/test/wpt/tests/interfaces/autoplay-detection.idl new file mode 100644 index 0000000..cd0884f --- /dev/null +++ b/test/wpt/tests/interfaces/autoplay-detection.idl @@ -0,0 +1,19 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Autoplay Policy Detection (https://w3c.github.io/autoplay/) + +enum AutoplayPolicy { + "allowed", + 
"allowed-muted", + "disallowed" +}; + +enum AutoplayPolicyMediaType { "mediaelement", "audiocontext" }; + +[Exposed=Window] +partial interface Navigator { + AutoplayPolicy getAutoplayPolicy(AutoplayPolicyMediaType type); + AutoplayPolicy getAutoplayPolicy(HTMLMediaElement element); + AutoplayPolicy getAutoplayPolicy(AudioContext context); +}; diff --git a/test/wpt/tests/interfaces/background-fetch.idl b/test/wpt/tests/interfaces/background-fetch.idl new file mode 100644 index 0000000..993bd8b --- /dev/null +++ b/test/wpt/tests/interfaces/background-fetch.idl @@ -0,0 +1,89 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Background Fetch (https://wicg.github.io/background-fetch/) + +partial interface ServiceWorkerGlobalScope { + attribute EventHandler onbackgroundfetchsuccess; + attribute EventHandler onbackgroundfetchfail; + attribute EventHandler onbackgroundfetchabort; + attribute EventHandler onbackgroundfetchclick; +}; + +partial interface ServiceWorkerRegistration { + readonly attribute BackgroundFetchManager backgroundFetch; +}; + +[Exposed=(Window,Worker)] +interface BackgroundFetchManager { + Promise fetch(DOMString id, (RequestInfo or sequence) requests, optional BackgroundFetchOptions options = {}); + Promise get(DOMString id); + Promise> getIds(); +}; + +dictionary BackgroundFetchUIOptions { + sequence icons; + DOMString title; +}; + +dictionary BackgroundFetchOptions : BackgroundFetchUIOptions { + unsigned long long downloadTotal = 0; +}; + +[Exposed=(Window,Worker)] +interface BackgroundFetchRegistration : EventTarget { + readonly attribute DOMString id; + readonly attribute unsigned long long uploadTotal; + readonly attribute unsigned long long uploaded; + readonly attribute unsigned long long downloadTotal; + readonly attribute unsigned long long downloaded; + readonly attribute BackgroundFetchResult result; + readonly attribute BackgroundFetchFailureReason failureReason; + readonly attribute boolean recordsAvailable; + + attribute EventHandler onprogress; + + Promise abort(); + Promise match(RequestInfo request, optional CacheQueryOptions options = {}); + Promise> matchAll(optional RequestInfo request, optional CacheQueryOptions options = {}); +}; + +enum BackgroundFetchResult { "", "success", "failure" }; + +enum BackgroundFetchFailureReason { + // The background fetch has not completed yet, or was successful. + "", + // The operation was aborted by the user, or abort() was called. + "aborted", + // A response had a not-ok-status. + "bad-status", + // A fetch failed for other reasons, e.g. CORS, MIX, an invalid partial response, + // or a general network failure for a fetch that cannot be retried. + "fetch-error", + // Storage quota was reached during the operation. + "quota-exceeded", + // The provided downloadTotal was exceeded. 
+ "download-total-exceeded" +}; + +[Exposed=(Window,Worker)] +interface BackgroundFetchRecord { + readonly attribute Request request; + readonly attribute Promise responseReady; +}; + +[Exposed=ServiceWorker] +interface BackgroundFetchEvent : ExtendableEvent { + constructor(DOMString type, BackgroundFetchEventInit init); + readonly attribute BackgroundFetchRegistration registration; +}; + +dictionary BackgroundFetchEventInit : ExtendableEventInit { + required BackgroundFetchRegistration registration; +}; + +[Exposed=ServiceWorker] +interface BackgroundFetchUpdateUIEvent : BackgroundFetchEvent { + constructor(DOMString type, BackgroundFetchEventInit init); + Promise updateUI(optional BackgroundFetchUIOptions options = {}); +}; diff --git a/test/wpt/tests/interfaces/background-sync.idl b/test/wpt/tests/interfaces/background-sync.idl new file mode 100644 index 0000000..79a13a6 --- /dev/null +++ b/test/wpt/tests/interfaces/background-sync.idl @@ -0,0 +1,30 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Web Background Synchronization (https://wicg.github.io/background-sync/spec/) + +partial interface ServiceWorkerRegistration { + readonly attribute SyncManager sync; +}; + +[Exposed=(Window,Worker)] +interface SyncManager { + Promise register(DOMString tag); + Promise> getTags(); +}; + +partial interface ServiceWorkerGlobalScope { + attribute EventHandler onsync; +}; + +[Exposed=ServiceWorker] +interface SyncEvent : ExtendableEvent { + constructor(DOMString type, SyncEventInit init); + readonly attribute DOMString tag; + readonly attribute boolean lastChance; +}; + +dictionary SyncEventInit : ExtendableEventInit { + required DOMString tag; + boolean lastChance = false; +}; diff --git a/test/wpt/tests/interfaces/badging.idl b/test/wpt/tests/interfaces/badging.idl new file mode 100644 index 0000000..8b401e0 --- /dev/null +++ b/test/wpt/tests/interfaces/badging.idl @@ -0,0 +1,15 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Badging API (https://w3c.github.io/badging/) + +[SecureContext] +interface mixin NavigatorBadge { + Promise setAppBadge( + optional [EnforceRange] unsigned long long contents + ); + Promise clearAppBadge(); +}; + +Navigator includes NavigatorBadge; +WorkerNavigator includes NavigatorBadge; diff --git a/test/wpt/tests/interfaces/battery-status.idl b/test/wpt/tests/interfaces/battery-status.idl new file mode 100644 index 0000000..2d042db --- /dev/null +++ b/test/wpt/tests/interfaces/battery-status.idl @@ -0,0 +1,21 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Battery Status API (https://w3c.github.io/battery/) + +[SecureContext] +partial interface Navigator { + Promise getBattery(); +}; + +[SecureContext, Exposed=Window] +interface BatteryManager : EventTarget { + readonly attribute boolean charging; + readonly attribute unrestricted double chargingTime; + readonly attribute unrestricted double dischargingTime; + readonly attribute double level; + attribute EventHandler onchargingchange; + attribute EventHandler onchargingtimechange; + attribute EventHandler ondischargingtimechange; + attribute EventHandler onlevelchange; +}; diff --git a/test/wpt/tests/interfaces/beacon.idl b/test/wpt/tests/interfaces/beacon.idl new file mode 100644 index 0000000..103a999 --- /dev/null +++ 
b/test/wpt/tests/interfaces/beacon.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Beacon (https://w3c.github.io/beacon/) + +partial interface Navigator { + boolean sendBeacon(USVString url, optional BodyInit? data = null); +}; diff --git a/test/wpt/tests/interfaces/capture-handle-identity.idl b/test/wpt/tests/interfaces/capture-handle-identity.idl new file mode 100644 index 0000000..37b2c61 --- /dev/null +++ b/test/wpt/tests/interfaces/capture-handle-identity.idl @@ -0,0 +1,27 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Capture Handle - Bootstrapping Collaboration when Screensharing (https://w3c.github.io/mediacapture-handle/identity/) + +dictionary CaptureHandleConfig { + boolean exposeOrigin = false; + DOMString handle = ""; + sequence permittedOrigins = []; +}; + +partial interface MediaDevices { + undefined setCaptureHandleConfig(optional CaptureHandleConfig config = {}); +}; + +dictionary CaptureHandle { + DOMString origin; + DOMString handle; +}; + +partial interface MediaStreamTrack { + CaptureHandle? getCaptureHandle(); +}; + +partial interface MediaStreamTrack { + attribute EventHandler oncapturehandlechange; +}; diff --git a/test/wpt/tests/interfaces/captured-mouse-events.tentative.idl b/test/wpt/tests/interfaces/captured-mouse-events.tentative.idl new file mode 100644 index 0000000..7b081cd --- /dev/null +++ b/test/wpt/tests/interfaces/captured-mouse-events.tentative.idl @@ -0,0 +1,25 @@ +// https://screen-share.github.io/mouse-events/ + +enum CaptureStartFocusBehavior { + "focus-captured-surface", + "no-focus-change" +}; + +[Exposed=Window, SecureContext] +interface CaptureController : EventTarget { + constructor(); + undefined setFocusBehavior(CaptureStartFocusBehavior focusBehavior); + attribute EventHandler oncapturedmousechange; +}; + +[Exposed=Window] +interface CapturedMouseEvent : Event { + constructor(DOMString type, optional CapturedMouseEventInit eventInitDict = {}); + readonly attribute long surfaceX; + readonly attribute long surfaceY; +}; + +dictionary CapturedMouseEventInit : EventInit { + long surfaceX = -1; + long surfaceY = -1; +}; diff --git a/test/wpt/tests/interfaces/clipboard-apis.idl b/test/wpt/tests/interfaces/clipboard-apis.idl new file mode 100644 index 0000000..3f2c9ba --- /dev/null +++ b/test/wpt/tests/interfaces/clipboard-apis.idl @@ -0,0 +1,51 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Clipboard API and events (https://w3c.github.io/clipboard-apis/) + +dictionary ClipboardEventInit : EventInit { + DataTransfer? clipboardData = null; +}; + +[Exposed=Window] +interface ClipboardEvent : Event { + constructor(DOMString type, optional ClipboardEventInit eventInitDict = {}); + readonly attribute DataTransfer? 
clipboardData; +}; + +partial interface Navigator { + [SecureContext, SameObject] readonly attribute Clipboard clipboard; +}; + +typedef Promise<(DOMString or Blob)> ClipboardItemData; + +[SecureContext, Exposed=Window] +interface ClipboardItem { + constructor(record<DOMString, ClipboardItemData> items, + optional ClipboardItemOptions options = {}); + + readonly attribute PresentationStyle presentationStyle; + readonly attribute FrozenArray<DOMString> types; + + Promise<Blob> getType(DOMString type); +}; + +enum PresentationStyle { "unspecified", "inline", "attachment" }; + +dictionary ClipboardItemOptions { + PresentationStyle presentationStyle = "unspecified"; +}; + +typedef sequence<ClipboardItem> ClipboardItems; + +[SecureContext, Exposed=Window] +interface Clipboard : EventTarget { + Promise<ClipboardItems> read(); + Promise<DOMString> readText(); + Promise<undefined> write(ClipboardItems data); + Promise<undefined> writeText(DOMString data); +}; + +dictionary ClipboardPermissionDescriptor : PermissionDescriptor { + boolean allowWithoutGesture = false; +}; diff --git a/test/wpt/tests/interfaces/close-watcher.idl b/test/wpt/tests/interfaces/close-watcher.idl new file mode 100644 index 0000000..de7940c --- /dev/null +++ b/test/wpt/tests/interfaces/close-watcher.idl @@ -0,0 +1,19 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Close Watcher API (https://wicg.github.io/close-watcher/) + +[Exposed=Window] +interface CloseWatcher : EventTarget { + constructor(optional CloseWatcherOptions options = {}); + + undefined destroy(); + undefined close(); + + attribute EventHandler oncancel; + attribute EventHandler onclose; +}; + +dictionary CloseWatcherOptions { + AbortSignal signal; +}; diff --git a/test/wpt/tests/interfaces/compat.idl b/test/wpt/tests/interfaces/compat.idl new file mode 100644 index 0000000..8106c2d --- /dev/null +++ b/test/wpt/tests/interfaces/compat.idl @@ -0,0 +1,13 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Compatibility Standard (https://compat.spec.whatwg.org/) + +partial interface Window { + readonly attribute short orientation; + attribute EventHandler onorientationchange; +}; + +partial interface HTMLBodyElement { + attribute EventHandler onorientationchange; +}; diff --git a/test/wpt/tests/interfaces/compression.idl b/test/wpt/tests/interfaces/compression.idl new file mode 100644 index 0000000..7525d7c --- /dev/null +++ b/test/wpt/tests/interfaces/compression.idl @@ -0,0 +1,22 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Compression Streams (https://wicg.github.io/compression/) + +enum CompressionFormat { + "deflate", + "deflate-raw", + "gzip", +}; + +[Exposed=*] +interface CompressionStream { + constructor(CompressionFormat format); +}; +CompressionStream includes GenericTransformStream; + +[Exposed=*] +interface DecompressionStream { + constructor(CompressionFormat format); +}; +DecompressionStream includes GenericTransformStream; diff --git a/test/wpt/tests/interfaces/compute-pressure.idl b/test/wpt/tests/interfaces/compute-pressure.idl new file mode 100644 index 0000000..3e35dc4 --- /dev/null +++ b/test/wpt/tests/interfaces/compute-pressure.idl @@ -0,0 +1,37 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Compute Pressure Level 1 (https://w3c.github.io/compute-pressure/) +
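The CompressionStream and DecompressionStream interfaces above are plain transform streams. A short, non-normative JavaScript sketch of a gzip round trip, assuming a runtime that exposes these globals (e.g. modern browsers or recent Node.js):

  // Compresses and then decompresses a string, resolving to the original text.
  async function gzipRoundTrip(text) {
    const source = new Blob([text]).stream();                            // ReadableStream of bytes
    const gzipped = source.pipeThrough(new CompressionStream('gzip'));
    const restored = gzipped.pipeThrough(new DecompressionStream('gzip'));
    return new Response(restored).text();                                // e.g. gzipRoundTrip('hello') -> 'hello'
  }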
+enum PressureSource { "thermals", "cpu" }; + +enum PressureState { "nominal", "fair", "serious", "critical" }; + +callback PressureUpdateCallback = undefined ( + sequence<PressureRecord> changes, + PressureObserver observer +); + +[Exposed=(DedicatedWorker,SharedWorker,Window), SecureContext] +interface PressureObserver { + constructor(PressureUpdateCallback callback, optional PressureObserverOptions options = {}); + + Promise<undefined> observe(PressureSource source); + undefined unobserve(PressureSource source); + undefined disconnect(); + sequence<PressureRecord> takeRecords(); + + [SameObject] static readonly attribute FrozenArray<PressureSource> supportedSources; +}; + +[Exposed=(DedicatedWorker,SharedWorker,Window), SecureContext] +interface PressureRecord { + readonly attribute PressureSource source; + readonly attribute PressureState state; + readonly attribute DOMHighResTimeStamp time; + [Default] object toJSON(); +}; + +dictionary PressureObserverOptions { + double sampleRate = 1.0; +}; diff --git a/test/wpt/tests/interfaces/console.idl b/test/wpt/tests/interfaces/console.idl new file mode 100644 index 0000000..fdf1d0d --- /dev/null +++ b/test/wpt/tests/interfaces/console.idl @@ -0,0 +1,34 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Console Standard (https://console.spec.whatwg.org/) + +[Exposed=*] +namespace console { // but see namespace object requirements below + // Logging + undefined assert(optional boolean condition = false, any... data); + undefined clear(); + undefined debug(any... data); + undefined error(any... data); + undefined info(any... data); + undefined log(any... data); + undefined table(optional any tabularData, optional sequence<DOMString> properties); + undefined trace(any... data); + undefined warn(any... data); + undefined dir(optional any item, optional object? options); + undefined dirxml(any... data); + + // Counting + undefined count(optional DOMString label = "default"); + undefined countReset(optional DOMString label = "default"); + + // Grouping + undefined group(any... data); + undefined groupCollapsed(any... data); + undefined groupEnd(); + + // Timing + undefined time(optional DOMString label = "default"); + undefined timeLog(optional DOMString label = "default", any...
diff --git a/test/wpt/tests/interfaces/contact-picker.idl b/test/wpt/tests/interfaces/contact-picker.idl
new file mode 100644
index 0000000..0119d0e
--- /dev/null
+++ b/test/wpt/tests/interfaces/contact-picker.idl
@@ -0,0 +1,44 @@
+// GENERATED CONTENT - DO NOT EDIT
+// Content was automatically extracted by Reffy into webref
+// (https://github.com/w3c/webref)
+// Source: Contact Picker API (https://w3c.github.io/contact-picker/)
+
+[Exposed=Window]
+partial interface Navigator {
+  [SecureContext, SameObject] readonly attribute ContactsManager contacts;
+};
+
+enum ContactProperty { "address", "email", "icon", "name", "tel" };
+
+[Exposed=Window]
+interface ContactAddress {
+  [Default] object toJSON();
+  readonly attribute DOMString city;
+  readonly attribute DOMString country;
+  readonly attribute DOMString dependentLocality;
+  readonly attribute DOMString organization;
+  readonly attribute DOMString phone;
+  readonly attribute DOMString postalCode;
+  readonly attribute DOMString recipient;
+  readonly attribute DOMString region;
+  readonly attribute DOMString sortingCode;
+  readonly attribute FrozenArray<DOMString> addressLine;
+};
+
+dictionary ContactInfo {
+  sequence<ContactAddress> address;
+  sequence<USVString> email;
+  sequence<Blob> icon;
+  sequence<USVString> name;
+  sequence<USVString> tel;
+};
+
+dictionary ContactsSelectOptions {
+  boolean multiple = false;
+};
+
+[Exposed=Window,SecureContext]
+interface ContactsManager {
+  Promise<sequence<ContactProperty>> getProperties();
+  Promise<sequence<ContactInfo>> select(sequence<ContactProperty> properties, optional ContactsSelectOptions options = {});
+};
diff --git a/test/wpt/tests/interfaces/content-index.idl b/test/wpt/tests/interfaces/content-index.idl
new file mode 100644
index 0000000..177c5b9
--- /dev/null
+++ b/test/wpt/tests/interfaces/content-index.idl
@@ -0,0 +1,46 @@
+// GENERATED CONTENT - DO NOT EDIT
+// Content was automatically extracted by Reffy into webref
+// (https://github.com/w3c/webref)
+// Source: Content Index (https://wicg.github.io/content-index/spec/)
+
+partial interface ServiceWorkerGlobalScope {
+  attribute EventHandler oncontentdelete;
+};
+
+partial interface ServiceWorkerRegistration {
+  [SameObject] readonly attribute ContentIndex index;
+};
+
+enum ContentCategory {
+  "",
+  "homepage",
+  "article",
+  "video",
+  "audio",
+};
+
+dictionary ContentDescription {
+  required DOMString id;
+  required DOMString title;
+  required DOMString description;
+  ContentCategory category = "";
+  sequence<ImageResource> icons = [];
+  required USVString url;
+};
+
+[Exposed=(Window,Worker)]
+interface ContentIndex {
+  Promise<undefined> add(ContentDescription description);
+  Promise<undefined> delete(DOMString id);
+  Promise<sequence<ContentDescription>> getAll();
+};
+
+dictionary ContentIndexEventInit : ExtendableEventInit {
+  required DOMString id;
+};
+
+[Exposed=ServiceWorker]
+interface ContentIndexEvent : ExtendableEvent {
+  constructor(DOMString type, ContentIndexEventInit init);
+  readonly attribute DOMString id;
+};
diff --git a/test/wpt/tests/interfaces/cookie-store.idl b/test/wpt/tests/interfaces/cookie-store.idl
new file mode 100644
index 0000000..f44b4c6
--- /dev/null
+++ b/test/wpt/tests/interfaces/cookie-store.idl
@@ -0,0 +1,110 @@
+// GENERATED CONTENT - DO NOT EDIT
+// Content was automatically extracted by Reffy into webref
+// (https://github.com/w3c/webref)
+// Source: Cookie Store API (https://wicg.github.io/cookie-store/)
+
+[Exposed=(ServiceWorker,Window),
+ SecureContext]
+interface CookieStore : EventTarget {
+  Promise<CookieListItem?> get(USVString name);
+  Promise<CookieListItem?> get(optional CookieStoreGetOptions options = {});
+
+  Promise<CookieList>
getAll(USVString name); + Promise getAll(optional CookieStoreGetOptions options = {}); + + Promise set(USVString name, USVString value); + Promise set(CookieInit options); + + Promise delete(USVString name); + Promise delete(CookieStoreDeleteOptions options); + + [Exposed=Window] + attribute EventHandler onchange; +}; + +dictionary CookieStoreGetOptions { + USVString name; + USVString url; +}; + +enum CookieSameSite { + "strict", + "lax", + "none" +}; + +dictionary CookieInit { + required USVString name; + required USVString value; + DOMHighResTimeStamp? expires = null; + USVString? domain = null; + USVString path = "/"; + CookieSameSite sameSite = "strict"; +}; + +dictionary CookieStoreDeleteOptions { + required USVString name; + USVString? domain = null; + USVString path = "/"; +}; + +dictionary CookieListItem { + USVString name; + USVString value; + USVString? domain; + USVString path; + DOMHighResTimeStamp? expires; + boolean secure; + CookieSameSite sameSite; +}; + +typedef sequence CookieList; + +[Exposed=(ServiceWorker,Window), + SecureContext] +interface CookieStoreManager { + Promise subscribe(sequence subscriptions); + Promise> getSubscriptions(); + Promise unsubscribe(sequence subscriptions); +}; + +[Exposed=(ServiceWorker,Window)] +partial interface ServiceWorkerRegistration { + [SameObject] readonly attribute CookieStoreManager cookies; +}; + +[Exposed=Window, + SecureContext] +interface CookieChangeEvent : Event { + constructor(DOMString type, optional CookieChangeEventInit eventInitDict = {}); + [SameObject] readonly attribute FrozenArray changed; + [SameObject] readonly attribute FrozenArray deleted; +}; + +dictionary CookieChangeEventInit : EventInit { + CookieList changed; + CookieList deleted; +}; + +[Exposed=ServiceWorker] +interface ExtendableCookieChangeEvent : ExtendableEvent { + constructor(DOMString type, optional ExtendableCookieChangeEventInit eventInitDict = {}); + [SameObject] readonly attribute FrozenArray changed; + [SameObject] readonly attribute FrozenArray deleted; +}; + +dictionary ExtendableCookieChangeEventInit : ExtendableEventInit { + CookieList changed; + CookieList deleted; +}; + +[SecureContext] +partial interface Window { + [SameObject] readonly attribute CookieStore cookieStore; +}; + +partial interface ServiceWorkerGlobalScope { + [SameObject] readonly attribute CookieStore cookieStore; + + attribute EventHandler oncookiechange; +}; diff --git a/test/wpt/tests/interfaces/credential-management.idl b/test/wpt/tests/interfaces/credential-management.idl new file mode 100644 index 0000000..e9fab13 --- /dev/null +++ b/test/wpt/tests/interfaces/credential-management.idl @@ -0,0 +1,105 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Credential Management Level 1 (https://w3c.github.io/webappsec-credential-management/) + +[Exposed=Window, SecureContext] +interface Credential { + readonly attribute USVString id; + readonly attribute DOMString type; + static Promise isConditionalMediationAvailable(); +}; + +[SecureContext] +interface mixin CredentialUserData { + readonly attribute USVString name; + readonly attribute USVString iconURL; +}; + +partial interface Navigator { + [SecureContext, SameObject] readonly attribute CredentialsContainer credentials; +}; + +[Exposed=Window, SecureContext] +interface CredentialsContainer { + Promise get(optional CredentialRequestOptions options = {}); + Promise store(Credential credential); + Promise create(optional 
CredentialCreationOptions options = {}); + Promise preventSilentAccess(); +}; + +dictionary CredentialData { + required USVString id; +}; + +dictionary CredentialRequestOptions { + CredentialMediationRequirement mediation = "optional"; + AbortSignal signal; +}; + +enum CredentialMediationRequirement { + "silent", + "optional", + "conditional", + "required" +}; + +dictionary CredentialCreationOptions { + AbortSignal signal; +}; + +[Exposed=Window, + SecureContext] +interface PasswordCredential : Credential { + constructor(HTMLFormElement form); + constructor(PasswordCredentialData data); + readonly attribute USVString password; +}; +PasswordCredential includes CredentialUserData; + +partial dictionary CredentialRequestOptions { + boolean password = false; +}; + +dictionary PasswordCredentialData : CredentialData { + USVString name; + USVString iconURL; + required USVString origin; + required USVString password; +}; + +typedef (PasswordCredentialData or HTMLFormElement) PasswordCredentialInit; + +partial dictionary CredentialCreationOptions { + PasswordCredentialInit password; +}; + +[Exposed=Window, + SecureContext] +interface FederatedCredential : Credential { + constructor(FederatedCredentialInit data); + readonly attribute USVString provider; + readonly attribute DOMString? protocol; +}; +FederatedCredential includes CredentialUserData; + +dictionary FederatedCredentialRequestOptions { + sequence providers; + sequence protocols; +}; + +partial dictionary CredentialRequestOptions { + FederatedCredentialRequestOptions federated; +}; + +dictionary FederatedCredentialInit : CredentialData { + USVString name; + USVString iconURL; + required USVString origin; + required USVString provider; + DOMString protocol; +}; + +partial dictionary CredentialCreationOptions { + FederatedCredentialInit federated; +}; diff --git a/test/wpt/tests/interfaces/csp-embedded-enforcement.idl b/test/wpt/tests/interfaces/csp-embedded-enforcement.idl new file mode 100644 index 0000000..a980630 --- /dev/null +++ b/test/wpt/tests/interfaces/csp-embedded-enforcement.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Content Security Policy: Embedded Enforcement (https://w3c.github.io/webappsec-cspee/) + +partial interface HTMLIFrameElement { + [CEReactions] attribute DOMString csp; +}; diff --git a/test/wpt/tests/interfaces/csp-next.idl b/test/wpt/tests/interfaces/csp-next.idl new file mode 100644 index 0000000..d94b36c --- /dev/null +++ b/test/wpt/tests/interfaces/csp-next.idl @@ -0,0 +1,21 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: Scripting Policy (https://wicg.github.io/csp-next/scripting-policy.html) + +enum ScriptingPolicyViolationType { + "externalScript", + "inlineScript", + "inlineEventHandler", + "eval" +}; + +[Exposed=(Window,Worker), SecureContext] +interface ScriptingPolicyReportBody : ReportBody { + [Default] object toJSON(); + readonly attribute DOMString violationType; + readonly attribute USVString? violationURL; + readonly attribute USVString? 
violationSample; + readonly attribute unsigned long lineno; + readonly attribute unsigned long colno; +}; diff --git a/test/wpt/tests/interfaces/css-anchor-position.idl b/test/wpt/tests/interfaces/css-anchor-position.idl new file mode 100644 index 0000000..c5da3f4 --- /dev/null +++ b/test/wpt/tests/interfaces/css-anchor-position.idl @@ -0,0 +1,11 @@ +// Source: CSS Anchor Positioning (https://drafts.csswg.org/css-anchor-position-1/) + +[Exposed=Window] +interface CSSPositionFallbackRule : CSSGroupingRule { + readonly attribute CSSOMString name; +}; + +[Exposed=Window] +interface CSSTryRule : CSSRule { + [SameObject, PutForwards=cssText] readonly attribute CSSStyleDeclaration style; +}; diff --git a/test/wpt/tests/interfaces/css-animation-worklet.idl b/test/wpt/tests/interfaces/css-animation-worklet.idl new file mode 100644 index 0000000..82d34a3 --- /dev/null +++ b/test/wpt/tests/interfaces/css-animation-worklet.idl @@ -0,0 +1,37 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Animation Worklet API (https://drafts.css-houdini.org/css-animationworklet-1/) + +[Exposed=Window] +partial namespace CSS { + [SameObject] readonly attribute Worklet animationWorklet; +}; + +[ Global=(Worklet,AnimationWorklet), Exposed=AnimationWorklet ] +interface AnimationWorkletGlobalScope : WorkletGlobalScope { + undefined registerAnimator(DOMString name, AnimatorInstanceConstructor animatorCtor); +}; + +callback AnimatorInstanceConstructor = any (any options, optional any state); + +[ Exposed=AnimationWorklet ] +interface WorkletAnimationEffect { + EffectTiming getTiming(); + ComputedEffectTiming getComputedTiming(); + attribute double? localTime; +}; + +[Exposed=Window] +interface WorkletAnimation : Animation { + constructor(DOMString animatorName, + optional (AnimationEffect or sequence)? effects = null, + optional AnimationTimeline? 
timeline, + optional any options); + readonly attribute DOMString animatorName; +}; + +[Exposed=AnimationWorklet] +interface WorkletGroupEffect { + sequence getChildren(); +}; diff --git a/test/wpt/tests/interfaces/css-animations-2.idl b/test/wpt/tests/interfaces/css-animations-2.idl new file mode 100644 index 0000000..84f138e --- /dev/null +++ b/test/wpt/tests/interfaces/css-animations-2.idl @@ -0,0 +1,9 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Animations Level 2 (https://drafts.csswg.org/css-animations-2/) + +[Exposed=Window] +interface CSSAnimation : Animation { + readonly attribute CSSOMString animationName; +}; diff --git a/test/wpt/tests/interfaces/css-animations.idl b/test/wpt/tests/interfaces/css-animations.idl new file mode 100644 index 0000000..6620e01 --- /dev/null +++ b/test/wpt/tests/interfaces/css-animations.idl @@ -0,0 +1,47 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Animations Level 1 (https://drafts.csswg.org/css-animations-1/) + +[Exposed=Window] +interface AnimationEvent : Event { + constructor(CSSOMString type, optional AnimationEventInit animationEventInitDict = {}); + readonly attribute CSSOMString animationName; + readonly attribute double elapsedTime; + readonly attribute CSSOMString pseudoElement; +}; +dictionary AnimationEventInit : EventInit { + CSSOMString animationName = ""; + double elapsedTime = 0.0; + CSSOMString pseudoElement = ""; +}; + +partial interface CSSRule { + const unsigned short KEYFRAMES_RULE = 7; + const unsigned short KEYFRAME_RULE = 8; +}; + +[Exposed=Window] +interface CSSKeyframeRule : CSSRule { + attribute CSSOMString keyText; + [SameObject, PutForwards=cssText] readonly attribute CSSStyleDeclaration style; +}; + +[Exposed=Window] +interface CSSKeyframesRule : CSSRule { + attribute CSSOMString name; + readonly attribute CSSRuleList cssRules; + readonly attribute unsigned long length; + + getter CSSKeyframeRule (unsigned long index); + undefined appendRule(CSSOMString rule); + undefined deleteRule(CSSOMString select); + CSSKeyframeRule? findRule(CSSOMString select); +}; + +partial interface mixin GlobalEventHandlers { + attribute EventHandler onanimationstart; + attribute EventHandler onanimationiteration; + attribute EventHandler onanimationend; + attribute EventHandler onanimationcancel; +}; diff --git a/test/wpt/tests/interfaces/css-cascade-6.idl b/test/wpt/tests/interfaces/css-cascade-6.idl new file mode 100644 index 0000000..3bdf6ba --- /dev/null +++ b/test/wpt/tests/interfaces/css-cascade-6.idl @@ -0,0 +1,10 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Cascading and Inheritance Level 6 (https://drafts.csswg.org/css-cascade-6/) + +[Exposed=Window] +interface CSSScopeRule : CSSGroupingRule { + readonly attribute CSSOMString? start; + readonly attribute CSSOMString? 
end; +}; diff --git a/test/wpt/tests/interfaces/css-cascade.idl b/test/wpt/tests/interfaces/css-cascade.idl new file mode 100644 index 0000000..0dd9969 --- /dev/null +++ b/test/wpt/tests/interfaces/css-cascade.idl @@ -0,0 +1,14 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Cascading and Inheritance Level 5 (https://drafts.csswg.org/css-cascade-5/) + +[Exposed=Window] +interface CSSLayerBlockRule : CSSGroupingRule { + readonly attribute CSSOMString name; +}; + +[Exposed=Window] +interface CSSLayerStatementRule : CSSRule { + readonly attribute FrozenArray nameList; +}; diff --git a/test/wpt/tests/interfaces/css-color-5.idl b/test/wpt/tests/interfaces/css-color-5.idl new file mode 100644 index 0000000..6f5c6df --- /dev/null +++ b/test/wpt/tests/interfaces/css-color-5.idl @@ -0,0 +1,12 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Color Module Level 5 (https://drafts.csswg.org/css-color-5/) + +[Exposed=Window] +interface CSSColorProfileRule : CSSRule { + readonly attribute CSSOMString name ; + readonly attribute CSSOMString src ; + readonly attribute CSSOMString renderingIntent ; + readonly attribute CSSOMString components ; +}; diff --git a/test/wpt/tests/interfaces/css-conditional.idl b/test/wpt/tests/interfaces/css-conditional.idl new file mode 100644 index 0000000..d87f305 --- /dev/null +++ b/test/wpt/tests/interfaces/css-conditional.idl @@ -0,0 +1,27 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Conditional Rules Module Level 3 (https://drafts.csswg.org/css-conditional-3/) + +partial interface CSSRule { + const unsigned short SUPPORTS_RULE = 12; +}; + +[Exposed=Window] +interface CSSConditionRule : CSSGroupingRule { + readonly attribute CSSOMString conditionText; +}; + +[Exposed=Window] +interface CSSMediaRule : CSSConditionRule { + [SameObject, PutForwards=mediaText] readonly attribute MediaList media; +}; + +[Exposed=Window] +interface CSSSupportsRule : CSSConditionRule { +}; + +partial namespace CSS { + boolean supports(CSSOMString property, CSSOMString value); + boolean supports(CSSOMString conditionText); +}; diff --git a/test/wpt/tests/interfaces/css-contain-3.idl b/test/wpt/tests/interfaces/css-contain-3.idl new file mode 100644 index 0000000..0ecf380 --- /dev/null +++ b/test/wpt/tests/interfaces/css-contain-3.idl @@ -0,0 +1,10 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Containment Module Level 3 (https://drafts.csswg.org/css-contain-3/) + +[Exposed=Window] +interface CSSContainerRule : CSSConditionRule { + readonly attribute CSSOMString containerName; + readonly attribute CSSOMString containerQuery; +}; diff --git a/test/wpt/tests/interfaces/css-contain.idl b/test/wpt/tests/interfaces/css-contain.idl new file mode 100644 index 0000000..be2137a --- /dev/null +++ b/test/wpt/tests/interfaces/css-contain.idl @@ -0,0 +1,13 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Containment Module Level 2 (https://drafts.csswg.org/css-contain-2/) + +[Exposed=Window] +interface ContentVisibilityAutoStateChangeEvent : Event { + constructor(DOMString type, optional 
ContentVisibilityAutoStateChangeEventInit eventInitDict = {}); + readonly attribute boolean skipped; +}; +dictionary ContentVisibilityAutoStateChangeEventInit : EventInit { + boolean skipped = false; +}; diff --git a/test/wpt/tests/interfaces/css-counter-styles.idl b/test/wpt/tests/interfaces/css-counter-styles.idl new file mode 100644 index 0000000..f679e0f --- /dev/null +++ b/test/wpt/tests/interfaces/css-counter-styles.idl @@ -0,0 +1,23 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Counter Styles Level 3 (https://drafts.csswg.org/css-counter-styles-3/) + +partial interface CSSRule { + const unsigned short COUNTER_STYLE_RULE = 11; +}; + +[Exposed=Window] +interface CSSCounterStyleRule : CSSRule { + attribute CSSOMString name; + attribute CSSOMString system; + attribute CSSOMString symbols; + attribute CSSOMString additiveSymbols; + attribute CSSOMString negative; + attribute CSSOMString prefix; + attribute CSSOMString suffix; + attribute CSSOMString range; + attribute CSSOMString pad; + attribute CSSOMString speakAs; + attribute CSSOMString fallback; +}; diff --git a/test/wpt/tests/interfaces/css-font-loading.idl b/test/wpt/tests/interfaces/css-font-loading.idl new file mode 100644 index 0000000..6f2e16d --- /dev/null +++ b/test/wpt/tests/interfaces/css-font-loading.idl @@ -0,0 +1,134 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Font Loading Module Level 3 (https://drafts.csswg.org/css-font-loading-3/) + +typedef (ArrayBuffer or ArrayBufferView) BinaryData; + +dictionary FontFaceDescriptors { + CSSOMString style = "normal"; + CSSOMString weight = "normal"; + CSSOMString stretch = "normal"; + CSSOMString unicodeRange = "U+0-10FFFF"; + CSSOMString variant = "normal"; + CSSOMString featureSettings = "normal"; + CSSOMString variationSettings = "normal"; + CSSOMString display = "auto"; + CSSOMString ascentOverride = "normal"; + CSSOMString descentOverride = "normal"; + CSSOMString lineGapOverride = "normal"; +}; + +enum FontFaceLoadStatus { "unloaded", "loading", "loaded", "error" }; + +[Exposed=(Window,Worker)] +interface FontFace { + constructor(CSSOMString family, (CSSOMString or BinaryData) source, + optional FontFaceDescriptors descriptors = {}); + attribute CSSOMString family; + attribute CSSOMString style; + attribute CSSOMString weight; + attribute CSSOMString stretch; + attribute CSSOMString unicodeRange; + attribute CSSOMString variant; + attribute CSSOMString featureSettings; + attribute CSSOMString variationSettings; + attribute CSSOMString display; + attribute CSSOMString ascentOverride; + attribute CSSOMString descentOverride; + attribute CSSOMString lineGapOverride; + + readonly attribute FontFaceLoadStatus status; + + Promise load(); + readonly attribute Promise loaded; +}; + +[Exposed=(Window,Worker)] +interface FontFaceFeatures { + /* The CSSWG is still discussing what goes in here */ +}; + +[Exposed=(Window,Worker)] +interface FontFaceVariationAxis { + readonly attribute DOMString name; + readonly attribute DOMString axisTag; + readonly attribute double minimumValue; + readonly attribute double maximumValue; + readonly attribute double defaultValue; +}; + +[Exposed=(Window,Worker)] +interface FontFaceVariations { + readonly setlike; +}; + +[Exposed=(Window,Worker)] +interface FontFacePalette { + iterable; + readonly attribute unsigned long length; + getter 
DOMString (unsigned long index); + readonly attribute boolean usableWithLightBackground; + readonly attribute boolean usableWithDarkBackground; +}; + +[Exposed=(Window,Worker)] +interface FontFacePalettes { + iterable; + readonly attribute unsigned long length; + getter FontFacePalette (unsigned long index); +}; + +partial interface FontFace { + readonly attribute FontFaceFeatures features; + readonly attribute FontFaceVariations variations; + readonly attribute FontFacePalettes palettes; +}; + +dictionary FontFaceSetLoadEventInit : EventInit { + sequence fontfaces = []; +}; + +[Exposed=(Window,Worker)] +interface FontFaceSetLoadEvent : Event { + constructor(CSSOMString type, optional FontFaceSetLoadEventInit eventInitDict = {}); + [SameObject] readonly attribute FrozenArray fontfaces; +}; + +enum FontFaceSetLoadStatus { "loading", "loaded" }; + +[Exposed=(Window,Worker)] +interface FontFaceSet : EventTarget { + constructor(sequence initialFaces); + + setlike; + FontFaceSet add(FontFace font); + boolean delete(FontFace font); + undefined clear(); + + // events for when loading state changes + attribute EventHandler onloading; + attribute EventHandler onloadingdone; + attribute EventHandler onloadingerror; + + // check and start loads if appropriate + // and fulfill promise when all loads complete + Promise> load(CSSOMString font, optional CSSOMString text = " "); + + // return whether all fonts in the fontlist are loaded + // (does not initiate load if not available) + boolean check(CSSOMString font, optional CSSOMString text = " "); + + // async notification that font loading and layout operations are done + readonly attribute Promise ready; + + // loading state, "loading" while one or more fonts loading, "loaded" otherwise + readonly attribute FontFaceSetLoadStatus status; +}; + +interface mixin FontFaceSource { + readonly attribute FontFaceSet fonts; +}; + +Document includes FontFaceSource; +WorkerGlobalScope includes FontFaceSource; diff --git a/test/wpt/tests/interfaces/css-fonts.idl b/test/wpt/tests/interfaces/css-fonts.idl new file mode 100644 index 0000000..eddfc02 --- /dev/null +++ b/test/wpt/tests/interfaces/css-fonts.idl @@ -0,0 +1,36 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Fonts Module Level 4 (https://drafts.csswg.org/css-fonts-4/) + +[Exposed=Window] +interface CSSFontFaceRule : CSSRule { + readonly attribute CSSStyleDeclaration style; +}; + +partial interface CSSRule { const unsigned short FONT_FEATURE_VALUES_RULE = 14; +}; +[Exposed=Window] +interface CSSFontFeatureValuesRule : CSSRule { + attribute CSSOMString fontFamily; + readonly attribute CSSFontFeatureValuesMap annotation; + readonly attribute CSSFontFeatureValuesMap ornaments; + readonly attribute CSSFontFeatureValuesMap stylistic; + readonly attribute CSSFontFeatureValuesMap swash; + readonly attribute CSSFontFeatureValuesMap characterVariant; + readonly attribute CSSFontFeatureValuesMap styleset; +}; + +[Exposed=Window] +interface CSSFontFeatureValuesMap { + maplike>; + undefined set(CSSOMString featureValueName, + (unsigned long or sequence) values); +}; + +[Exposed=Window]interface CSSFontPaletteValuesRule : CSSRule { + readonly attribute CSSOMString name; + readonly attribute CSSOMString fontFamily; + readonly attribute CSSOMString basePalette; + readonly attribute CSSOMString overrideColors; +}; diff --git a/test/wpt/tests/interfaces/css-highlight-api.idl 
b/test/wpt/tests/interfaces/css-highlight-api.idl new file mode 100644 index 0000000..f3c6b2e --- /dev/null +++ b/test/wpt/tests/interfaces/css-highlight-api.idl @@ -0,0 +1,27 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Custom Highlight API Module Level 1 (https://drafts.csswg.org/css-highlight-api-1/) + +enum HighlightType { + "highlight", + "spelling-error", + "grammar-error" +}; + +[Exposed=Window] +interface Highlight { + constructor(AbstractRange... initialRanges); + setlike; + attribute long priority; + attribute HighlightType type; +}; + +partial namespace CSS { + readonly attribute HighlightRegistry highlights; +}; + +[Exposed=Window] +interface HighlightRegistry { + maplike; +}; diff --git a/test/wpt/tests/interfaces/css-images-4.idl b/test/wpt/tests/interfaces/css-images-4.idl new file mode 100644 index 0000000..8866b00 --- /dev/null +++ b/test/wpt/tests/interfaces/css-images-4.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Images Module Level 4 (https://drafts.csswg.org/css-images-4/) + +partial namespace CSS { + [SameObject] readonly attribute any elementSources; +}; diff --git a/test/wpt/tests/interfaces/css-layout-api.idl b/test/wpt/tests/interfaces/css-layout-api.idl new file mode 100644 index 0000000..2b772d5 --- /dev/null +++ b/test/wpt/tests/interfaces/css-layout-api.idl @@ -0,0 +1,144 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Layout API Level 1 (https://drafts.css-houdini.org/css-layout-api-1/) + +partial namespace CSS { + [SameObject] readonly attribute Worklet layoutWorklet; +}; + +[Global=(Worklet,LayoutWorklet),Exposed=LayoutWorklet] +interface LayoutWorkletGlobalScope : WorkletGlobalScope { + undefined registerLayout(DOMString name, VoidFunction layoutCtor); +}; + +dictionary LayoutOptions { + ChildDisplayType childDisplay = "block"; + LayoutSizingMode sizing = "block-like"; +}; + +enum ChildDisplayType { + "block", // default - "blockifies" the child boxes. + "normal", +}; + +enum LayoutSizingMode { + "block-like", // default - Sizing behaves like block containers. + "manual", // Sizing is specified by the web developer. +}; + +[Exposed=LayoutWorklet] +interface LayoutChild { + readonly attribute StylePropertyMapReadOnly styleMap; + + Promise intrinsicSizes(); + Promise layoutNextFragment(LayoutConstraintsOptions constraints, ChildBreakToken breakToken); +}; + +[Exposed=LayoutWorklet] +interface LayoutFragment { + readonly attribute double inlineSize; + readonly attribute double blockSize; + + attribute double inlineOffset; + attribute double blockOffset; + + readonly attribute any data; + + readonly attribute ChildBreakToken? breakToken; +}; + +[Exposed=LayoutWorklet] +interface IntrinsicSizes { + readonly attribute double minContentSize; + readonly attribute double maxContentSize; +}; + +[Exposed=LayoutWorklet] +interface LayoutConstraints { + readonly attribute double availableInlineSize; + readonly attribute double availableBlockSize; + + readonly attribute double? fixedInlineSize; + readonly attribute double? fixedBlockSize; + + readonly attribute double percentageInlineSize; + readonly attribute double percentageBlockSize; + + readonly attribute double? 
blockFragmentationOffset; + readonly attribute BlockFragmentationType blockFragmentationType; + + readonly attribute any data; +}; + +enum BlockFragmentationType { "none", "page", "column", "region" }; + +dictionary LayoutConstraintsOptions { + double availableInlineSize; + double availableBlockSize; + + double fixedInlineSize; + double fixedBlockSize; + + double percentageInlineSize; + double percentageBlockSize; + + double blockFragmentationOffset; + BlockFragmentationType blockFragmentationType = "none"; + + any data; +}; + +[Exposed=LayoutWorklet] +interface ChildBreakToken { + readonly attribute BreakType breakType; + readonly attribute LayoutChild child; +}; + +[Exposed=LayoutWorklet] +interface BreakToken { + readonly attribute FrozenArray childBreakTokens; + readonly attribute any data; +}; + +dictionary BreakTokenOptions { + sequence childBreakTokens; + any data = null; +}; + +enum BreakType { "none", "line", "column", "page", "region" }; + +[Exposed=LayoutWorklet] +interface LayoutEdges { + readonly attribute double inlineStart; + readonly attribute double inlineEnd; + + readonly attribute double blockStart; + readonly attribute double blockEnd; + + // Convenience attributes for the sum in one direction. + readonly attribute double inline; + readonly attribute double block; +}; + +// This is the final return value from the author defined layout() method. +dictionary FragmentResultOptions { + double inlineSize = 0; + double blockSize = 0; + double autoBlockSize = 0; + sequence childFragments = []; + any data = null; + BreakTokenOptions breakToken = null; +}; + +[Exposed=LayoutWorklet] +interface FragmentResult { + constructor(optional FragmentResultOptions options = {}); + readonly attribute double inlineSize; + readonly attribute double blockSize; +}; + +dictionary IntrinsicSizesResultOptions { + double maxContentSize; + double minContentSize; +}; diff --git a/test/wpt/tests/interfaces/css-masking.idl b/test/wpt/tests/interfaces/css-masking.idl new file mode 100644 index 0000000..72fbd9a --- /dev/null +++ b/test/wpt/tests/interfaces/css-masking.idl @@ -0,0 +1,20 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Masking Module Level 1 (https://drafts.fxtf.org/css-masking-1/) + +[Exposed=Window] +interface SVGClipPathElement : SVGElement { + readonly attribute SVGAnimatedEnumeration clipPathUnits; + readonly attribute SVGAnimatedTransformList transform; +}; + +[Exposed=Window] +interface SVGMaskElement : SVGElement { + readonly attribute SVGAnimatedEnumeration maskUnits; + readonly attribute SVGAnimatedEnumeration maskContentUnits; + readonly attribute SVGAnimatedLength x; + readonly attribute SVGAnimatedLength y; + readonly attribute SVGAnimatedLength width; + readonly attribute SVGAnimatedLength height; +}; diff --git a/test/wpt/tests/interfaces/css-nav.idl b/test/wpt/tests/interfaces/css-nav.idl new file mode 100644 index 0000000..03f039e --- /dev/null +++ b/test/wpt/tests/interfaces/css-nav.idl @@ -0,0 +1,48 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Spatial Navigation Level 1 (https://drafts.csswg.org/css-nav-1/) + +enum SpatialNavigationDirection { + "up", + "down", + "left", + "right", +}; + +partial interface Window { + undefined navigate(SpatialNavigationDirection dir); +}; + +enum FocusableAreaSearchMode { + "visible", + "all" +}; + +dictionary FocusableAreasOption 
{ + FocusableAreaSearchMode mode; +}; + +dictionary SpatialNavigationSearchOptions { + sequence? candidates; + Node? container; +}; + +partial interface Element { + Node getSpatialNavigationContainer(); + sequence focusableAreas(optional FocusableAreasOption option = {}); + Node? spatialNavigationSearch(SpatialNavigationDirection dir, optional SpatialNavigationSearchOptions options = {}); +}; + +[Exposed=Window] +interface NavigationEvent : UIEvent { + constructor(DOMString type, + optional NavigationEventInit eventInitDict = {}); + readonly attribute SpatialNavigationDirection dir; + readonly attribute EventTarget? relatedTarget; +}; + +dictionary NavigationEventInit : UIEventInit { + SpatialNavigationDirection dir; + EventTarget? relatedTarget = null; +}; diff --git a/test/wpt/tests/interfaces/css-nesting.idl b/test/wpt/tests/interfaces/css-nesting.idl new file mode 100644 index 0000000..01f27ab --- /dev/null +++ b/test/wpt/tests/interfaces/css-nesting.idl @@ -0,0 +1,10 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Nesting Module (https://drafts.csswg.org/css-nesting-1/) + +partial interface CSSStyleRule { + [SameObject] readonly attribute CSSRuleList cssRules; + unsigned long insertRule(CSSOMString rule, optional unsigned long index = 0); + undefined deleteRule(unsigned long index); +}; diff --git a/test/wpt/tests/interfaces/css-paint-api.idl b/test/wpt/tests/interfaces/css-paint-api.idl new file mode 100644 index 0000000..0924c53 --- /dev/null +++ b/test/wpt/tests/interfaces/css-paint-api.idl @@ -0,0 +1,39 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Painting API Level 1 (https://drafts.css-houdini.org/css-paint-api-1/) + +partial namespace CSS { + [SameObject] readonly attribute Worklet paintWorklet; +}; + +[Global=(Worklet,PaintWorklet),Exposed=PaintWorklet] +interface PaintWorkletGlobalScope : WorkletGlobalScope { + undefined registerPaint(DOMString name, VoidFunction paintCtor); + readonly attribute unrestricted double devicePixelRatio; +}; + +dictionary PaintRenderingContext2DSettings { + boolean alpha = true; +}; + +[Exposed=PaintWorklet] +interface PaintRenderingContext2D { +}; +PaintRenderingContext2D includes CanvasState; +PaintRenderingContext2D includes CanvasTransform; +PaintRenderingContext2D includes CanvasCompositing; +PaintRenderingContext2D includes CanvasImageSmoothing; +PaintRenderingContext2D includes CanvasFillStrokeStyles; +PaintRenderingContext2D includes CanvasShadowStyles; +PaintRenderingContext2D includes CanvasRect; +PaintRenderingContext2D includes CanvasDrawPath; +PaintRenderingContext2D includes CanvasDrawImage; +PaintRenderingContext2D includes CanvasPathDrawingStyles; +PaintRenderingContext2D includes CanvasPath; + +[Exposed=PaintWorklet] +interface PaintSize { + readonly attribute double width; + readonly attribute double height; +}; diff --git a/test/wpt/tests/interfaces/css-parser-api.idl b/test/wpt/tests/interfaces/css-parser-api.idl new file mode 100644 index 0000000..4e34a3f --- /dev/null +++ b/test/wpt/tests/interfaces/css-parser-api.idl @@ -0,0 +1,76 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Parser API (https://wicg.github.io/css-parser-api/) + +typedef (DOMString or ReadableStream) CSSStringSource; +typedef (DOMString or 
CSSStyleValue or CSSParserValue) CSSToken; + +partial namespace CSS { + Promise> parseStylesheet(CSSStringSource css, optional CSSParserOptions options = {}); + Promise> parseRuleList(CSSStringSource css, optional CSSParserOptions options = {}); + Promise parseRule(CSSStringSource css, optional CSSParserOptions options = {}); + Promise> parseDeclarationList(CSSStringSource css, optional CSSParserOptions options = {}); + CSSParserDeclaration parseDeclaration(DOMString css, optional CSSParserOptions options = {}); + CSSToken parseValue(DOMString css); + sequence parseValueList(DOMString css); + sequence> parseCommaValueList(DOMString css); +}; + +dictionary CSSParserOptions { + object atRules; + /* dict of at-rule name => at-rule type + (contains decls or contains qualified rules) */ +}; + +[Exposed=Window] +interface CSSParserRule { + /* Just a superclass. */ +}; + +[Exposed=Window] +interface CSSParserAtRule : CSSParserRule { + constructor(DOMString name, sequence prelude, optional sequence? body); + readonly attribute DOMString name; + readonly attribute FrozenArray prelude; + readonly attribute FrozenArray? body; + /* nullable to handle at-statements */ + stringifier; +}; + +[Exposed=Window] +interface CSSParserQualifiedRule : CSSParserRule { + constructor(sequence prelude, optional sequence? body); + readonly attribute FrozenArray prelude; + readonly attribute FrozenArray body; + stringifier; +}; + +[Exposed=Window] +interface CSSParserDeclaration : CSSParserRule { + constructor(DOMString name, optional sequence body); + readonly attribute DOMString name; + readonly attribute FrozenArray body; + stringifier; +}; + +[Exposed=Window] +interface CSSParserValue { + /* Just a superclass. */ +}; + +[Exposed=Window] +interface CSSParserBlock : CSSParserValue { + constructor(DOMString name, sequence body); + readonly attribute DOMString name; /* "[]", "{}", or "()" */ + readonly attribute FrozenArray body; + stringifier; +}; + +[Exposed=Window] +interface CSSParserFunction : CSSParserValue { + constructor(DOMString name, sequence> args); + readonly attribute DOMString name; + readonly attribute FrozenArray> args; + stringifier; +}; diff --git a/test/wpt/tests/interfaces/css-properties-values-api.idl b/test/wpt/tests/interfaces/css-properties-values-api.idl new file mode 100644 index 0000000..eb7d7b0 --- /dev/null +++ b/test/wpt/tests/interfaces/css-properties-values-api.idl @@ -0,0 +1,23 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Properties and Values API Level 1 (https://drafts.css-houdini.org/css-properties-values-api-1/) + +dictionary PropertyDefinition { + required DOMString name; + DOMString syntax = "*"; + required boolean inherits; + DOMString initialValue; +}; + +partial namespace CSS { + undefined registerProperty(PropertyDefinition definition); +}; + +[Exposed=Window] +interface CSSPropertyRule : CSSRule { + readonly attribute CSSOMString name; + readonly attribute CSSOMString syntax; + readonly attribute boolean inherits; + readonly attribute CSSOMString? 
initialValue; +}; diff --git a/test/wpt/tests/interfaces/css-pseudo.idl b/test/wpt/tests/interfaces/css-pseudo.idl new file mode 100644 index 0000000..dbe4c54 --- /dev/null +++ b/test/wpt/tests/interfaces/css-pseudo.idl @@ -0,0 +1,16 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Pseudo-Elements Module Level 4 (https://drafts.csswg.org/css-pseudo-4/) + +[Exposed=Window] +interface CSSPseudoElement : EventTarget { + readonly attribute CSSOMString type; + readonly attribute Element element; + readonly attribute (Element or CSSPseudoElement) parent; + CSSPseudoElement? pseudo(CSSOMString type); +}; + +partial interface Element { + CSSPseudoElement? pseudo(CSSOMString type); +}; diff --git a/test/wpt/tests/interfaces/css-regions.idl b/test/wpt/tests/interfaces/css-regions.idl new file mode 100644 index 0000000..113438f --- /dev/null +++ b/test/wpt/tests/interfaces/css-regions.idl @@ -0,0 +1,29 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Regions Module Level 1 (https://drafts.csswg.org/css-regions-1/) + +partial interface Document { + readonly attribute NamedFlowMap namedFlows; +}; + +[Exposed=Window] interface NamedFlowMap { + maplike; +}; + +[Exposed=Window] +interface NamedFlow : EventTarget { + readonly attribute CSSOMString name; + readonly attribute boolean overset; + sequence getRegions(); + readonly attribute short firstEmptyRegionIndex; + sequence getContent(); + sequence getRegionsByContent(Node node); +}; + +interface mixin Region { + readonly attribute CSSOMString regionOverset; + sequence? getRegionFlowRanges(); +}; + +Element includes Region; diff --git a/test/wpt/tests/interfaces/css-shadow-parts.idl b/test/wpt/tests/interfaces/css-shadow-parts.idl new file mode 100644 index 0000000..3759199 --- /dev/null +++ b/test/wpt/tests/interfaces/css-shadow-parts.idl @@ -0,0 +1,8 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Shadow Parts (https://drafts.csswg.org/css-shadow-parts-1/) + +partial interface Element { + [SameObject, PutForwards=value] readonly attribute DOMTokenList part; +}; diff --git a/test/wpt/tests/interfaces/css-toggle.tentative.idl b/test/wpt/tests/interfaces/css-toggle.tentative.idl new file mode 100644 index 0000000..5587019 --- /dev/null +++ b/test/wpt/tests/interfaces/css-toggle.tentative.idl @@ -0,0 +1,51 @@ +partial interface Element { + [SameObject] readonly attribute CSSToggleMap toggles; +}; + +interface CSSToggleMap { + maplike; + CSSToggleMap set(DOMString key, CSSToggle value); +}; + +interface CSSToggle { + attribute (unsigned long or DOMString) value; + attribute unsigned long? valueAsNumber; + attribute DOMString? 
valueAsString; + + attribute (unsigned long or FrozenArray) states; + attribute boolean group; + attribute CSSToggleScope scope; + attribute CSSToggleCycle cycle; + + constructor(optional CSSToggleData options); +}; + +dictionary CSSToggleData { + (unsigned long or DOMString) value = 0; + (unsigned long or sequence) states = 1; + boolean group = false; + CSSToggleScope scope = "wide"; + CSSToggleCycle cycle = "cycle"; +}; + +enum CSSToggleScope { + "narrow", + "wide", +}; + +enum CSSToggleCycle { + "cycle", + "cycle-on", + "sticky", +}; + +interface CSSToggleEvent : Event { + constructor(DOMString type, optional CSSToggleEventInit eventInitDict = {}); + readonly attribute DOMString toggleName; + readonly attribute CSSToggle? toggle; +}; + +dictionary CSSToggleEventInit : EventInit { + DOMString toggleName = ""; + CSSToggle? toggle = null; +}; diff --git a/test/wpt/tests/interfaces/css-transitions-2.idl b/test/wpt/tests/interfaces/css-transitions-2.idl new file mode 100644 index 0000000..9d06f3c --- /dev/null +++ b/test/wpt/tests/interfaces/css-transitions-2.idl @@ -0,0 +1,9 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Transitions Level 2 (https://drafts.csswg.org/css-transitions-2/) + +[Exposed=Window] +interface CSSTransition : Animation { + readonly attribute CSSOMString transitionProperty; +}; diff --git a/test/wpt/tests/interfaces/css-transitions.idl b/test/wpt/tests/interfaces/css-transitions.idl new file mode 100644 index 0000000..0f00b2c --- /dev/null +++ b/test/wpt/tests/interfaces/css-transitions.idl @@ -0,0 +1,25 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Transitions (https://drafts.csswg.org/css-transitions-1/) + +[Exposed=Window] +interface TransitionEvent : Event { + constructor(CSSOMString type, optional TransitionEventInit transitionEventInitDict = {}); + readonly attribute CSSOMString propertyName; + readonly attribute double elapsedTime; + readonly attribute CSSOMString pseudoElement; +}; + +dictionary TransitionEventInit : EventInit { + CSSOMString propertyName = ""; + double elapsedTime = 0.0; + CSSOMString pseudoElement = ""; +}; + +partial interface mixin GlobalEventHandlers { + attribute EventHandler ontransitionrun; + attribute EventHandler ontransitionstart; + attribute EventHandler ontransitionend; + attribute EventHandler ontransitioncancel; +}; diff --git a/test/wpt/tests/interfaces/css-typed-om.idl b/test/wpt/tests/interfaces/css-typed-om.idl new file mode 100644 index 0000000..0df6a03 --- /dev/null +++ b/test/wpt/tests/interfaces/css-typed-om.idl @@ -0,0 +1,423 @@ +// GENERATED CONTENT - DO NOT EDIT +// Content was automatically extracted by Reffy into webref +// (https://github.com/w3c/webref) +// Source: CSS Typed OM Level 1 (https://drafts.css-houdini.org/css-typed-om-1/) + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSStyleValue { + stringifier; + [Exposed=Window] static CSSStyleValue parse(USVString property, USVString cssText); + [Exposed=Window] static sequence parseAll(USVString property, USVString cssText); +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface StylePropertyMapReadOnly { + iterable>; + (undefined or CSSStyleValue) get(USVString property); + sequence getAll(USVString property); + boolean has(USVString property); + readonly attribute unsigned long size; +}; + +[Exposed=Window] 
+interface StylePropertyMap : StylePropertyMapReadOnly { + undefined set(USVString property, (CSSStyleValue or USVString)... values); + undefined append(USVString property, (CSSStyleValue or USVString)... values); + undefined delete(USVString property); + undefined clear(); +}; + +partial interface Element { + [SameObject] StylePropertyMapReadOnly computedStyleMap(); +}; + +partial interface CSSStyleRule { + [SameObject] readonly attribute StylePropertyMap styleMap; +}; + +partial interface mixin ElementCSSInlineStyle { + [SameObject] readonly attribute StylePropertyMap attributeStyleMap; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSUnparsedValue : CSSStyleValue { + constructor(sequence members); + iterable; + readonly attribute unsigned long length; + getter CSSUnparsedSegment (unsigned long index); + setter CSSUnparsedSegment (unsigned long index, CSSUnparsedSegment val); +}; + +typedef (USVString or CSSVariableReferenceValue) CSSUnparsedSegment; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSVariableReferenceValue { + constructor(USVString variable, optional CSSUnparsedValue? fallback = null); + attribute USVString variable; + readonly attribute CSSUnparsedValue? fallback; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSKeywordValue : CSSStyleValue { + constructor(USVString value); + attribute USVString value; +}; + +typedef (DOMString or CSSKeywordValue) CSSKeywordish; + +typedef (double or CSSNumericValue) CSSNumberish; + +enum CSSNumericBaseType { + "length", + "angle", + "time", + "frequency", + "resolution", + "flex", + "percent", +}; + +dictionary CSSNumericType { + long length; + long angle; + long time; + long frequency; + long resolution; + long flex; + long percent; + CSSNumericBaseType percentHint; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSNumericValue : CSSStyleValue { + CSSNumericValue add(CSSNumberish... values); + CSSNumericValue sub(CSSNumberish... values); + CSSNumericValue mul(CSSNumberish... values); + CSSNumericValue div(CSSNumberish... values); + CSSNumericValue min(CSSNumberish... values); + CSSNumericValue max(CSSNumberish... values); + + boolean equals(CSSNumberish... value); + + CSSUnitValue to(USVString unit); + CSSMathSum toSum(USVString... units); + CSSNumericType type(); + + [Exposed=Window] static CSSNumericValue parse(USVString cssText); +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSUnitValue : CSSNumericValue { + constructor(double value, USVString unit); + attribute double value; + readonly attribute USVString unit; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathValue : CSSNumericValue { + readonly attribute CSSMathOperator operator; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathSum : CSSMathValue { + constructor(CSSNumberish... args); + readonly attribute CSSNumericArray values; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathProduct : CSSMathValue { + constructor(CSSNumberish... 
args); + readonly attribute CSSNumericArray values; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathNegate : CSSMathValue { + constructor(CSSNumberish arg); + readonly attribute CSSNumericValue value; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathInvert : CSSMathValue { + constructor(CSSNumberish arg); + readonly attribute CSSNumericValue value; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathMin : CSSMathValue { + constructor(CSSNumberish... args); + readonly attribute CSSNumericArray values; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathMax : CSSMathValue { + constructor(CSSNumberish... args); + readonly attribute CSSNumericArray values; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSMathClamp : CSSMathValue { + constructor(CSSNumberish lower, CSSNumberish value, CSSNumberish upper); + readonly attribute CSSNumericValue lower; + readonly attribute CSSNumericValue value; + readonly attribute CSSNumericValue upper; +}; + +[Exposed=(Window, Worker, PaintWorklet, LayoutWorklet)] +interface CSSNumericArray { + iterable; + readonly attribute unsigned long length; + getter CSSNumericValue (unsigned long index); +}; + +enum CSSMathOperator { + "sum", + "product", + "negate", + "invert", + "min", + "max", + "clamp", +}; + +partial namespace CSS { + CSSUnitValue number(double value); + CSSUnitValue percent(double value); + + // + CSSUnitValue em(double value); + CSSUnitValue ex(double value); + CSSUnitValue ch(double value); + CSSUnitValue ic(double value); + CSSUnitValue rem(double value); + CSSUnitValue lh(double value); + CSSUnitValue rlh(double value); + CSSUnitValue vw(double value); + CSSUnitValue vh(double value); + CSSUnitValue vi(double value); + CSSUnitValue vb(double value); + CSSUnitValue vmin(double value); + CSSUnitValue vmax(double value); + CSSUnitValue svw(double value); + CSSUnitValue svh(double value); + CSSUnitValue svi(double value); + CSSUnitValue svb(double value); + CSSUnitValue svmin(double value); + CSSUnitValue svmax(double value); + CSSUnitValue lvw(double value); + CSSUnitValue lvh(double value); + CSSUnitValue lvi(double value); + CSSUnitValue lvb(double value); + CSSUnitValue lvmin(double value); + CSSUnitValue lvmax(double value); + CSSUnitValue dvw(double value); + CSSUnitValue dvh(double value); + CSSUnitValue dvi(double value); + CSSUnitValue dvb(double value); + CSSUnitValue dvmin(double value); + CSSUnitValue dvmax(double value); + CSSUnitValue cqw(double value); + CSSUnitValue cqh(double value); + CSSUnitValue cqi(double value); + CSSUnitValue cqb(double value); + CSSUnitValue cqmin(double value); + CSSUnitValue cqmax(double value); + CSSUnitValue cm(double value); + CSSUnitValue mm(double value); + CSSUnitValue Q(double value); + CSSUnitValue in(double value); + CSSUnitValue pt(double value); + CSSUnitValue pc(double value); + CSSUnitValue px(double value); + + // + CSSUnitValue deg(double value); + CSSUnitValue grad(double value); + CSSUnitValue rad(double value); + CSSUnitValue turn(double value); + + //