summaryrefslogtreecommitdiffstats
path: root/test/node-fetch
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-21 20:56:19 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-21 20:56:19 +0000
commit0b6210cd37b68b94252cb798598b12974a20e1c1 (patch)
treee371686554a877842d95aa94f100bee552ff2a8e /test/node-fetch
parentInitial commit. (diff)
downloadnode-undici-0b6210cd37b68b94252cb798598b12974a20e1c1.tar.xz
node-undici-0b6210cd37b68b94252cb798598b12974a20e1c1.zip
Adding upstream version 5.28.2+dfsg1+~cs23.11.12.3.upstream/5.28.2+dfsg1+_cs23.11.12.3upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test/node-fetch')
-rw-r--r--test/node-fetch/LICENSE22
-rw-r--r--test/node-fetch/headers.js282
-rw-r--r--test/node-fetch/main.js1661
-rw-r--r--test/node-fetch/mock.js112
-rw-r--r--test/node-fetch/request.js281
-rw-r--r--test/node-fetch/response.js251
-rw-r--r--test/node-fetch/utils/chai-timeout.js15
-rw-r--r--test/node-fetch/utils/dummy.txt1
-rw-r--r--test/node-fetch/utils/read-stream.js9
-rw-r--r--test/node-fetch/utils/server.js467
10 files changed, 3101 insertions, 0 deletions
diff --git a/test/node-fetch/LICENSE b/test/node-fetch/LICENSE
new file mode 100644
index 0000000..41ca1b6
--- /dev/null
+++ b/test/node-fetch/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 - 2020 Node Fetch Team
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/test/node-fetch/headers.js b/test/node-fetch/headers.js
new file mode 100644
index 0000000..e509fd8
--- /dev/null
+++ b/test/node-fetch/headers.js
@@ -0,0 +1,282 @@
+/* eslint no-unused-expressions: "off" */
+
+const { format } = require('util')
+const chai = require('chai')
+const chaiIterator = require('chai-iterator')
+const { Headers } = require('../../lib/fetch/headers.js')
+
+chai.use(chaiIterator)
+
+const { expect } = chai
+
+describe('Headers', () => {
+ it('should have attributes conforming to Web IDL', () => {
+ const headers = new Headers()
+ expect(Object.getOwnPropertyNames(headers)).to.be.empty
+ const enumerableProperties = []
+
+ for (const property in headers) {
+ enumerableProperties.push(property)
+ }
+
+ for (const toCheck of [
+ 'append',
+ 'delete',
+ 'entries',
+ 'forEach',
+ 'get',
+ 'has',
+ 'keys',
+ 'set',
+ 'values'
+ ]) {
+ expect(enumerableProperties).to.contain(toCheck)
+ }
+ })
+
+ it('should allow iterating through all headers with forEach', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['b', '3'],
+ ['a', '1']
+ ])
+ expect(headers).to.have.property('forEach')
+
+ const result = []
+ for (const [key, value] of headers.entries()) {
+ result.push([key, value])
+ }
+
+ expect(result).to.deep.equal([
+ ['a', '1'],
+ ['b', '2, 3'],
+ ['c', '4']
+ ])
+ })
+
+ it('should be iterable with forEach', () => {
+ const headers = new Headers()
+ headers.append('Accept', 'application/json')
+ headers.append('Accept', 'text/plain')
+ headers.append('Content-Type', 'text/html')
+
+ const results = []
+ headers.forEach((value, key, object) => {
+ results.push({ value, key, object })
+ })
+
+ expect(results.length).to.equal(2)
+ expect({ key: 'accept', value: 'application/json, text/plain', object: headers }).to.deep.equal(results[0])
+ expect({ key: 'content-type', value: 'text/html', object: headers }).to.deep.equal(results[1])
+ })
+
+ xit('should set "this" to undefined by default on forEach', () => {
+ const headers = new Headers({ Accept: 'application/json' })
+ headers.forEach(function () {
+ expect(this).to.be.undefined
+ })
+ })
+
+ it('should accept thisArg as a second argument for forEach', () => {
+ const headers = new Headers({ Accept: 'application/json' })
+ const thisArg = {}
+ headers.forEach(function () {
+ expect(this).to.equal(thisArg)
+ }, thisArg)
+ })
+
+ it('should allow iterating through all headers with for-of loop', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ])
+ headers.append('b', '3')
+ expect(headers).to.be.iterable
+
+ const result = []
+ for (const pair of headers) {
+ result.push(pair)
+ }
+
+ expect(result).to.deep.equal([
+ ['a', '1'],
+ ['b', '2, 3'],
+ ['c', '4']
+ ])
+ })
+
+ it('should allow iterating through all headers with entries()', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ])
+ headers.append('b', '3')
+
+ expect(headers.entries()).to.be.iterable
+ .and.to.deep.iterate.over([
+ ['a', '1'],
+ ['b', '2, 3'],
+ ['c', '4']
+ ])
+ })
+
+ it('should allow iterating through all headers with keys()', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ])
+ headers.append('b', '3')
+
+ expect(headers.keys()).to.be.iterable
+ .and.to.iterate.over(['a', 'b', 'c'])
+ })
+
+ it('should allow iterating through all headers with values()', () => {
+ const headers = new Headers([
+ ['b', '2'],
+ ['c', '4'],
+ ['a', '1']
+ ])
+ headers.append('b', '3')
+
+ expect(headers.values()).to.be.iterable
+ .and.to.iterate.over(['1', '2, 3', '4'])
+ })
+
+ it('should reject illegal header', () => {
+ const headers = new Headers()
+ expect(() => new Headers({ 'He y': 'ok' })).to.throw(TypeError)
+ expect(() => new Headers({ 'Hé-y': 'ok' })).to.throw(TypeError)
+ expect(() => new Headers({ 'He-y': 'ăk' })).to.throw(TypeError)
+ expect(() => headers.append('Hé-y', 'ok')).to.throw(TypeError)
+ expect(() => headers.delete('Hé-y')).to.throw(TypeError)
+ expect(() => headers.get('Hé-y')).to.throw(TypeError)
+ expect(() => headers.has('Hé-y')).to.throw(TypeError)
+ expect(() => headers.set('Hé-y', 'ok')).to.throw(TypeError)
+ // Should reject empty header
+ expect(() => headers.append('', 'ok')).to.throw(TypeError)
+ })
+
+ xit('should ignore unsupported attributes while reading headers', () => {
+ const FakeHeader = function () {}
+ // Prototypes are currently ignored
+ // This might change in the future: #181
+ FakeHeader.prototype.z = 'fake'
+
+ const res = new FakeHeader()
+ res.a = 'string'
+ res.b = ['1', '2']
+ res.c = ''
+ res.d = []
+ res.e = 1
+ res.f = [1, 2]
+ res.g = { a: 1 }
+ res.h = undefined
+ res.i = null
+ res.j = Number.NaN
+ res.k = true
+ res.l = false
+ res.m = Buffer.from('test')
+
+ const h1 = new Headers(res)
+ h1.set('n', [1, 2])
+ h1.append('n', ['3', 4])
+
+ const h1Raw = h1.raw()
+
+ expect(h1Raw.a).to.include('string')
+ expect(h1Raw.b).to.include('1,2')
+ expect(h1Raw.c).to.include('')
+ expect(h1Raw.d).to.include('')
+ expect(h1Raw.e).to.include('1')
+ expect(h1Raw.f).to.include('1,2')
+ expect(h1Raw.g).to.include('[object Object]')
+ expect(h1Raw.h).to.include('undefined')
+ expect(h1Raw.i).to.include('null')
+ expect(h1Raw.j).to.include('NaN')
+ expect(h1Raw.k).to.include('true')
+ expect(h1Raw.l).to.include('false')
+ expect(h1Raw.m).to.include('test')
+ expect(h1Raw.n).to.include('1,2')
+ expect(h1Raw.n).to.include('3,4')
+
+ expect(h1Raw.z).to.be.undefined
+ })
+
+ xit('should wrap headers', () => {
+ const h1 = new Headers({
+ a: '1'
+ })
+ const h1Raw = h1.raw()
+
+ const h2 = new Headers(h1)
+ h2.set('b', '1')
+ const h2Raw = h2.raw()
+
+ const h3 = new Headers(h2)
+ h3.append('a', '2')
+ const h3Raw = h3.raw()
+
+ expect(h1Raw.a).to.include('1')
+ expect(h1Raw.a).to.not.include('2')
+
+ expect(h2Raw.a).to.include('1')
+ expect(h2Raw.a).to.not.include('2')
+ expect(h2Raw.b).to.include('1')
+
+ expect(h3Raw.a).to.include('1')
+ expect(h3Raw.a).to.include('2')
+ expect(h3Raw.b).to.include('1')
+ })
+
+ it('should accept headers as an iterable of tuples', () => {
+ let headers
+
+ headers = new Headers([
+ ['a', '1'],
+ ['b', '2'],
+ ['a', '3']
+ ])
+ expect(headers.get('a')).to.equal('1, 3')
+ expect(headers.get('b')).to.equal('2')
+
+ headers = new Headers([
+ new Set(['a', '1']),
+ ['b', '2'],
+ new Map([['a', null], ['3', null]]).keys()
+ ])
+ expect(headers.get('a')).to.equal('1, 3')
+ expect(headers.get('b')).to.equal('2')
+
+ headers = new Headers(new Map([
+ ['a', '1'],
+ ['b', '2']
+ ]))
+ expect(headers.get('a')).to.equal('1')
+ expect(headers.get('b')).to.equal('2')
+ })
+
+ it('should throw a TypeError if non-tuple exists in a headers initializer', () => {
+ expect(() => new Headers([['b', '2', 'huh?']])).to.throw(TypeError)
+ expect(() => new Headers(['b2'])).to.throw(TypeError)
+ expect(() => new Headers('b2')).to.throw(TypeError)
+ expect(() => new Headers({ [Symbol.iterator]: 42 })).to.throw(TypeError)
+ })
+
+ xit('should use a custom inspect function', () => {
+ const headers = new Headers([
+ ['Host', 'thehost'],
+ ['Host', 'notthehost'],
+ ['a', '1'],
+ ['b', '2'],
+ ['a', '3']
+ ])
+
+ // eslint-disable-next-line quotes
+ expect(format(headers)).to.equal("{ a: [ '1', '3' ], b: '2', host: 'thehost' }")
+ })
+})
diff --git a/test/node-fetch/main.js b/test/node-fetch/main.js
new file mode 100644
index 0000000..358a969
--- /dev/null
+++ b/test/node-fetch/main.js
@@ -0,0 +1,1661 @@
+/* eslint no-unused-expressions: "off" */
+/* globals AbortController */
+
+// Test tools
+const zlib = require('zlib')
+const stream = require('stream')
+const vm = require('vm')
+const chai = require('chai')
+const crypto = require('crypto')
+const chaiPromised = require('chai-as-promised')
+const chaiIterator = require('chai-iterator')
+const chaiString = require('chai-string')
+const delay = require('delay')
+const { Blob } = require('buffer')
+
+const {
+ fetch,
+ Headers,
+ Request,
+ FormData,
+ Response,
+ setGlobalDispatcher,
+ Agent
+} = require('../../index.js')
+const HeadersOrig = require('../../lib/fetch/headers.js').Headers
+const RequestOrig = require('../../lib/fetch/request.js').Request
+const ResponseOrig = require('../../lib/fetch/response.js').Response
+const TestServer = require('./utils/server.js')
+const chaiTimeout = require('./utils/chai-timeout.js')
+const { ReadableStream } = require('stream/web')
+
+function isNodeLowerThan (version) {
+ return !~process.version.localeCompare(version, undefined, { numeric: true })
+}
+
+const {
+ Uint8Array: VMUint8Array
+} = vm.runInNewContext('this')
+
+chai.use(chaiPromised)
+chai.use(chaiIterator)
+chai.use(chaiString)
+chai.use(chaiTimeout)
+const { expect } = chai
+
+describe('node-fetch', () => {
+ const local = new TestServer()
+ let base
+
+ before(async () => {
+ await local.start()
+ setGlobalDispatcher(new Agent({
+ connect: {
+ rejectUnauthorized: false
+ }
+ }))
+ base = `http://${local.hostname}:${local.port}/`
+ })
+
+ after(async () => {
+ return local.stop()
+ })
+
+ it('should return a promise', () => {
+ const url = `${base}hello`
+ const p = fetch(url)
+ expect(p).to.be.an.instanceof(Promise)
+ expect(p).to.have.property('then')
+ })
+
+ it('should expose Headers, Response and Request constructors', () => {
+ expect(Headers).to.equal(HeadersOrig)
+ expect(Response).to.equal(ResponseOrig)
+ expect(Request).to.equal(RequestOrig)
+ })
+
+ it('should support proper toString output for Headers, Response and Request objects', () => {
+ expect(new Headers().toString()).to.equal('[object Headers]')
+ expect(new Response().toString()).to.equal('[object Response]')
+ expect(new Request(base).toString()).to.equal('[object Request]')
+ })
+
+ it('should reject with error if url is protocol relative', () => {
+ const url = '//example.com/'
+ return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError)
+ })
+
+ it('should reject with error if url is relative path', () => {
+ const url = '/some/path'
+ return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError)
+ })
+
+ it('should reject with error if protocol is unsupported', () => {
+ const url = 'ftp://example.com/'
+ return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError)
+ })
+
+ it('should reject with error on network failure', function () {
+ this.timeout(5000)
+ const url = 'http://localhost:50000/'
+ return expect(fetch(url)).to.eventually.be.rejected
+ .and.be.an.instanceOf(TypeError)
+ })
+
+ it('should resolve into response', () => {
+ const url = `${base}hello`
+ return fetch(url).then(res => {
+ expect(res).to.be.an.instanceof(Response)
+ expect(res.headers).to.be.an.instanceof(Headers)
+ expect(res.body).to.be.an.instanceof(ReadableStream)
+ expect(res.bodyUsed).to.be.false
+
+ expect(res.url).to.equal(url)
+ expect(res.ok).to.be.true
+ expect(res.status).to.equal(200)
+ expect(res.statusText).to.equal('OK')
+ })
+ })
+
+ it('Response.redirect should resolve into response', () => {
+ const res = Response.redirect('http://localhost')
+ expect(res).to.be.an.instanceof(Response)
+ expect(res.headers).to.be.an.instanceof(Headers)
+ expect(res.headers.get('location')).to.equal('http://localhost/')
+ expect(res.status).to.equal(302)
+ })
+
+ it('Response.redirect /w invalid url should fail', () => {
+ expect(() => {
+ Response.redirect('localhost')
+ }).to.throw()
+ })
+
+ it('Response.redirect /w invalid status should fail', () => {
+ expect(() => {
+ Response.redirect('http://localhost', 200)
+ }).to.throw()
+ })
+
+ it('should accept plain text response', () => {
+ const url = `${base}plain`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(res.bodyUsed).to.be.true
+ expect(result).to.be.a('string')
+ expect(result).to.equal('text')
+ })
+ })
+ })
+
+ it('should accept html response (like plain text)', () => {
+ const url = `${base}html`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/html')
+ return res.text().then(result => {
+ expect(res.bodyUsed).to.be.true
+ expect(result).to.be.a('string')
+ expect(result).to.equal('<html></html>')
+ })
+ })
+ })
+
+ it('should accept json response', () => {
+ const url = `${base}json`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('application/json')
+ return res.json().then(result => {
+ expect(res.bodyUsed).to.be.true
+ expect(result).to.be.an('object')
+ expect(result).to.deep.equal({ name: 'value' })
+ })
+ })
+ })
+
+ it('should send request with custom headers', () => {
+ const url = `${base}inspect`
+ const options = {
+ headers: { 'x-custom-header': 'abc' }
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.headers['x-custom-header']).to.equal('abc')
+ })
+ })
+
+ it('should send request with custom headers array', () => {
+ const url = `${base}inspect`
+ const options = {
+ headers: { 'x-custom-header': ['abc'] }
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.headers['x-custom-header']).to.equal('abc')
+ })
+ })
+
+ it('should send request with multi-valued headers', () => {
+ const url = `${base}inspect`
+ const options = {
+ headers: { 'x-custom-header': ['abc', '123'] }
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.headers['x-custom-header']).to.equal('abc,123')
+ })
+ })
+
+ it('should accept headers instance', () => {
+ const url = `${base}inspect`
+ const options = {
+ headers: new Headers({ 'x-custom-header': 'abc' })
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.headers['x-custom-header']).to.equal('abc')
+ })
+ })
+
+ it('should follow redirect code 301', () => {
+ const url = `${base}redirect/301`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ expect(res.ok).to.be.true
+ })
+ })
+
+ it('should follow redirect code 302', () => {
+ const url = `${base}redirect/302`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should follow redirect code 303', () => {
+ const url = `${base}redirect/303`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should follow redirect code 307', () => {
+ const url = `${base}redirect/307`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should follow redirect code 308', () => {
+ const url = `${base}redirect/308`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should follow redirect chain', () => {
+ const url = `${base}redirect/chain`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should follow POST request redirect code 301 with GET', () => {
+ const url = `${base}redirect/301`
+ const options = {
+ method: 'POST',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ return res.json().then(result => {
+ expect(result.method).to.equal('GET')
+ expect(result.body).to.equal('')
+ })
+ })
+ })
+
+ it('should follow PATCH request redirect code 301 with PATCH', () => {
+ const url = `${base}redirect/301`
+ const options = {
+ method: 'PATCH',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ return res.json().then(res => {
+ expect(res.method).to.equal('PATCH')
+ expect(res.body).to.equal('a=1')
+ })
+ })
+ })
+
+ it('should follow POST request redirect code 302 with GET', () => {
+ const url = `${base}redirect/302`
+ const options = {
+ method: 'POST',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ return res.json().then(result => {
+ expect(result.method).to.equal('GET')
+ expect(result.body).to.equal('')
+ })
+ })
+ })
+
+ it('should follow PATCH request redirect code 302 with PATCH', () => {
+ const url = `${base}redirect/302`
+ const options = {
+ method: 'PATCH',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ return res.json().then(res => {
+ expect(res.method).to.equal('PATCH')
+ expect(res.body).to.equal('a=1')
+ })
+ })
+ })
+
+ it('should follow redirect code 303 with GET', () => {
+ const url = `${base}redirect/303`
+ const options = {
+ method: 'PUT',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ return res.json().then(result => {
+ expect(result.method).to.equal('GET')
+ expect(result.body).to.equal('')
+ })
+ })
+ })
+
+ it('should follow PATCH request redirect code 307 with PATCH', () => {
+ const url = `${base}redirect/307`
+ const options = {
+ method: 'PATCH',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ return res.json().then(result => {
+ expect(result.method).to.equal('PATCH')
+ expect(result.body).to.equal('a=1')
+ })
+ })
+ })
+
+ it('should not follow non-GET redirect if body is a readable stream', () => {
+ const url = `${base}redirect/307`
+ const options = {
+ method: 'PATCH',
+ body: stream.Readable.from('tada')
+ }
+ return expect(fetch(url, options)).to.eventually.be.rejected
+ .and.be.an.instanceOf(TypeError)
+ })
+
+ it('should obey maximum redirect, reject case', () => {
+ const url = `${base}redirect/chain/20`
+ return expect(fetch(url)).to.eventually.be.rejected
+ .and.be.an.instanceOf(TypeError)
+ })
+
+ it('should obey redirect chain, resolve case', () => {
+ const url = `${base}redirect/chain/19`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should support redirect mode, error flag', () => {
+ const url = `${base}redirect/301`
+ const options = {
+ redirect: 'error'
+ }
+ return expect(fetch(url, options)).to.eventually.be.rejected
+ .and.be.an.instanceOf(TypeError)
+ })
+
+ it('should support redirect mode, manual flag when there is no redirect', () => {
+ const url = `${base}hello`
+ const options = {
+ redirect: 'manual'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(url)
+ expect(res.status).to.equal(200)
+ expect(res.headers.get('location')).to.be.null
+ })
+ })
+
+ it('should follow redirect code 301 and keep existing headers', () => {
+ const url = `${base}redirect/301`
+ const options = {
+ headers: new Headers({ 'x-custom-header': 'abc' })
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(`${base}inspect`)
+ return res.json()
+ }).then(res => {
+ expect(res.headers['x-custom-header']).to.equal('abc')
+ })
+ })
+
+ it('should treat broken redirect as ordinary response (follow)', () => {
+ const url = `${base}redirect/no-location`
+ return fetch(url).then(res => {
+ expect(res.url).to.equal(url)
+ expect(res.status).to.equal(301)
+ expect(res.headers.get('location')).to.be.null
+ })
+ })
+
+ it('should treat broken redirect as ordinary response (manual)', () => {
+ const url = `${base}redirect/no-location`
+ const options = {
+ redirect: 'manual'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.url).to.equal(url)
+ expect(res.status).to.equal(301)
+ expect(res.headers.get('location')).to.be.null
+ })
+ })
+
+ it('should throw a TypeError on an invalid redirect option', () => {
+ const url = `${base}redirect/301`
+ const options = {
+ redirect: 'foobar'
+ }
+ return fetch(url, options).then(() => {
+ expect.fail()
+ }, error => {
+ expect(error).to.be.an.instanceOf(TypeError)
+ })
+ })
+
+ it('should set redirected property on response when redirect', () => {
+ const url = `${base}redirect/301`
+ return fetch(url).then(res => {
+ expect(res.redirected).to.be.true
+ })
+ })
+
+ it('should not set redirected property on response without redirect', () => {
+ const url = `${base}hello`
+ return fetch(url).then(res => {
+ expect(res.redirected).to.be.false
+ })
+ })
+
+ it('should handle client-error response', () => {
+ const url = `${base}error/400`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ expect(res.status).to.equal(400)
+ expect(res.statusText).to.equal('Bad Request')
+ expect(res.ok).to.be.false
+ return res.text().then(result => {
+ expect(res.bodyUsed).to.be.true
+ expect(result).to.be.a('string')
+ expect(result).to.equal('client error')
+ })
+ })
+ })
+
+ it('should handle server-error response', () => {
+ const url = `${base}error/500`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ expect(res.status).to.equal(500)
+ expect(res.statusText).to.equal('Internal Server Error')
+ expect(res.ok).to.be.false
+ return res.text().then(result => {
+ expect(res.bodyUsed).to.be.true
+ expect(result).to.be.a('string')
+ expect(result).to.equal('server error')
+ })
+ })
+ })
+
+ it('should handle network-error response', () => {
+ const url = `${base}error/reset`
+ return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError)
+ })
+
+ it('should handle network-error partial response', () => {
+ const url = `${base}error/premature`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(200)
+ expect(res.ok).to.be.true
+ return expect(res.text()).to.eventually.be.rejectedWith(Error)
+ })
+ })
+
+ it('should handle network-error in chunked response async iterator', () => {
+ const url = `${base}error/premature/chunked`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(200)
+ expect(res.ok).to.be.true
+
+ const read = async body => {
+ const chunks = []
+ for await (const chunk of body) {
+ chunks.push(chunk)
+ }
+
+ return chunks
+ }
+
+ return expect(read(res.body))
+ .to.eventually.be.rejectedWith(Error)
+ })
+ })
+
+ it('should handle network-error in chunked response in consumeBody', () => {
+ const url = `${base}error/premature/chunked`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(200)
+ expect(res.ok).to.be.true
+
+ return expect(res.text()).to.eventually.be.rejectedWith(Error)
+ })
+ })
+
+ it('should handle DNS-error response', () => {
+ const url = 'http://domain.invalid'
+ return expect(fetch(url)).to.eventually.be.rejectedWith(TypeError)
+ })
+
+ it('should reject invalid json response', () => {
+ const url = `${base}error/json`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('application/json')
+ return expect(res.json()).to.eventually.be.rejectedWith(Error)
+ })
+ })
+
+ it('should handle response with no status text', () => {
+ const url = `${base}no-status-text`
+ return fetch(url).then(res => {
+ expect(res.statusText).to.equal('')
+ })
+ })
+
+ it('should handle no content response', () => {
+ const url = `${base}no-content`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(204)
+ expect(res.statusText).to.equal('No Content')
+ expect(res.ok).to.be.true
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.be.empty
+ })
+ })
+ })
+
+ it('should reject when trying to parse no content response as json', () => {
+ const url = `${base}no-content`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(204)
+ expect(res.statusText).to.equal('No Content')
+ expect(res.ok).to.be.true
+ return expect(res.json()).to.eventually.be.rejectedWith(Error)
+ })
+ })
+
+ it('should handle no content response with gzip encoding', () => {
+ const url = `${base}no-content/gzip`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(204)
+ expect(res.statusText).to.equal('No Content')
+ expect(res.headers.get('content-encoding')).to.equal('gzip')
+ expect(res.ok).to.be.true
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.be.empty
+ })
+ })
+ })
+
+ it('should handle not modified response', () => {
+ const url = `${base}not-modified`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(304)
+ expect(res.statusText).to.equal('Not Modified')
+ expect(res.ok).to.be.false
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.be.empty
+ })
+ })
+ })
+
+ it('should handle not modified response with gzip encoding', () => {
+ const url = `${base}not-modified/gzip`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(304)
+ expect(res.statusText).to.equal('Not Modified')
+ expect(res.headers.get('content-encoding')).to.equal('gzip')
+ expect(res.ok).to.be.false
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.be.empty
+ })
+ })
+ })
+
+ it('should decompress gzip response', () => {
+ const url = `${base}gzip`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.equal('hello world')
+ })
+ })
+ })
+
+ it('should decompress slightly invalid gzip response', async () => {
+ const url = `${base}gzip-truncated`
+ const res = await fetch(url)
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ const result = await res.text()
+ expect(result).to.be.a('string')
+ expect(result).to.equal('hello world')
+ })
+
+ it('should decompress deflate response', () => {
+ const url = `${base}deflate`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.equal('hello world')
+ })
+ })
+ })
+
+ xit('should decompress deflate raw response from old apache server', () => {
+ const url = `${base}deflate-raw`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.equal('hello world')
+ })
+ })
+ })
+
+ it('should decompress brotli response', function () {
+ if (typeof zlib.createBrotliDecompress !== 'function') {
+ this.skip()
+ }
+
+ const url = `${base}brotli`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.equal('hello world')
+ })
+ })
+ })
+
+ it('should handle no content response with brotli encoding', function () {
+ if (typeof zlib.createBrotliDecompress !== 'function') {
+ this.skip()
+ }
+
+ const url = `${base}no-content/brotli`
+ return fetch(url).then(res => {
+ expect(res.status).to.equal(204)
+ expect(res.statusText).to.equal('No Content')
+ expect(res.headers.get('content-encoding')).to.equal('br')
+ expect(res.ok).to.be.true
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.be.empty
+ })
+ })
+ })
+
+ it('should skip decompression if unsupported', () => {
+ const url = `${base}sdch`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.equal('fake sdch string')
+ })
+ })
+ })
+
+ it('should skip decompression if unsupported codings', () => {
+ const url = `${base}multiunsupported`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.equal('multiunsupported')
+ })
+ })
+ })
+
+ it('should decompress multiple coding', () => {
+ const url = `${base}multisupported`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(result => {
+ expect(result).to.be.a('string')
+ expect(result).to.equal('hello world')
+ })
+ })
+ })
+
+ it('should reject if response compression is invalid', () => {
+ const url = `${base}invalid-content-encoding`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return expect(res.text()).to.eventually.be.rejected
+ })
+ })
+
+ it('should handle errors on the body stream even if it is not used', done => {
+ const url = `${base}invalid-content-encoding`
+ fetch(url)
+ .then(res => {
+ expect(res.status).to.equal(200)
+ })
+ .catch(() => {})
+ .then(() => {
+ // Wait a few ms to see if a uncaught error occurs
+ setTimeout(() => {
+ done()
+ }, 20)
+ })
+ })
+
+ it('should collect handled errors on the body stream to reject if the body is used later', () => {
+ const url = `${base}invalid-content-encoding`
+ return fetch(url).then(delay(20)).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return expect(res.text()).to.eventually.be.rejected
+ })
+ })
+
+ it('should not overwrite existing accept-encoding header when auto decompression is true', () => {
+ const url = `${base}inspect`
+ const options = {
+ compress: true,
+ headers: {
+ 'Accept-Encoding': 'gzip'
+ }
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.headers['accept-encoding']).to.equal('gzip')
+ })
+ })
+
+ describe('AbortController', () => {
+ let controller
+
+ beforeEach(() => {
+ controller = new AbortController()
+ })
+
+ it('should support request cancellation with signal', () => {
+ const fetches = [
+ fetch(
+ `${base}timeout`,
+ {
+ method: 'POST',
+ signal: controller.signal,
+ headers: {
+ 'Content-Type': 'application/json',
+ body: JSON.stringify({ hello: 'world' })
+ }
+ }
+ )
+ ]
+
+ controller.abort()
+
+ return Promise.all(fetches.map(fetched => expect(fetched)
+ .to.eventually.be.rejected
+ .and.be.an.instanceOf(Error)
+ .and.have.property('name', 'AbortError')
+ ))
+ })
+
+ it('should support multiple request cancellation with signal', () => {
+ const fetches = [
+ fetch(`${base}timeout`, { signal: controller.signal }),
+ fetch(
+ `${base}timeout`,
+ {
+ method: 'POST',
+ signal: controller.signal,
+ headers: {
+ 'Content-Type': 'application/json',
+ body: JSON.stringify({ hello: 'world' })
+ }
+ }
+ )
+ ]
+
+ controller.abort()
+
+ return Promise.all(fetches.map(fetched => expect(fetched)
+ .to.eventually.be.rejected
+ .and.be.an.instanceOf(Error)
+ .and.have.property('name', 'AbortError')
+ ))
+ })
+
+ it('should reject immediately if signal has already been aborted', () => {
+ const url = `${base}timeout`
+ const options = {
+ signal: controller.signal
+ }
+ controller.abort()
+ const fetched = fetch(url, options)
+ return expect(fetched).to.eventually.be.rejected
+ .and.be.an.instanceOf(Error)
+ .and.have.property('name', 'AbortError')
+ })
+
+ it('should allow redirects to be aborted', () => {
+ const request = new Request(`${base}redirect/slow`, {
+ signal: controller.signal
+ })
+ setTimeout(() => {
+ controller.abort()
+ }, 20)
+ return expect(fetch(request)).to.be.eventually.rejected
+ .and.be.an.instanceOf(Error)
+ .and.have.property('name', 'AbortError')
+ })
+
+ it('should allow redirected response body to be aborted', () => {
+ const request = new Request(`${base}redirect/slow-stream`, {
+ signal: controller.signal
+ })
+ return expect(fetch(request).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ const result = res.text()
+ controller.abort()
+ return result
+ })).to.be.eventually.rejected
+ .and.be.an.instanceOf(Error)
+ .and.have.property('name', 'AbortError')
+ })
+
+ it('should reject response body with AbortError when aborted before stream has been read completely', () => {
+ return expect(fetch(
+ `${base}slow`,
+ { signal: controller.signal }
+ ))
+ .to.eventually.be.fulfilled
+ .then(res => {
+ const promise = res.text()
+ controller.abort()
+ return expect(promise)
+ .to.eventually.be.rejected
+ .and.be.an.instanceof(Error)
+ .and.have.property('name', 'AbortError')
+ })
+ })
+
+ it('should reject response body methods immediately with AbortError when aborted before stream is disturbed', () => {
+ return expect(fetch(
+ `${base}slow`,
+ { signal: controller.signal }
+ ))
+ .to.eventually.be.fulfilled
+ .then(res => {
+ controller.abort()
+ return expect(res.text())
+ .to.eventually.be.rejected
+ .and.be.an.instanceof(Error)
+ .and.have.property('name', 'AbortError')
+ })
+ })
+ })
+
+ it('should throw a TypeError if a signal is not of type AbortSignal or EventTarget', () => {
+ return Promise.all([
+ expect(fetch(`${base}inspect`, { signal: {} }))
+ .to.be.eventually.rejected
+ .and.be.an.instanceof(TypeError),
+ expect(fetch(`${base}inspect`, { signal: '' }))
+ .to.be.eventually.rejected
+ .and.be.an.instanceof(TypeError),
+ expect(fetch(`${base}inspect`, { signal: Object.create(null) }))
+ .to.be.eventually.rejected
+ .and.be.an.instanceof(TypeError)
+ ])
+ })
+
+ it('should gracefully handle a null signal', () => {
+ return fetch(`${base}hello`, { signal: null }).then(res => {
+ return expect(res.ok).to.be.true
+ })
+ })
+
+ it('should allow setting User-Agent', () => {
+ const url = `${base}inspect`
+ const options = {
+ headers: {
+ 'user-agent': 'faked'
+ }
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.headers['user-agent']).to.equal('faked')
+ })
+ })
+
+ it('should set default Accept header', () => {
+ const url = `${base}inspect`
+ fetch(url).then(res => res.json()).then(res => {
+ expect(res.headers.accept).to.equal('*/*')
+ })
+ })
+
+ it('should allow setting Accept header', () => {
+ const url = `${base}inspect`
+ const options = {
+ headers: {
+ accept: 'application/json'
+ }
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.headers.accept).to.equal('application/json')
+ })
+ })
+
+ it('should allow POST request', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST'
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('0')
+ })
+ })
+
+ it('should allow POST request with string body', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('a=1')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.equal('text/plain;charset=UTF-8')
+ expect(res.headers['content-length']).to.equal('3')
+ })
+ })
+
+ it('should allow POST request with buffer body', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: Buffer.from('a=1', 'utf-8')
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('a=1')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('3')
+ })
+ })
+
+ it('should allow POST request with ArrayBuffer body', () => {
+ const encoder = new TextEncoder()
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: encoder.encode('Hello, world!\n').buffer
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('Hello, world!\n')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('14')
+ })
+ })
+
+ it('should allow POST request with ArrayBuffer body from a VM context', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: new VMUint8Array(Buffer.from('Hello, world!\n')).buffer
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('Hello, world!\n')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('14')
+ })
+ })
+
+ it('should allow POST request with ArrayBufferView (Uint8Array) body', () => {
+ const encoder = new TextEncoder()
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: encoder.encode('Hello, world!\n')
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('Hello, world!\n')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('14')
+ })
+ })
+
+ it('should allow POST request with ArrayBufferView (BigUint64Array) body', () => {
+ const encoder = new TextEncoder()
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: new BigUint64Array(encoder.encode('0123456789abcdef').buffer)
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('0123456789abcdef')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('16')
+ })
+ })
+
+ it('should allow POST request with ArrayBufferView (DataView) body', () => {
+ const encoder = new TextEncoder()
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: new DataView(encoder.encode('Hello, world!\n').buffer)
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('Hello, world!\n')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('14')
+ })
+ })
+
+ it('should allow POST request with ArrayBufferView (Uint8Array) body from a VM context', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: new VMUint8Array(Buffer.from('Hello, world!\n'))
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('Hello, world!\n')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('14')
+ })
+ })
+
+ it('should allow POST request with ArrayBufferView (Uint8Array, offset, length) body', () => {
+ const encoder = new TextEncoder()
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: encoder.encode('Hello, world!\n').subarray(7, 13)
+ }
+ return fetch(url, options).then(res => res.json()).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('world!')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('6')
+ })
+ })
+
+ it('should allow POST request with blob body without type', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: new Blob(['a=1'])
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('a=1')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ // expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('3')
+ })
+ })
+
+ it('should allow POST request with blob body with type', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: new Blob(['a=1'], {
+ type: 'text/plain;charset=UTF-8'
+ })
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('a=1')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-type']).to.equal('text/plain;charset=utf-8')
+ expect(res.headers['content-length']).to.equal('3')
+ })
+ })
+
+ it('should allow POST request with readable stream as body', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: stream.Readable.from('a=1'),
+ duplex: 'half'
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('a=1')
+ expect(res.headers['transfer-encoding']).to.equal('chunked')
+ expect(res.headers['content-type']).to.be.undefined
+ expect(res.headers['content-length']).to.be.undefined
+ })
+ })
+
+ it('should allow POST request with object body', () => {
+ const url = `${base}inspect`
+ // Note that fetch simply calls toString on an object
+ const options = {
+ method: 'POST',
+ body: { a: 1 }
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.body).to.equal('[object Object]')
+ expect(res.headers['content-type']).to.equal('text/plain;charset=UTF-8')
+ expect(res.headers['content-length']).to.equal('15')
+ })
+ })
+
+ it('should allow POST request with form-data as body', () => {
+ const form = new FormData()
+ form.append('a', '1')
+
+ const url = `${base}multipart`
+ const options = {
+ method: 'POST',
+ body: form
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.headers['content-type']).to.startWith('multipart/form-data; boundary=')
+ expect(res.body).to.equal('a=1')
+ })
+ })
+
+ it('constructing a Response with URLSearchParams as body should have a Content-Type', () => {
+ const parameters = new URLSearchParams()
+ const res = new Response(parameters)
+ res.headers.get('Content-Type')
+ expect(res.headers.get('Content-Type')).to.equal('application/x-www-form-urlencoded;charset=UTF-8')
+ })
+
+ it('constructing a Request with URLSearchParams as body should have a Content-Type', () => {
+ const parameters = new URLSearchParams()
+ const request = new Request(base, { method: 'POST', body: parameters })
+ expect(request.headers.get('Content-Type')).to.equal('application/x-www-form-urlencoded;charset=UTF-8')
+ })
+
+ it('Reading a body with URLSearchParams should echo back the result', () => {
+ const parameters = new URLSearchParams()
+ parameters.append('a', '1')
+ return new Response(parameters).text().then(text => {
+ expect(text).to.equal('a=1')
+ })
+ })
+
+ // Body should have been cloned...
+ it('constructing a Request/Response with URLSearchParams and mutating it should not affected body', () => {
+ const parameters = new URLSearchParams()
+ const request = new Request(`${base}inspect`, { method: 'POST', body: parameters })
+ parameters.append('a', '1')
+ return request.text().then(text => {
+ expect(text).to.equal('')
+ })
+ })
+
+ it('should allow POST request with URLSearchParams as body', () => {
+ const parameters = new URLSearchParams()
+ parameters.append('a', '1')
+
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: parameters
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.headers['content-type']).to.equal('application/x-www-form-urlencoded;charset=UTF-8')
+ expect(res.headers['content-length']).to.equal('3')
+ expect(res.body).to.equal('a=1')
+ })
+ })
+
+ it('should still recognize URLSearchParams when extended', () => {
+ class CustomSearchParameters extends URLSearchParams {}
+ const parameters = new CustomSearchParameters()
+ parameters.append('a', '1')
+
+ const url = `${base}inspect`
+ const options = {
+ method: 'POST',
+ body: parameters
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('POST')
+ expect(res.headers['content-type']).to.equal('application/x-www-form-urlencoded;charset=UTF-8')
+ expect(res.headers['content-length']).to.equal('3')
+ expect(res.body).to.equal('a=1')
+ })
+ })
+
+ it('should allow PUT request', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'PUT',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('PUT')
+ expect(res.body).to.equal('a=1')
+ })
+ })
+
+ it('should allow DELETE request', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'DELETE'
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('DELETE')
+ })
+ })
+
+ it('should allow DELETE request with string body', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'DELETE',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('DELETE')
+ expect(res.body).to.equal('a=1')
+ expect(res.headers['transfer-encoding']).to.be.undefined
+ expect(res.headers['content-length']).to.equal('3')
+ })
+ })
+
+ it('should allow PATCH request', () => {
+ const url = `${base}inspect`
+ const options = {
+ method: 'PATCH',
+ body: 'a=1'
+ }
+ return fetch(url, options).then(res => {
+ return res.json()
+ }).then(res => {
+ expect(res.method).to.equal('PATCH')
+ expect(res.body).to.equal('a=1')
+ })
+ })
+
+ it('should allow HEAD request', () => {
+ const url = `${base}hello`
+ const options = {
+ method: 'HEAD'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.status).to.equal(200)
+ expect(res.statusText).to.equal('OK')
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ // expect(res.body).to.be.an.instanceof(stream.Transform)
+ return res.text()
+ }).then(text => {
+ expect(text).to.equal('')
+ })
+ })
+
+ it('should allow HEAD request with content-encoding header', () => {
+ const url = `${base}error/404`
+ const options = {
+ method: 'HEAD'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.status).to.equal(404)
+ expect(res.headers.get('content-encoding')).to.equal('gzip')
+ return res.text()
+ }).then(text => {
+ expect(text).to.equal('')
+ })
+ })
+
+ it('should allow OPTIONS request', () => {
+ const url = `${base}options`
+ const options = {
+ method: 'OPTIONS'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.status).to.equal(200)
+ expect(res.statusText).to.equal('OK')
+ expect(res.headers.get('allow')).to.equal('GET, HEAD, OPTIONS')
+ // expect(res.body).to.be.an.instanceof(stream.Transform)
+ })
+ })
+
+ it('should reject decoding body twice', () => {
+ const url = `${base}plain`
+ return fetch(url).then(res => {
+ expect(res.headers.get('content-type')).to.equal('text/plain')
+ return res.text().then(() => {
+ expect(res.bodyUsed).to.be.true
+ return expect(res.text()).to.eventually.be.rejectedWith(Error)
+ })
+ })
+ })
+
+ it('should allow cloning a json response and log it as text response', () => {
+ const url = `${base}json`
+ return fetch(url).then(res => {
+ const r1 = res.clone()
+ return Promise.all([res.json(), r1.text()]).then(results => {
+ expect(results[0]).to.deep.equal({ name: 'value' })
+ expect(results[1]).to.equal('{"name":"value"}')
+ })
+ })
+ })
+
+ it('should allow cloning a json response, and then log it as text response', () => {
+ const url = `${base}json`
+ return fetch(url).then(res => {
+ const r1 = res.clone()
+ return res.json().then(result => {
+ expect(result).to.deep.equal({ name: 'value' })
+ return r1.text().then(result => {
+ expect(result).to.equal('{"name":"value"}')
+ })
+ })
+ })
+ })
+
+ it('should allow cloning a json response, first log as text response, then return json object', () => {
+ const url = `${base}json`
+ return fetch(url).then(res => {
+ const r1 = res.clone()
+ return r1.text().then(result => {
+ expect(result).to.equal('{"name":"value"}')
+ return res.json().then(result => {
+ expect(result).to.deep.equal({ name: 'value' })
+ })
+ })
+ })
+ })
+
+ it('should not allow cloning a response after its been used', () => {
+ const url = `${base}hello`
+ return fetch(url).then(res =>
+ res.text().then(() => {
+ expect(() => {
+ res.clone()
+ }).to.throw(Error)
+ })
+ )
+ })
+
+ xit('should timeout on cloning response without consuming one of the streams when the second packet size is equal default highWaterMark', function () {
+ this.timeout(300)
+ const url = local.mockState(res => {
+ // Observed behavior of TCP packets splitting:
+ // - response body size <= 65438 → single packet sent
+ // - response body size > 65438 → multiple packets sent
+ // Max TCP packet size is 64kB (http://stackoverflow.com/a/2614188/5763764),
+ // but first packet probably transfers more than the response body.
+ const firstPacketMaxSize = 65438
+ const secondPacketSize = 16 * 1024 // = defaultHighWaterMark
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize))
+ })
+ return expect(
+ fetch(url).then(res => res.clone().buffer())
+ ).to.timeout
+ })
+
+ xit('should timeout on cloning response without consuming one of the streams when the second packet size is equal custom highWaterMark', function () {
+ this.timeout(300)
+ const url = local.mockState(res => {
+ const firstPacketMaxSize = 65438
+ const secondPacketSize = 10
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize))
+ })
+ return expect(
+ fetch(url, { highWaterMark: 10 }).then(res => res.clone().buffer())
+ ).to.timeout
+ })
+
+ xit('should not timeout on cloning response without consuming one of the streams when the second packet size is less than default highWaterMark', function () {
+ // TODO: fix test.
+ if (!isNodeLowerThan('v16.0.0')) {
+ this.skip()
+ }
+
+ this.timeout(300)
+ const url = local.mockState(res => {
+ const firstPacketMaxSize = 65438
+ const secondPacketSize = 16 * 1024 // = defaultHighWaterMark
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize - 1))
+ })
+ return expect(
+ fetch(url).then(res => res.clone().buffer())
+ ).not.to.timeout
+ })
+
+ xit('should not timeout on cloning response without consuming one of the streams when the second packet size is less than custom highWaterMark', function () {
+ // TODO: fix test.
+ if (!isNodeLowerThan('v16.0.0')) {
+ this.skip()
+ }
+
+ this.timeout(300)
+ const url = local.mockState(res => {
+ const firstPacketMaxSize = 65438
+ const secondPacketSize = 10
+ res.end(crypto.randomBytes(firstPacketMaxSize + secondPacketSize - 1))
+ })
+ return expect(
+ fetch(url, { highWaterMark: 10 }).then(res => res.clone().buffer())
+ ).not.to.timeout
+ })
+
+ xit('should not timeout on cloning response without consuming one of the streams when the response size is double the custom large highWaterMark - 1', function () {
+ // TODO: fix test.
+ if (!isNodeLowerThan('v16.0.0')) {
+ this.skip()
+ }
+
+ this.timeout(300)
+ const url = local.mockState(res => {
+ res.end(crypto.randomBytes((2 * 512 * 1024) - 1))
+ })
+ return expect(
+ fetch(url, { highWaterMark: 512 * 1024 }).then(res => res.clone().buffer())
+ ).not.to.timeout
+ })
+
+ xit('should allow get all responses of a header', () => {
+ // TODO: fix test.
+ const url = `${base}cookie`
+ return fetch(url).then(res => {
+ const expected = 'a=1, b=1'
+ expect(res.headers.get('set-cookie')).to.equal(expected)
+ expect(res.headers.get('Set-Cookie')).to.equal(expected)
+ })
+ })
+
+ it('should support fetch with Request instance', () => {
+ const url = `${base}hello`
+ const request = new Request(url)
+ return fetch(request).then(res => {
+ expect(res.url).to.equal(url)
+ expect(res.ok).to.be.true
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should support fetch with Node.js URL object', () => {
+ const url = `${base}hello`
+ const urlObject = new URL(url)
+ const request = new Request(urlObject)
+ return fetch(request).then(res => {
+ expect(res.url).to.equal(url)
+ expect(res.ok).to.be.true
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should support fetch with WHATWG URL object', () => {
+ const url = `${base}hello`
+ const urlObject = new URL(url)
+ const request = new Request(urlObject)
+ return fetch(request).then(res => {
+ expect(res.url).to.equal(url)
+ expect(res.ok).to.be.true
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('if params are given, do not modify anything', () => {
+ const url = `${base}question?a=1`
+ const urlObject = new URL(url)
+ const request = new Request(urlObject)
+ return fetch(request).then(res => {
+ expect(res.url).to.equal(url)
+ expect(res.ok).to.be.true
+ expect(res.status).to.equal(200)
+ })
+ })
+
+ it('should support reading blob as text', () => {
+ return new Response('hello')
+ .blob()
+ .then(blob => blob.text())
+ .then(body => {
+ expect(body).to.equal('hello')
+ })
+ })
+
+ it('should support reading blob as arrayBuffer', () => {
+ return new Response('hello')
+ .blob()
+ .then(blob => blob.arrayBuffer())
+ .then(ab => {
+ const string = String.fromCharCode.apply(null, new Uint8Array(ab))
+ expect(string).to.equal('hello')
+ })
+ })
+
+ it('should support blob round-trip', () => {
+ const url = `${base}hello`
+
+ let length
+ let type
+
+ return fetch(url).then(res => res.blob()).then(async blob => {
+ const url = `${base}inspect`
+ length = blob.size
+ type = blob.type
+ return fetch(url, {
+ method: 'POST',
+ body: blob
+ })
+ }).then(res => res.json()).then(({ body, headers }) => {
+ expect(body).to.equal('world')
+ expect(headers['content-type']).to.equal(type)
+ expect(headers['content-length']).to.equal(String(length))
+ })
+ })
+
+ it('should support overwrite Request instance', () => {
+ const url = `${base}inspect`
+ const request = new Request(url, {
+ method: 'POST',
+ headers: {
+ a: '1'
+ }
+ })
+ return fetch(request, {
+ method: 'GET',
+ headers: {
+ a: '2'
+ }
+ }).then(res => {
+ return res.json()
+ }).then(body => {
+ expect(body.method).to.equal('GET')
+ expect(body.headers.a).to.equal('2')
+ })
+ })
+
+ it('should support http request', function () {
+ this.timeout(5000)
+ const url = 'https://github.com/'
+ const options = {
+ method: 'HEAD'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.status).to.equal(200)
+ expect(res.ok).to.be.true
+ })
+ })
+
+ it('should encode URLs as UTF-8', async () => {
+ const url = `${base}möbius`
+ const res = await fetch(url)
+ expect(res.url).to.equal(`${base}m%C3%B6bius`)
+ })
+
+ it('should allow manual redirect handling', function () {
+ this.timeout(5000)
+ const url = `${base}redirect/302`
+ const options = {
+ redirect: 'manual'
+ }
+ return fetch(url, options).then(res => {
+ expect(res.status).to.equal(302)
+ expect(res.url).to.equal(url)
+ expect(res.type).to.equal('basic')
+ expect(res.headers.get('Location')).to.equal('/inspect')
+ expect(res.ok).to.be.false
+ })
+ })
+})
diff --git a/test/node-fetch/mock.js b/test/node-fetch/mock.js
new file mode 100644
index 0000000..a53f464
--- /dev/null
+++ b/test/node-fetch/mock.js
@@ -0,0 +1,112 @@
+/* eslint no-unused-expressions: "off" */
+
+// Test tools
+const chai = require('chai')
+
+const {
+ fetch,
+ MockAgent,
+ setGlobalDispatcher,
+ Headers
+} = require('../../index.js')
+
+const { expect } = chai
+
+describe('node-fetch with MockAgent', () => {
+ it('should match the url', async () => {
+ const mockAgent = new MockAgent()
+ setGlobalDispatcher(mockAgent)
+ const mockPool = mockAgent.get('http://localhost:3000')
+
+ mockPool
+ .intercept({
+ path: '/test',
+ method: 'GET'
+ })
+ .reply(200, { success: true })
+ .persist()
+
+ const res = await fetch('http://localhost:3000/test', {
+ method: 'GET'
+ })
+
+ expect(res.status).to.equal(200)
+ expect(await res.json()).to.deep.equal({ success: true })
+ })
+
+ it('should match the body', async () => {
+ const mockAgent = new MockAgent()
+ setGlobalDispatcher(mockAgent)
+ const mockPool = mockAgent.get('http://localhost:3000')
+
+ mockPool
+ .intercept({
+ path: '/test',
+ method: 'POST',
+ body: (value) => {
+ return value === 'request body'
+ }
+ })
+ .reply(200, { success: true })
+ .persist()
+
+ const res = await fetch('http://localhost:3000/test', {
+ method: 'POST',
+ body: 'request body'
+ })
+
+ expect(res.status).to.equal(200)
+ expect(await res.json()).to.deep.equal({ success: true })
+ })
+
+ it('should match the headers', async () => {
+ const mockAgent = new MockAgent()
+ setGlobalDispatcher(mockAgent)
+ const mockPool = mockAgent.get('http://localhost:3000')
+
+ mockPool
+ .intercept({
+ path: '/test',
+ method: 'GET',
+ headers: (h) => {
+ return h['user-agent'] === 'undici'
+ }
+ })
+ .reply(200, { success: true })
+ .persist()
+
+ const res = await fetch('http://localhost:3000/test', {
+ method: 'GET',
+ headers: new Headers({ 'User-Agent': 'undici' })
+ })
+ expect(res.status).to.equal(200)
+ expect(await res.json()).to.deep.equal({ success: true })
+ })
+
+ it('should match the headers with a matching function', async () => {
+ const mockAgent = new MockAgent()
+ setGlobalDispatcher(mockAgent)
+ const mockPool = mockAgent.get('http://localhost:3000')
+
+ mockPool
+ .intercept({
+ path: '/test',
+ method: 'GET',
+ headers (headers) {
+ expect(headers).to.be.an('object')
+ expect(headers).to.have.property('user-agent', 'undici')
+ return true
+ }
+ })
+ .reply(200, { success: true })
+ .persist()
+
+ const res = await fetch('http://localhost:3000/test', {
+ method: 'GET',
+ headers: new Headers({ 'User-Agent': 'undici' })
+ })
+
+ expect(res.status).to.equal(200)
+ expect(await res.json()).to.deep.equal({ success: true })
+ })
+})
diff --git a/test/node-fetch/request.js b/test/node-fetch/request.js
new file mode 100644
index 0000000..2d29d51
--- /dev/null
+++ b/test/node-fetch/request.js
@@ -0,0 +1,281 @@
+const stream = require('stream')
+const http = require('http')
+
+const chai = require('chai')
+const { Blob } = require('buffer')
+
+const Request = require('../../lib/fetch/request.js').Request
+const TestServer = require('./utils/server.js')
+
+const { expect } = chai
+
+describe('Request', () => {
+ const local = new TestServer()
+ let base
+
+ before(async () => {
+ await local.start()
+ base = `http://${local.hostname}:${local.port}/`
+ })
+
+ after(async () => {
+ return local.stop()
+ })
+
+ it('should have attributes conforming to Web IDL', () => {
+ const request = new Request('http://github.com/')
+ const enumerableProperties = []
+ for (const property in request) {
+ enumerableProperties.push(property)
+ }
+
+ for (const toCheck of [
+ 'body',
+ 'bodyUsed',
+ 'arrayBuffer',
+ 'blob',
+ 'json',
+ 'text',
+ 'method',
+ 'url',
+ 'headers',
+ 'redirect',
+ 'clone',
+ 'signal'
+ ]) {
+ expect(enumerableProperties).to.contain(toCheck)
+ }
+
+ // for (const toCheck of [
+ // 'body', 'bodyUsed', 'method', 'url', 'headers', 'redirect', 'signal'
+ // ]) {
+ // expect(() => {
+ // request[toCheck] = 'abc'
+ // }).to.throw()
+ // }
+ })
+
+ // it('should support wrapping Request instance', () => {
+ // const url = `${base}hello`
+
+ // const form = new FormData()
+ // form.append('a', '1')
+ // const { signal } = new AbortController()
+
+ // const r1 = new Request(url, {
+ // method: 'POST',
+ // follow: 1,
+ // body: form,
+ // signal
+ // })
+ // const r2 = new Request(r1, {
+ // follow: 2
+ // })
+
+ // expect(r2.url).to.equal(url)
+ // expect(r2.method).to.equal('POST')
+ // expect(r2.signal).to.equal(signal)
+ // // Note that we didn't clone the body
+ // expect(r2.body).to.equal(form)
+ // expect(r1.follow).to.equal(1)
+ // expect(r2.follow).to.equal(2)
+ // expect(r1.counter).to.equal(0)
+ // expect(r2.counter).to.equal(0)
+ // })
+
+ xit('should override signal on derived Request instances', () => {
+ const parentAbortController = new AbortController()
+ const derivedAbortController = new AbortController()
+ const parentRequest = new Request(`${base}hello`, {
+ signal: parentAbortController.signal
+ })
+ const derivedRequest = new Request(parentRequest, {
+ signal: derivedAbortController.signal
+ })
+ expect(parentRequest.signal).to.equal(parentAbortController.signal)
+ expect(derivedRequest.signal).to.equal(derivedAbortController.signal)
+ })
+
+ xit('should allow removing signal on derived Request instances', () => {
+ const parentAbortController = new AbortController()
+ const parentRequest = new Request(`${base}hello`, {
+ signal: parentAbortController.signal
+ })
+ const derivedRequest = new Request(parentRequest, {
+ signal: null
+ })
+ expect(parentRequest.signal).to.equal(parentAbortController.signal)
+ expect(derivedRequest.signal).to.equal(null)
+ })
+
+ it('should throw error with GET/HEAD requests with body', () => {
+ expect(() => new Request(base, { body: '' }))
+ .to.throw(TypeError)
+ expect(() => new Request(base, { body: 'a' }))
+ .to.throw(TypeError)
+ expect(() => new Request(base, { body: '', method: 'HEAD' }))
+ .to.throw(TypeError)
+ expect(() => new Request(base, { body: 'a', method: 'HEAD' }))
+ .to.throw(TypeError)
+ expect(() => new Request(base, { body: 'a', method: 'get' }))
+ .to.throw(TypeError)
+ expect(() => new Request(base, { body: 'a', method: 'head' }))
+ .to.throw(TypeError)
+ })
+
+ it('should default to null as body', () => {
+ const request = new Request(base)
+ expect(request.body).to.equal(null)
+ return request.text().then(result => expect(result).to.equal(''))
+ })
+
+ it('should support parsing headers', () => {
+ const url = base
+ const request = new Request(url, {
+ headers: {
+ a: '1'
+ }
+ })
+ expect(request.url).to.equal(url)
+ expect(request.headers.get('a')).to.equal('1')
+ })
+
+ it('should support arrayBuffer() method', () => {
+ const url = base
+ const request = new Request(url, {
+ method: 'POST',
+ body: 'a=1'
+ })
+ expect(request.url).to.equal(url)
+ return request.arrayBuffer().then(result => {
+ expect(result).to.be.an.instanceOf(ArrayBuffer)
+ const string = String.fromCharCode.apply(null, new Uint8Array(result))
+ expect(string).to.equal('a=1')
+ })
+ })
+
+ it('should support text() method', () => {
+ const url = base
+ const request = new Request(url, {
+ method: 'POST',
+ body: 'a=1'
+ })
+ expect(request.url).to.equal(url)
+ return request.text().then(result => {
+ expect(result).to.equal('a=1')
+ })
+ })
+
+ it('should support json() method', () => {
+ const url = base
+ const request = new Request(url, {
+ method: 'POST',
+ body: '{"a":1}'
+ })
+ expect(request.url).to.equal(url)
+ return request.json().then(result => {
+ expect(result.a).to.equal(1)
+ })
+ })
+
+ it('should support blob() method', () => {
+ const url = base
+ const request = new Request(url, {
+ method: 'POST',
+ body: Buffer.from('a=1')
+ })
+ expect(request.url).to.equal(url)
+ return request.blob().then(result => {
+ expect(result).to.be.an.instanceOf(Blob)
+ expect(result.size).to.equal(3)
+ expect(result.type).to.equal('')
+ })
+ })
+
+ it('should support clone() method', () => {
+ const url = base
+ const body = stream.Readable.from('a=1')
+ const agent = new http.Agent()
+ const { signal } = new AbortController()
+ const request = new Request(url, {
+ body,
+ method: 'POST',
+ redirect: 'manual',
+ headers: {
+ b: '2'
+ },
+ follow: 3,
+ compress: false,
+ agent,
+ signal,
+ duplex: 'half'
+ })
+ const cl = request.clone()
+ expect(cl.url).to.equal(url)
+ expect(cl.method).to.equal('POST')
+ expect(cl.redirect).to.equal('manual')
+ expect(cl.headers.get('b')).to.equal('2')
+ expect(cl.method).to.equal('POST')
+ // Clone body shouldn't be the same body
+ expect(cl.body).to.not.equal(body)
+ return Promise.all([cl.text(), request.text()]).then(results => {
+ expect(results[0]).to.equal('a=1')
+ expect(results[1]).to.equal('a=1')
+ })
+ })
+
+ it('should support ArrayBuffer as body', () => {
+ const encoder = new TextEncoder()
+ const body = encoder.encode('a=12345678901234').buffer
+ const request = new Request(base, {
+ method: 'POST',
+ body
+ })
+ new Uint8Array(body)[0] = 0
+ return request.text().then(result => {
+ expect(result).to.equal('a=12345678901234')
+ })
+ })
+
+ it('should support Uint8Array as body', () => {
+ const encoder = new TextEncoder()
+ const fullbuffer = encoder.encode('a=12345678901234').buffer
+ const body = new Uint8Array(fullbuffer, 2, 9)
+ const request = new Request(base, {
+ method: 'POST',
+ body
+ })
+ body[0] = 0
+ return request.text().then(result => {
+ expect(result).to.equal('123456789')
+ })
+ })
+
+ it('should support BigUint64Array as body', () => {
+ const encoder = new TextEncoder()
+ const fullbuffer = encoder.encode('a=12345678901234').buffer
+ const body = new BigUint64Array(fullbuffer, 8, 1)
+ const request = new Request(base, {
+ method: 'POST',
+ body
+ })
+ body[0] = 0n
+ return request.text().then(result => {
+ expect(result).to.equal('78901234')
+ })
+ })
+
+ it('should support DataView as body', () => {
+ const encoder = new TextEncoder()
+ const fullbuffer = encoder.encode('a=12345678901234').buffer
+ const body = new Uint8Array(fullbuffer, 2, 9)
+ const request = new Request(base, {
+ method: 'POST',
+ body
+ })
+ body[0] = 0
+ return request.text().then(result => {
+ expect(result).to.equal('123456789')
+ })
+ })
+})
diff --git a/test/node-fetch/response.js b/test/node-fetch/response.js
new file mode 100644
index 0000000..4bb7c42
--- /dev/null
+++ b/test/node-fetch/response.js
@@ -0,0 +1,251 @@
+/* eslint no-unused-expressions: "off" */
+
+const chai = require('chai')
+const stream = require('stream')
+const { Response } = require('../../lib/fetch/response.js')
+const TestServer = require('./utils/server.js')
+const { Blob } = require('buffer')
+const { kState } = require('../../lib/fetch/symbols.js')
+
+const { expect } = chai
+
+describe('Response', () => {
+ const local = new TestServer()
+ let base
+
+ before(async () => {
+ await local.start()
+ base = `http://${local.hostname}:${local.port}/`
+ })
+
+ after(async () => {
+ return local.stop()
+ })
+
+ it('should have attributes conforming to Web IDL', () => {
+ const res = new Response()
+ const enumerableProperties = []
+ for (const property in res) {
+ enumerableProperties.push(property)
+ }
+
+ for (const toCheck of [
+ 'body',
+ 'bodyUsed',
+ 'arrayBuffer',
+ 'blob',
+ 'json',
+ 'text',
+ 'type',
+ 'url',
+ 'status',
+ 'ok',
+ 'redirected',
+ 'statusText',
+ 'headers',
+ 'clone'
+ ]) {
+ expect(enumerableProperties).to.contain(toCheck)
+ }
+
+ // TODO
+ // for (const toCheck of [
+ // 'body',
+ // 'bodyUsed',
+ // 'type',
+ // 'url',
+ // 'status',
+ // 'ok',
+ // 'redirected',
+ // 'statusText',
+ // 'headers'
+ // ]) {
+ // expect(() => {
+ // res[toCheck] = 'abc'
+ // }).to.throw()
+ // }
+ })
+
+ it('should support empty options', () => {
+ const res = new Response(stream.Readable.from('a=1'))
+ return res.text().then(result => {
+ expect(result).to.equal('a=1')
+ })
+ })
+
+ it('should support parsing headers', () => {
+ const res = new Response(null, {
+ headers: {
+ a: '1'
+ }
+ })
+ expect(res.headers.get('a')).to.equal('1')
+ })
+
+ it('should support text() method', () => {
+ const res = new Response('a=1')
+ return res.text().then(result => {
+ expect(result).to.equal('a=1')
+ })
+ })
+
+ it('should support json() method', () => {
+ const res = new Response('{"a":1}')
+ return res.json().then(result => {
+ expect(result.a).to.equal(1)
+ })
+ })
+
+ if (Blob) {
+ it('should support blob() method', () => {
+ const res = new Response('a=1', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'text/plain'
+ }
+ })
+ return res.blob().then(result => {
+ expect(result).to.be.an.instanceOf(Blob)
+ expect(result.size).to.equal(3)
+ expect(result.type).to.equal('text/plain')
+ })
+ })
+ }
+
+ it('should support clone() method', () => {
+ const body = stream.Readable.from('a=1')
+ const res = new Response(body, {
+ headers: {
+ a: '1'
+ },
+ status: 346,
+ statusText: 'production'
+ })
+ res[kState].urlList = [new URL(base)]
+ const cl = res.clone()
+ expect(cl.headers.get('a')).to.equal('1')
+ expect(cl.type).to.equal('default')
+ expect(cl.url).to.equal(base)
+ expect(cl.status).to.equal(346)
+ expect(cl.statusText).to.equal('production')
+ expect(cl.ok).to.be.false
+ // Clone body shouldn't be the same body
+ expect(cl.body).to.not.equal(body)
+ return Promise.all([cl.text(), res.text()]).then(results => {
+ expect(results[0]).to.equal('a=1')
+ expect(results[1]).to.equal('a=1')
+ })
+ })
+
+ it('should support stream as body', () => {
+ const body = stream.Readable.from('a=1')
+ const res = new Response(body)
+ return res.text().then(result => {
+ expect(result).to.equal('a=1')
+ })
+ })
+
+ it('should support string as body', () => {
+ const res = new Response('a=1')
+ return res.text().then(result => {
+ expect(result).to.equal('a=1')
+ })
+ })
+
+ it('should support buffer as body', () => {
+ const res = new Response(Buffer.from('a=1'))
+ return res.text().then(result => {
+ expect(result).to.equal('a=1')
+ })
+ })
+
+ it('should support ArrayBuffer as body', () => {
+ const encoder = new TextEncoder()
+ const fullbuffer = encoder.encode('a=12345678901234').buffer
+ const res = new Response(fullbuffer)
+ new Uint8Array(fullbuffer)[0] = 0
+ return res.text().then(result => {
+ expect(result).to.equal('a=12345678901234')
+ })
+ })
+
+ it('should support blob as body', async () => {
+ const res = new Response(new Blob(['a=1']))
+ return res.text().then(result => {
+ expect(result).to.equal('a=1')
+ })
+ })
+
+ it('should support Uint8Array as body', () => {
+ const encoder = new TextEncoder()
+ const fullbuffer = encoder.encode('a=12345678901234').buffer
+ const body = new Uint8Array(fullbuffer, 2, 9)
+ const res = new Response(body)
+ body[0] = 0
+ return res.text().then(result => {
+ expect(result).to.equal('123456789')
+ })
+ })
+
+ it('should support BigUint64Array as body', () => {
+ const encoder = new TextEncoder()
+ const fullbuffer = encoder.encode('a=12345678901234').buffer
+ const body = new BigUint64Array(fullbuffer, 8, 1)
+ const res = new Response(body)
+ body[0] = 0n
+ return res.text().then(result => {
+ expect(result).to.equal('78901234')
+ })
+ })
+
+ it('should support DataView as body', () => {
+ const encoder = new TextEncoder()
+ const fullbuffer = encoder.encode('a=12345678901234').buffer
+ const body = new Uint8Array(fullbuffer, 2, 9)
+ const res = new Response(body)
+ body[0] = 0
+ return res.text().then(result => {
+ expect(result).to.equal('123456789')
+ })
+ })
+
+ it('should default to null as body', () => {
+ const res = new Response()
+ expect(res.body).to.equal(null)
+
+ return res.text().then(result => expect(result).to.equal(''))
+ })
+
+ it('should default to 200 as status code', () => {
+ const res = new Response(null)
+ expect(res.status).to.equal(200)
+ })
+
+ it('should default to empty string as url', () => {
+ const res = new Response()
+ expect(res.url).to.equal('')
+ })
+
+ it('should support error() static method', () => {
+ const res = Response.error()
+ expect(res).to.be.an.instanceof(Response)
+ expect(res.type).to.equal('error')
+ expect(res.status).to.equal(0)
+ expect(res.statusText).to.equal('')
+ })
+
+ it('should support undefined status', () => {
+ const res = new Response(null, { status: undefined })
+ expect(res.status).to.equal(200)
+ })
+
+ it('should support undefined statusText', () => {
+ const res = new Response(null, { statusText: undefined })
+ expect(res.statusText).to.equal('')
+ })
+
+ it('should not set bodyUsed to undefined', () => {
+ const res = new Response()
+ expect(res.bodyUsed).to.be.false
+ })
+})
diff --git a/test/node-fetch/utils/chai-timeout.js b/test/node-fetch/utils/chai-timeout.js
new file mode 100644
index 0000000..6838a4c
--- /dev/null
+++ b/test/node-fetch/utils/chai-timeout.js
@@ -0,0 +1,15 @@
+const pTimeout = require('p-timeout')
+
+module.exports = ({ Assertion }, utils) => {
+ utils.addProperty(Assertion.prototype, 'timeout', async function () {
+ let timeouted = false
+ await pTimeout(this._obj, 150, () => {
+ timeouted = true
+ })
+ return this.assert(
+ timeouted,
+ 'expected promise to timeout but it was resolved',
+ 'expected promise not to timeout but it timed out'
+ )
+ })
+}
diff --git a/test/node-fetch/utils/dummy.txt b/test/node-fetch/utils/dummy.txt
new file mode 100644
index 0000000..5ca5191
--- /dev/null
+++ b/test/node-fetch/utils/dummy.txt
@@ -0,0 +1 @@
+i am a dummy \ No newline at end of file
diff --git a/test/node-fetch/utils/read-stream.js b/test/node-fetch/utils/read-stream.js
new file mode 100644
index 0000000..7d79153
--- /dev/null
+++ b/test/node-fetch/utils/read-stream.js
@@ -0,0 +1,9 @@
+module.exports = async function readStream (stream) {
+ const chunks = []
+
+ for await (const chunk of stream) {
+ chunks.push(chunk instanceof Buffer ? chunk : Buffer.from(chunk))
+ }
+
+ return Buffer.concat(chunks)
+}
diff --git a/test/node-fetch/utils/server.js b/test/node-fetch/utils/server.js
new file mode 100644
index 0000000..46dc983
--- /dev/null
+++ b/test/node-fetch/utils/server.js
@@ -0,0 +1,467 @@
+const http = require('http')
+const zlib = require('zlib')
+const { once } = require('events')
+const Busboy = require('@fastify/busboy')
+
+// Test fixture HTTP server. Every route the node-fetch suite exercises is
+// matched by path inside router(); unmatched paths get no response and the
+// connection is reaped by the 1500 ms socket timeout set in the constructor.
+module.exports = class TestServer {
+ constructor () {
+ // NOTE(review): router is installed as a plain request listener; the code
+ // below relies on it being invoked with `this` bound to the server object
+ // (mockState() stores nextResponseHandler on this.server) — confirm.
+ this.server = http.createServer(this.router)
+ // Node 8 default keepalive timeout is 5000ms
+ // make it shorter here as we want to close server quickly at the end of tests
+ this.server.keepAliveTimeout = 1000
+ this.server.on('error', err => {
+ console.log(err.stack)
+ })
+ this.server.on('connection', socket => {
+ socket.setTimeout(1500)
+ })
+ }
+
+ // Bind to an ephemeral port on localhost; resolves once listening.
+ async start () {
+ this.server.listen(0, 'localhost')
+ return once(this.server, 'listening')
+ }
+
+ // Stop accepting connections; resolves when the server has closed.
+ async stop () {
+ this.server.close()
+ return once(this.server, 'close')
+ }
+
+ get port () {
+ return this.server.address().port
+ }
+
+ get hostname () {
+ return 'localhost'
+ }
+
+ // Arm a one-shot handler for the next request to /mocked and return that
+ // URL. The handler receives the raw ServerResponse.
+ mockState (responseHandler) {
+ this.server.nextResponseHandler = responseHandler
+ return `http://${this.hostname}:${this.port}/mocked`
+ }
+
+ // Dispatch by exact path (plus a couple of substring/prefix matches).
+ // Branches are independent `if`s with no early return; each path is
+ // expected to match at most one branch.
+ router (request, res) {
+ const p = request.url
+
+ if (p === '/mocked') {
+ if (this.nextResponseHandler) {
+ this.nextResponseHandler(res)
+ this.nextResponseHandler = undefined
+ } else {
+ // NOTE(review): message uses typographic quotes (’…’) — intentional?
+ throw new Error('No mocked response. Use ’TestServer.mockState()’.')
+ }
+ }
+
+ if (p === '/hello') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('world')
+ }
+
+ // Substring match: any URL containing 'question' (e.g. query strings).
+ if (p.includes('question')) {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('ok')
+ }
+
+ if (p === '/plain') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('text')
+ }
+
+ if (p === '/no-status-text') {
+ res.writeHead(200, '', {}).end()
+ }
+
+ if (p === '/options') {
+ res.statusCode = 200
+ res.setHeader('Allow', 'GET, HEAD, OPTIONS')
+ res.end('hello world')
+ }
+
+ if (p === '/html') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/html')
+ res.end('<html></html>')
+ }
+
+ if (p === '/json') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'application/json')
+ res.end(JSON.stringify({
+ name: 'value'
+ }))
+ }
+
+ // --- Content-Encoding fixtures (gzip/deflate/brotli and broken variants) ---
+ if (p === '/gzip') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Encoding', 'gzip')
+ zlib.gzip('hello world', (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ res.end(buffer)
+ })
+ }
+
+ if (p === '/gzip-truncated') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Encoding', 'gzip')
+ zlib.gzip('hello world', (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ // Truncate the CRC checksum and size check at the end of the stream
+ res.end(buffer.slice(0, -8))
+ })
+ }
+
+ if (p === '/gzip-capital') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ // Mixed-case encoding token: clients must match it case-insensitively.
+ res.setHeader('Content-Encoding', 'GZip')
+ zlib.gzip('hello world', (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ res.end(buffer)
+ })
+ }
+
+ if (p === '/deflate') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Encoding', 'deflate')
+ zlib.deflate('hello world', (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ res.end(buffer)
+ })
+ }
+
+ if (p === '/brotli') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ // Guarded: brotli support is only present on Node versions exposing it.
+ if (typeof zlib.createBrotliDecompress === 'function') {
+ res.setHeader('Content-Encoding', 'br')
+ zlib.brotliCompress('hello world', (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ res.end(buffer)
+ })
+ }
+ }
+
+ if (p === '/multiunsupported') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ if (typeof zlib.createBrotliDecompress === 'function') {
+ // Encoding list contains an unknown token ('asd') in the middle.
+ res.setHeader('Content-Encoding', 'br,asd,br')
+ res.end('multiunsupported')
+ }
+ }
+
+ if (p === '/multisupported') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ if (typeof zlib.createBrotliDecompress === 'function') {
+ res.setHeader('Content-Encoding', 'br,br')
+ // Double-compressed: body must be decoded twice to recover the text.
+ zlib.brotliCompress('hello world', (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ zlib.brotliCompress(buffer, (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ res.end(buffer)
+ })
+ })
+ }
+ }
+
+ if (p === '/deflate-raw') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ // Header says 'deflate' but the body is raw-deflate (no zlib wrapper).
+ res.setHeader('Content-Encoding', 'deflate')
+ zlib.deflateRaw('hello world', (err, buffer) => {
+ if (err) {
+ throw err
+ }
+
+ res.end(buffer)
+ })
+ }
+
+ if (p === '/sdch') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Encoding', 'sdch')
+ res.end('fake sdch string')
+ }
+
+ if (p === '/invalid-content-encoding') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.setHeader('Content-Encoding', 'gzip')
+ res.end('fake gzip string')
+ }
+
+ // --- Timing fixtures ---
+ if (p === '/timeout') {
+ setTimeout(() => {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('text')
+ }, 1000)
+ }
+
+ if (p === '/slow') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.write('test')
+ setTimeout(() => {
+ res.end('test')
+ }, 1000)
+ }
+
+ if (p === '/cookie') {
+ res.statusCode = 200
+ res.setHeader('Set-Cookie', ['a=1', 'b=1'])
+ res.end('cookie')
+ }
+
+ if (p === '/size/chunk') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ setTimeout(() => {
+ res.write('test')
+ }, 10)
+ setTimeout(() => {
+ res.end('test')
+ }, 20)
+ }
+
+ if (p === '/size/long') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('testtest')
+ }
+
+ // --- Redirect fixtures ---
+ if (p === '/redirect/301') {
+ res.statusCode = 301
+ res.setHeader('Location', '/inspect')
+ res.end()
+ }
+
+ if (p === '/redirect/302') {
+ res.statusCode = 302
+ res.setHeader('Location', '/inspect')
+ res.end()
+ }
+
+ if (p === '/redirect/303') {
+ res.statusCode = 303
+ res.setHeader('Location', '/inspect')
+ res.end()
+ }
+
+ if (p === '/redirect/307') {
+ res.statusCode = 307
+ res.setHeader('Location', '/inspect')
+ res.end()
+ }
+
+ if (p === '/redirect/308') {
+ res.statusCode = 308
+ res.setHeader('Location', '/inspect')
+ res.end()
+ }
+
+ if (p === '/redirect/chain') {
+ res.statusCode = 301
+ res.setHeader('Location', '/redirect/301')
+ res.end()
+ }
+
+ // /redirect/chain/N bounces N-1 more times, then lands on /redirect/301.
+ if (p.startsWith('/redirect/chain/')) {
+ const count = parseInt(p.split('/').pop()) - 1
+ res.statusCode = 301
+ res.setHeader('Location', count ? `/redirect/chain/${count}` : '/redirect/301')
+ res.end()
+ }
+
+ if (p === '/redirect/no-location') {
+ res.statusCode = 301
+ res.end()
+ }
+
+ if (p === '/redirect/slow') {
+ res.statusCode = 301
+ res.setHeader('Location', '/redirect/301')
+ setTimeout(() => {
+ res.end()
+ }, 1000)
+ }
+
+ if (p === '/redirect/slow-chain') {
+ res.statusCode = 301
+ res.setHeader('Location', '/redirect/slow')
+ setTimeout(() => {
+ res.end()
+ }, 10)
+ }
+
+ if (p === '/redirect/slow-stream') {
+ res.statusCode = 301
+ res.setHeader('Location', '/slow')
+ res.end()
+ }
+
+ if (p === '/redirect/bad-location') {
+ // Write the response bytes straight to the socket: the snowman Location
+ // value cannot be produced through the header API.
+ res.socket.write('HTTP/1.1 301\r\nLocation: ☃\r\nContent-Length: 0\r\n')
+ res.socket.end('\r\n')
+ }
+
+ // --- Error fixtures ---
+ if (p === '/error/400') {
+ res.statusCode = 400
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('client error')
+ }
+
+ if (p === '/error/404') {
+ res.statusCode = 404
+ res.setHeader('Content-Encoding', 'gzip')
+ res.end()
+ }
+
+ if (p === '/error/500') {
+ res.statusCode = 500
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('server error')
+ }
+
+ if (p === '/error/reset') {
+ res.destroy()
+ }
+
+ // Declares 50 bytes but destroys the socket after 3: premature close.
+ if (p === '/error/premature') {
+ res.writeHead(200, { 'content-length': 50 })
+ res.write('foo')
+ setTimeout(() => {
+ res.destroy()
+ }, 100)
+ }
+
+ if (p === '/error/premature/chunked') {
+ res.writeHead(200, {
+ 'Content-Type': 'application/json',
+ 'Transfer-Encoding': 'chunked'
+ })
+
+ res.write(`${JSON.stringify({ data: 'hi' })}\n`)
+
+ setTimeout(() => {
+ res.write(`${JSON.stringify({ data: 'bye' })}\n`)
+ }, 200)
+
+ // Destroyed without the terminating zero-length chunk.
+ setTimeout(() => {
+ res.destroy()
+ }, 400)
+ }
+
+ if (p === '/error/json') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'application/json')
+ res.end('invalid json')
+ }
+
+ if (p === '/no-content') {
+ res.statusCode = 204
+ res.end()
+ }
+
+ if (p === '/no-content/gzip') {
+ res.statusCode = 204
+ res.setHeader('Content-Encoding', 'gzip')
+ res.end()
+ }
+
+ if (p === '/no-content/brotli') {
+ res.statusCode = 204
+ res.setHeader('Content-Encoding', 'br')
+ res.end()
+ }
+
+ if (p === '/not-modified') {
+ res.statusCode = 304
+ res.end()
+ }
+
+ if (p === '/not-modified/gzip') {
+ res.statusCode = 304
+ res.setHeader('Content-Encoding', 'gzip')
+ res.end()
+ }
+
+ // Echo the request (method, url, headers, raw body) back as JSON.
+ if (p === '/inspect') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'application/json')
+ let body = ''
+ request.on('data', c => {
+ body += c
+ })
+ request.on('end', () => {
+ res.end(JSON.stringify({
+ method: request.method,
+ url: request.url,
+ headers: request.headers,
+ body
+ }))
+ })
+ }
+
+ // Parse multipart bodies with busboy and echo field/file names back.
+ if (p === '/multipart') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'application/json')
+ const busboy = new Busboy({ headers: request.headers })
+ let body = ''
+ busboy.on('file', async (fieldName, file, fileName) => {
+ body += `${fieldName}=${fileName}`
+ // consume file data
+ // eslint-disable-next-line no-empty, no-unused-vars
+ for await (const c of file) {}
+ })
+
+ busboy.on('field', (fieldName, value) => {
+ body += `${fieldName}=${value}`
+ })
+ busboy.on('finish', () => {
+ res.end(JSON.stringify({
+ method: request.method,
+ url: request.url,
+ headers: request.headers,
+ body
+ }))
+ })
+ request.pipe(busboy)
+ }
+
+ // Percent-encoded non-ASCII path ('möbius').
+ if (p === '/m%C3%B6bius') {
+ res.statusCode = 200
+ res.setHeader('Content-Type', 'text/plain')
+ res.end('ok')
+ }
+ }
+}