// Test for bug 1312774.
// Create 6 (= network.http.max-persistent-connections-per-server) common
// HTTP requests and 2 urgent-start HTTP requests to a single host and path,
// in parallel.
// Leave all the requests unanswered by the server handler (process them
// asynchronously and don't finish them).
// The first 6 pending common requests fill the per-server parallelism limit,
// but the two urgent requests must still reach the server despite those 6
// pending common requests.
// The server handler doesn't let the test finish until all 8 expected
// requests have arrived.
// Note: if urgent-start handling is broken (the urgent-marked requests get
// blocked by queuing), this test will time out.

"use strict";

const { HttpServer } = ChromeUtils.import("resource://testing-common/httpd.js");
var server = new HttpServer();
server.start(-1);
var baseURL = "http://localhost:" + server.identity.primaryPort + "/";
var maxConnections = 0;
var urgentRequests = 0;
var debug = false;

function log(msg) {
  if (!debug) {
    return;
  }

  if (msg) {
    dump("TEST INFO | " + msg + "\n");
  }
}

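// Build an nsIHttpChannel for |url|, loaded with the system principal.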
function make_channel(url) {
  var request = NetUtil.newChannel({
    uri: url,
    loadUsingSystemPrincipal: true,
  });
  request.QueryInterface(Ci.nsIHttpChannel);
  return request;
}

function serverStopListener() {
  server.stop();
}

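// Issue one ordinary (non-urgent) request, tagged with an X-ID header so the
// server handler can tell the requests apart in the log.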
function commonHttpRequest(id) {
  let uri = baseURL;
  var chan = make_channel(uri);
  var listener = new HttpResponseListener(id);
  chan.setRequestHeader("X-ID", id, false);
  chan.setRequestHeader("Cache-control", "no-store", false);
  chan.asyncOpen(listener);
  log("Create common http request id=" + id);
}

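// Issue a request marked with the UrgentStart class-of-service flag. Such a
// request is expected to reach the server even though the per-server
// connection limit is already filled by the common requests.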
function urgentStartHttpRequest(id) {
  let uri = baseURL;
  var chan = make_channel(uri);
  var listener = new HttpResponseListener(id);
  var cos = chan.QueryInterface(Ci.nsIClassOfService);
  cos.addClassFlags(Ci.nsIClassOfService.UrgentStart);
  chan.setRequestHeader("X-ID", id, false);
  chan.setRequestHeader("Cache-control", "no-store", false);
  chan.asyncOpen(listener);
  log("Create urgent-start http request id=" + id);
}

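// Fire enough common requests to use up the whole per-server connection limit.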
function setup_httpRequests() {
  log("setup_httpRequests");
  for (var i = 0; i < maxConnections; i++) {
    commonHttpRequest(i);
    do_test_pending();
  }
}

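// Fire the urgent-start requests; called from the server handler once all
// common requests are pending.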
function setup_urgentStartRequests() {
  for (var i = 0; i < urgentRequests; i++) {
    urgentStartHttpRequest(1000 + i);
    do_test_pending();
  }
}

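// Minimal stream listener; each finished response resolves one pending test.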
function HttpResponseListener(id) {
  this.id = id;
}

HttpResponseListener.prototype = {
  onStartRequest(request) {},

  onDataAvailable(request, stream, off, cnt) {},

  onStopRequest(request, status) {
    log("STOP id=" + this.id);
    do_test_finished();
  },
};

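// Responses are parked here (via processAsync) until every expected request
// has arrived.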
var responseQueue = [];
function setup_http_server() {
  log("setup_http_server");
  maxConnections = Services.prefs.getIntPref(
    "network.http.max-persistent-connections-per-server"
  );
  urgentRequests = 2;
  var allCommonHttpRequestReceived = false;
  // Start server; will be stopped at test cleanup time.
  server.registerPathHandler("/", function (metadata, response) {
    var id = metadata.getHeader("X-ID");
    log("Server recived the response id=" + id);
    response.processAsync();
    responseQueue.push(response);

    if (
      responseQueue.length == maxConnections &&
      !allCommonHttpRequestReceived
    ) {
      allCommonHttpRequestReceived = true;
      setup_urgentStartRequests();
    }
    // Wait for all expected requests to come but don't process them yet.
    // Collect them in a queue for later processing.  We don't want to
    // respond to the client until all the expected requests are made
    // to the server.
    if (responseQueue.length == maxConnections + urgentRequests) {
      processResponse();
    }
  });

  registerCleanupFunction(function () {
    server.stop(serverStopListener);
  });
}

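// Finish all queued responses so the channels can complete and the test ends.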
function processResponse() {
  while (responseQueue.length) {
    var response = responseQueue.pop();
    response.finish();
  }
}

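// Entry point: read the connection-limit pref, install the server handler and
// start the common requests; the urgent requests are triggered from the
// handler itself.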
function run_test() {
  setup_http_server();
  setup_httpRequests();
}