summaryrefslogtreecommitdiffstats
path: root/dom/webgpu/tests/cts/checkout/src/common/tools/merge_listing_times.ts
blob: fb33ae20fb38b479688c44fcea89b5f69a48ab74 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
import * as fs from 'fs';
import * as process from 'process';
import * as readline from 'readline';

import { TestMetadataListing } from '../framework/metadata.js';
import { parseQuery } from '../internal/query/parseQuery.js';
import { TestQueryMultiCase, TestQuerySingleCase } from '../internal/query/query.js';
import { CaseTimingLogLine } from '../internal/test_group.js';
import { assert } from '../util/util.js';

// For information on listing_meta.json file maintenance, please read
// tools/merge_listing_times first.

/**
 * Print the command-line help text to stderr and terminate the process.
 *
 * @param rc - the process exit code to terminate with.
 */
function usage(rc: number): never {
  console.error(`Usage: tools/merge_listing_times [options] SUITES... -- [TIMING_LOG_FILES...]

Options:
  --help          Print this message and exit.

Reads raw case timing data for each suite in SUITES, from all TIMING_LOG_FILES
(see below), and merges it into the src/*/listing_meta.json files checked into
the repository. The timing data in the listing_meta.json files is updated with
the newly-observed timing data *if the new timing is slower*. That is, it will
only increase the values in the listing_meta.json file, and will only cause WPT
chunks to become smaller.

If there are no TIMING_LOG_FILES, this just regenerates (reformats) the file
using the data already present.

In more detail:

- Reads per-case timing data in any of the SUITES, from all TIMING_LOG_FILES
  (ignoring skipped cases), and averages it over the number of subcases.
  In the case of cases that have run multiple times, takes the max of each.
- Compiles the average time-per-subcase for each test seen.
- For each suite seen, loads its listing_meta.json, takes the max of the old and
  new data, and writes it back out.

How to generate TIMING_LOG_FILES files:

- Launch the 'websocket-logger' tool (see its README.md), which listens for
  log messages on localhost:59497.
- Run the tests you want to capture data for, on the same system. Since
  logging is done through the websocket side-channel, you can run the tests
  under any runtime (standalone, WPT, etc.) as long as WebSocket support is
  available (always true in browsers).
- Run \`tools/merge_listing_times webgpu -- tools/websocket-logger/wslog-*.txt\`
`);
  process.exit(rc);
}

// Fixed opening lines of every generated listing_meta.json. The "_comment"
// entry survives regeneration because writeListings skips keys starting with
// '_' when emitting metadata and writes this header verbatim instead.
const kHeader = `{
  "_comment": "SEMI AUTO-GENERATED: Please read docs/adding_timing_metadata.md.",
`;
// Fixed closing lines. The "_end" entry absorbs the trailing comma left by
// the last emitted metadata line, keeping the output valid JSON.
const kFooter = `\
  "_end": ""
}
`;

// Parse the command line: SUITES... -- [TIMING_LOG_FILES...].
//
// `--help` prints usage and exits 0. Any other option, or a missing `--`
// separator, is a usage error and exits 1 so that calling scripts and CI can
// detect the misuse (previously these paths also exited 0, masking errors).
// Note: only the real arguments (process.argv.slice(2)) are scanned, not the
// node binary / script path in argv[0..1].
const args = process.argv.slice(2);
if (args.includes('--help')) {
  usage(0);
}
if (args.some(v => v.startsWith('-') && v !== '--') || !args.includes('--')) {
  usage(1);
}
const suites: string[] = [];
const timingLogFilenames: string[] = [];
let seenDashDash = false;
for (const arg of args) {
  if (arg === '--') {
    seenDashDash = true;
    continue;
  } else if (arg.startsWith('-')) {
    // Unreachable after the scan above; kept as a safeguard.
    usage(1);
  }

  // Everything before `--` is a suite name; everything after is a log file.
  if (seenDashDash) {
    timingLogFilenames.push(arg);
  } else {
    suites.push(arg);
  }
}
if (!seenDashDash) {
  usage(1);
}

void (async () => {
  // Read the log files to find the log line for each *case* query. If a case
  // ran multiple times, take the one with the largest average subcase time.
  const caseTimes = new Map<string, CaseTimingLogLine>();
  for (const timingLogFilename of timingLogFilenames) {
    const rl = readline.createInterface({
      input: fs.createReadStream(timingLogFilename),
      crlfDelay: Infinity,
    });

    for await (const line of rl) {
      const parsed: CaseTimingLogLine = JSON.parse(line);

      const prev = caseTimes.get(parsed.q);
      if (prev !== undefined) {
        const timePerSubcase = parsed.timems / Math.max(1, parsed.nonskippedSubcaseCount);
        const prevTimePerSubcase = prev.timems / Math.max(1, prev.nonskippedSubcaseCount);

        if (timePerSubcase > prevTimePerSubcase) {
          caseTimes.set(parsed.q, parsed);
        }
      } else {
        caseTimes.set(parsed.q, parsed);
      }
    }
  }

  // Accumulate total times per test. Map of suite -> query -> {totalTimeMS, caseCount}.
  const testTimes = new Map<string, Map<string, { totalTimeMS: number; subcaseCount: number }>>();
  for (const suite of suites) {
    testTimes.set(suite, new Map());
  }
  for (const [caseQString, caseTime] of caseTimes) {
    const caseQ = parseQuery(caseQString);
    assert(caseQ instanceof TestQuerySingleCase);
    const suite = caseQ.suite;
    const suiteTestTimes = testTimes.get(suite);
    if (suiteTestTimes === undefined) {
      continue;
    }

    const testQ = new TestQueryMultiCase(suite, caseQ.filePathParts, caseQ.testPathParts, {});
    const testQString = testQ.toString();

    const prev = suiteTestTimes.get(testQString);
    if (prev !== undefined) {
      prev.totalTimeMS += caseTime.timems;
      prev.subcaseCount += caseTime.nonskippedSubcaseCount;
    } else {
      suiteTestTimes.set(testQString, {
        totalTimeMS: caseTime.timems,
        subcaseCount: caseTime.nonskippedSubcaseCount,
      });
    }
  }

  for (const suite of suites) {
    const currentMetadata: TestMetadataListing = JSON.parse(
      fs.readFileSync(`./src/${suite}/listing_meta.json`, 'utf8')
    );

    const metadata = { ...currentMetadata };
    for (const [testQString, { totalTimeMS, subcaseCount }] of testTimes.get(suite)!) {
      const avgTime = totalTimeMS / Math.max(1, subcaseCount);
      if (testQString in metadata) {
        metadata[testQString].subcaseMS = Math.max(metadata[testQString].subcaseMS, avgTime);
      } else {
        metadata[testQString] = { subcaseMS: avgTime };
      }
    }

    writeListings(suite, metadata);
  }
})();

/**
 * Regenerate ./src/<suite>/listing_meta.json: the fixed header, one line per
 * test query in sorted order, then the fixed footer.
 *
 * @param suite - suite name, used to locate the listing_meta.json file.
 * @param metadata - merged per-test timing metadata to serialize.
 */
function writeListings(suite: string, metadata: TestMetadataListing) {
  const stream = fs.createWriteStream(`./src/${suite}/listing_meta.json`);
  try {
    stream.write(kHeader);
    const sortedKeys = Object.keys(metadata).sort();
    for (const key of sortedKeys) {
      // Skip json "_comment"-style entries; the header/footer emit their own.
      if (key.startsWith('_')) {
        continue;
      }
      // An embedded quote would corrupt the hand-built JSON line below.
      assert(key.indexOf('"') === -1);
      const ms = metadata[key].subcaseMS.toFixed(3);
      stream.write(`  "${key}": { "subcaseMS": ${ms} },\n`);
    }
    stream.write(kFooter);
  } finally {
    stream.close();
  }
}