summaryrefslogtreecommitdiffstats
path: root/dom/bindings/mozwebidlcodegen/__init__.py
blob: bba95edb789ef2198308b4a7ddefc0f8d1a3dfc0 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This module contains code for managing WebIDL files and bindings for
# the build system.

import errno
import hashlib
import io
import json
import logging
import os
import sys
from multiprocessing import Pool

import mozpack.path as mozpath
from mach.mixin.logging import LoggingMixin
from mozbuild.makeutil import Makefile
from mozbuild.pythonutil import iter_modules_in_path
from mozbuild.util import FileAvoidWrite

# There are various imports in this file in functions to avoid adding
# dependencies to config.status. See bug 949875.

# Limit the count on Windows, because of bug 1889842 and also the
# inefficiency of fork on Windows.
# (Windows has no fork(); multiprocessing must spawn fresh interpreters,
# which is comparatively expensive, so a small fixed pool wins there.)
DEFAULT_PROCESS_COUNT = 4 if sys.platform == "win32" else os.cpu_count()


class WebIDLPool:
    """Fan WebIDL code generation out across worker processes.

    The (large) generator state is installed once per worker via the pool
    initializer rather than being pickled along with every single task,
    avoiding redundant state copies.
    """

    # Per-process generator state, installed by _init().
    GeneratorState = None

    def __init__(self, GeneratorState, *, processes=None):
        if processes is None:
            processes = DEFAULT_PROCESS_COUNT

        if processes != 1:
            self.pool = Pool(
                initializer=WebIDLPool._init,
                initargs=(GeneratorState,),
                processes=processes,
            )
            return

        # Special case: with a single process there is no point paying the
        # fork/spawn cost, so run everything inline in this process.
        WebIDLPool._init(GeneratorState)

        class InlinePool:
            """Drop-in replacement exposing only the map() we use."""

            def map(self, *args):
                return list(map(*args))

        self.pool = InlinePool()

    def run(self, filenames):
        """Generate build files for each filename; results keep input order."""
        return self.pool.map(WebIDLPool._run, filenames)

    @staticmethod
    def _init(GeneratorState):
        # Runs once in each worker process (or inline for processes=1).
        WebIDLPool.GeneratorState = GeneratorState

    @staticmethod
    def _run(filename):
        # Executed in a worker; relies on the state installed by _init().
        return WebIDLPool.GeneratorState._generate_build_files_for_webidl(filename)


class BuildResult(object):
    """Summary of output-file activity from processing WebIDL files."""

    def __init__(self):
        self.inputs = set()     # .webidl inputs whose outputs were regenerated
        self.created = set()    # outputs written for the first time
        self.updated = set()    # outputs whose content changed
        self.unchanged = set()  # outputs left byte-identical


class WebIDLCodegenManagerState(dict):
    """Holds state for the WebIDL code generation manager.

    State is currently just an extended dict. The internal implementation of
    state should be considered a black box to everyone except
    WebIDLCodegenManager. But we'll still document it.

    Any set stored in this dict should be copied and sorted in the `dump()`
    method.

    Fields:

    version
       The integer version of the format. This is to detect incompatible
       changes between state. It should be bumped whenever the format
       changes or semantics change.

    webidls
       A dictionary holding information about every known WebIDL input.
       Keys are the basenames of input WebIDL files. Values are dicts of
       metadata. Keys in those dicts are:

       * filename - The full path to the input filename.
       * inputs - A set of full paths to other webidl files this webidl
         depends on.
       * outputs - Set of full output paths that are created/derived from
         this file.
       * sha1 - The hexidecimal SHA-1 of the input filename from the last
         processing time.

    global_depends
       A dictionary defining files that influence all processing. Keys
       are full filenames. Values are hexidecimal SHA-1 from the last
       processing time.

    dictionaries_convertible_to_js
       A set of names of dictionaries that are convertible to JS.

    dictionaries_convertible_from_js
       A set of names of dictionaries that are convertible from JS.
    """

    VERSION = 3

    def __init__(self, fh=None):
        """Initialize state, optionally loading serialized JSON from fh.

        Raises Exception if fh contains state with an incompatible version.
        """
        self["version"] = self.VERSION
        self["webidls"] = {}
        self["global_depends"] = {}
        # Start the dictionary-conversion sets out empty so that readers
        # (including dump()) see empty sets rather than raising KeyError on
        # a fresh state that hasn't been through code generation yet. The
        # manager overwrites these during generate_build_files().
        self["dictionaries_convertible_to_js"] = set()
        self["dictionaries_convertible_from_js"] = set()

        if not fh:
            return

        state = json.load(fh)
        if state["version"] != self.VERSION:
            raise Exception("Unknown state version: %s" % state["version"])

        self["version"] = state["version"]
        self["global_depends"] = state["global_depends"]

        for k, v in state["webidls"].items():
            self["webidls"][k] = v

            # Sets are converted to lists for serialization because JSON
            # doesn't support sets.
            self["webidls"][k]["inputs"] = set(v["inputs"])
            self["webidls"][k]["outputs"] = set(v["outputs"])

        self["dictionaries_convertible_to_js"] = set(
            state["dictionaries_convertible_to_js"]
        )

        self["dictionaries_convertible_from_js"] = set(
            state["dictionaries_convertible_from_js"]
        )

    def dump(self, fh):
        """Dump serialized state to a file handle."""
        normalized = self.copy()

        webidls = normalized["webidls"] = self["webidls"].copy()
        for k, v in self["webidls"].items():
            webidls_k = webidls[k] = v.copy()

            # Convert sets to lists because JSON doesn't support sets.
            # Sorting keeps the serialized output deterministic.
            webidls_k["outputs"] = sorted(v["outputs"])
            webidls_k["inputs"] = sorted(v["inputs"])

        normalized["dictionaries_convertible_to_js"] = sorted(
            self["dictionaries_convertible_to_js"]
        )

        normalized["dictionaries_convertible_from_js"] = sorted(
            self["dictionaries_convertible_from_js"]
        )

        json.dump(normalized, fh, sort_keys=True)


class WebIDLCodegenManager(LoggingMixin):
    """Manages all code generation around WebIDL.

    To facilitate testing, this object is meant to be generic and reusable.
    Paths, etc should be parameters and not hardcoded.
    """

    # Global parser derived declaration files.
    GLOBAL_DECLARE_FILES = {
        "BindingNames.h",
        "GeneratedAtomList.h",
        "GeneratedEventList.h",
        "PrototypeList.h",
        "RegisterBindings.h",
        "RegisterShadowRealmBindings.h",
        "RegisterWorkerBindings.h",
        "RegisterWorkerDebuggerBindings.h",
        "RegisterWorkletBindings.h",
        "UnionTypes.h",
        "WebIDLPrefs.h",
        "WebIDLSerializable.h",
    }

    # Global parser derived definition files.
    GLOBAL_DEFINE_FILES = {
        "BindingNames.cpp",
        "RegisterBindings.cpp",
        "RegisterShadowRealmBindings.cpp",
        "RegisterWorkerBindings.cpp",
        "RegisterWorkerDebuggerBindings.cpp",
        "RegisterWorkletBindings.cpp",
        "UnionTypes.cpp",
        "PrototypeList.cpp",
        "WebIDLPrefs.cpp",
        "WebIDLSerializable.cpp",
    }

    def __init__(
        self,
        config_path,
        webidl_root,
        inputs,
        exported_header_dir,
        codegen_dir,
        state_path,
        cache_dir=None,
        make_deps_path=None,
        make_deps_target=None,
    ):
        """Create an instance that manages WebIDLs in the build system.

        config_path refers to a WebIDL config file (e.g. Bindings.conf).
        inputs is a 4-tuple describing the input .webidl files and how to
        process them. Members are:
            (set(.webidl files), set(basenames of exported files),
                set(basenames of generated events files),
                set(example interface names))

        exported_header_dir and codegen_dir are directories where generated
        files will be written to.
        state_path is the path to a file that will receive JSON state from our
        actions.
        make_deps_path is the path to a make dependency file that we can
        optionally write.
        make_deps_target is the target that receives the make dependencies. It
        must be defined if using make_deps_path.
        """
        self.populate_logger()

        input_paths, exported_stems, generated_events_stems, example_interfaces = inputs

        self._config_path = config_path
        self._webidl_root = webidl_root
        self._input_paths = set(input_paths)
        self._exported_stems = set(exported_stems)
        self._generated_events_stems = set(generated_events_stems)
        # Keep the original (ordered) sequence too; Configuration wants it
        # as passed rather than as a set.
        self._generated_events_stems_as_array = generated_events_stems
        self._example_interfaces = set(example_interfaces)
        self._exported_header_dir = exported_header_dir
        self._codegen_dir = codegen_dir
        self._state_path = state_path
        self._cache_dir = cache_dir
        self._make_deps_path = make_deps_path
        self._make_deps_target = make_deps_target

        if (make_deps_path and not make_deps_target) or (
            not make_deps_path and make_deps_target
        ):
            raise Exception(
                "Must define both make_deps_path and make_deps_target "
                "if one is defined."
            )

        self._parser_results = None
        self._config = None
        self._state = WebIDLCodegenManagerState()

        # Load previously saved state; a corrupt/incompatible state file is
        # not fatal -- we just fall back to the fresh state (full rebuild).
        if os.path.exists(state_path):
            with io.open(state_path, "r") as fh:
                try:
                    self._state = WebIDLCodegenManagerState(fh=fh)
                except Exception as e:
                    self.log(
                        logging.WARN,
                        "webidl_bad_state",
                        {"msg": str(e)},
                        "Bad WebIDL state: {msg}",
                    )

    @property
    def config(self):
        """The WebIDL Configuration, parsing the inputs on first access."""
        if not self._config:
            self._parse_webidl()

        return self._config

    def generate_build_files(self, *, processes=None):
        """Generate files required for the build.

        This function is in charge of generating all the .h/.cpp files derived
        from input .webidl files. Please note that there are build actions
        required to produce .webidl files and these build actions are
        explicitly not captured here: this function assumes all .webidl files
        are present and up to date.

        This routine is called as part of the build to ensure files that need
        to exist are present and up to date. This routine may not be called if
        the build dependencies (generated as a result of calling this the first
        time) say everything is up to date.

        Because reprocessing outputs for every .webidl on every invocation
        is expensive, we only regenerate the minimal set of files on every
        invocation. The rules for deciding what needs done are roughly as
        follows:

        1. If any .webidl changes, reparse all .webidl files and regenerate
           the global derived files. Only regenerate output files (.h/.cpp)
           impacted by the modified .webidl files.
        2. If an non-.webidl dependency (Python files, config file) changes,
           assume everything is out of date and regenerate the world. This
           is because changes in those could globally impact every output
           file.
        3. If an output file is missing, ensure it is present by performing
           necessary regeneration.

        if `processes` is set to None, run in parallel using the
        multiprocess.Pool default. If set to 1, don't use extra processes.

        Returns a BuildResult summarizing what was written.
        """
        # Despite #1 above, we assume the build system is smart enough to not
        # invoke us if nothing has changed. Therefore, any invocation means
        # something has changed. And, if anything has changed, we need to
        # parse the WebIDL.
        self._parse_webidl()

        result = BuildResult()

        # If we parse, we always update globals - they are cheap and it is
        # easier that way.
        created, updated, unchanged = self._write_global_derived()
        result.created |= created
        result.updated |= updated
        result.unchanged |= unchanged

        # If any of the extra dependencies changed, regenerate the world.
        global_changed, global_hashes = self._global_dependencies_changed()
        if global_changed:
            # Make a copy because we may modify.
            changed_inputs = set(self._input_paths)
        else:
            changed_inputs = self._compute_changed_inputs()

        self._state["global_depends"] = global_hashes
        self._state["dictionaries_convertible_to_js"] = set(
            d.identifier.name for d in self._config.getDictionariesConvertibleToJS()
        )
        self._state["dictionaries_convertible_from_js"] = set(
            d.identifier.name for d in self._config.getDictionariesConvertibleFromJS()
        )

        # Distribute the generation load across several processes. This requires
        # a) that `self' is serializable and b) that `self' is unchanged by
        # _generate_build_files_for_webidl(...)
        ordered_changed_inputs = sorted(changed_inputs)
        pool = WebIDLPool(self, processes=processes)
        generation_results = pool.run(ordered_changed_inputs)

        # Generate bindings from .webidl files.
        for filename, generation_result in zip(
            ordered_changed_inputs, generation_results
        ):
            basename = mozpath.basename(filename)
            result.inputs.add(filename)
            written, deps = generation_result
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

            self._state["webidls"][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
                sha1=self._input_hashes[filename],
            )

        # Process some special interfaces required for testing.
        for interface in self._example_interfaces:
            written = self.generate_example_files(interface)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

        # Generate a make dependency file.
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(global_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)

        self._save_state()

        return result

    def generate_example_files(self, interface):
        """Generates example files for a given interface."""
        from Codegen import CGExampleRoot

        root = CGExampleRoot(self.config, interface)

        example_paths = self._example_paths(interface)
        for path in example_paths:
            # NOTE(review): the format string below ignores the "filename"
            # argument supplied above; it was presumably meant to read
            # "... derived from {filename}" -- confirm against upstream.
            self.log(
                logging.INFO,
                "webidl_generate_example_files",
                {"filename": path},
                "Generating WebIDL example files derived from (unknown)",
            )

        return self._maybe_write_codegen(root, *example_paths)

    def _parse_webidl(self):
        """Parse all input WebIDL files and build the Configuration.

        Populates self._parser_results, self._config and self._input_hashes
        (path -> SHA-1 of content, used later for change detection).
        """
        import WebIDL
        from Configuration import Configuration

        self.log(
            logging.INFO,
            "webidl_parse",
            {"count": len(self._input_paths)},
            "Parsing {count} WebIDL files.",
        )

        hashes = {}
        parser = WebIDL.Parser(self._cache_dir, lexer=None)

        # Sorted order keeps parsing (and any parser-reported errors)
        # deterministic across runs.
        for path in sorted(self._input_paths):
            with io.open(path, "r", encoding="utf-8") as fh:
                data = fh.read()
                hashes[path] = hashlib.sha1(data.encode()).hexdigest()
                parser.parse(data, path)

        # Only these directories may contain WebIDL files with interfaces
        # which are exposed to the web. WebIDL files in these roots may not
        # be changed without DOM peer review.
        #
        # Other directories may contain WebIDL files as long as they only
        # contain ChromeOnly interfaces. These are not subject to mandatory
        # DOM peer review.
        web_roots = (
            # The main WebIDL root.
            self._webidl_root,
            # The binding config root, which contains some test-only
            # interfaces.
            os.path.dirname(self._config_path),
            # The objdir sub-directory which contains generated WebIDL files.
            self._codegen_dir,
        )

        self._parser_results = parser.finish()
        self._config = Configuration(
            self._config_path,
            web_roots,
            self._parser_results,
            self._generated_events_stems_as_array,
        )
        self._input_hashes = hashes

    def _write_global_derived(self):
        """Write the global parser-derived declare/define files.

        Returns a (created, updated, unchanged) tuple of path sets.
        """
        from Codegen import GlobalGenRoots

        things = [("declare", f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(("define", f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            # Each global file's stem names a GlobalGenRoots factory.
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == "declare":
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == "define":
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception("Unknown global gen type: %s" % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result

    def _compute_changed_inputs(self):
        """Compute the set of input files that need to be regenerated."""
        changed_inputs = set()
        expected_outputs = self.expected_build_output_files()

        # Look for missing output files.
        if any(not os.path.exists(f) for f in expected_outputs):
            # FUTURE Bug 940469 Only regenerate minimum set.
            changed_inputs |= self._input_paths

        # That's it for examining output files. We /could/ examine SHA-1's of
        # output files from a previous run to detect modifications. But that's
        # a lot of extra work and most build systems don't do that anyway.

        # Now we move on to the input files.
        old_hashes = {v["filename"]: v["sha1"] for v in self._state["webidls"].values()}

        old_filenames = set(old_hashes.keys())
        new_filenames = self._input_paths

        # If an old file has disappeared or a new file has arrived, mark
        # it.
        changed_inputs |= old_filenames ^ new_filenames

        # For the files in common between runs, compare content. If the file
        # has changed, mark it. We don't need to perform mtime comparisons
        # because content is a stronger validator.
        for filename in old_filenames & new_filenames:
            if old_hashes[filename] != self._input_hashes[filename]:
                changed_inputs.add(filename)

        # We've now populated the base set of inputs that have changed.

        # Inherit dependencies from previous run. The full set of dependencies
        # is associated with each record, so we don't need to perform any fancy
        # graph traversal.
        for v in self._state["webidls"].values():
            if any(dep for dep in v["inputs"] if dep in changed_inputs):
                changed_inputs.add(v["filename"])

        # Now check for changes to the set of dictionaries that are convertible to JS
        oldDictionariesConvertibleToJS = self._state["dictionaries_convertible_to_js"]
        newDictionariesConvertibleToJS = self._config.getDictionariesConvertibleToJS()
        newNames = set(d.identifier.name for d in newDictionariesConvertibleToJS)
        changedDictionaryNames = oldDictionariesConvertibleToJS ^ newNames

        # Now check for changes to the set of dictionaries that are convertible from JS
        oldDictionariesConvertibleFromJS = self._state[
            "dictionaries_convertible_from_js"
        ]
        newDictionariesConvertibleFromJS = (
            self._config.getDictionariesConvertibleFromJS()
        )
        newNames = set(d.identifier.name for d in newDictionariesConvertibleFromJS)
        changedDictionaryNames |= oldDictionariesConvertibleFromJS ^ newNames

        for name in changedDictionaryNames:
            d = self._config.getDictionaryIfExists(name)
            if d:
                changed_inputs.add(d.filename)

        # Only use paths that are known to our current state.
        # This filters out files that were deleted or changed type (e.g. from
        # static to preprocessed).
        return changed_inputs & self._input_paths

    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

          (stem, binding_stem, is_event, header_dir, output_files)

        output_files is itself a tuple. The first two items are the binding
        header and C++ paths, respectively. The 2nd pair are the event header
        and C++ paths or None if this isn't an event binding.
        """
        basename = mozpath.basename(p)
        stem = mozpath.splitext(basename)[0]
        binding_stem = "%sBinding" % stem

        # Exported bindings ship their header to the exported header dir;
        # everything else stays in the codegen dir.
        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            mozpath.join(header_dir, "%s.h" % binding_stem),
            mozpath.join(self._codegen_dir, "%s.cpp" % binding_stem),
            mozpath.join(header_dir, "%s.h" % stem) if is_event else None,
            mozpath.join(self._codegen_dir, "%s.cpp" % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files

    def _example_paths(self, interface):
        """Return the (header, cpp) example file paths for an interface."""
        return (
            mozpath.join(self._codegen_dir, "%s-example.h" % interface),
            mozpath.join(self._codegen_dir, "%s-example.cpp" % interface),
        )

    def expected_build_output_files(self):
        """Obtain the set of files generate_build_files() should write."""
        paths = set()

        # Account for global generation.
        for p in self.GLOBAL_DECLARE_FILES:
            paths.add(mozpath.join(self._exported_header_dir, p))
        for p in self.GLOBAL_DEFINE_FILES:
            paths.add(mozpath.join(self._codegen_dir, p))

        for p in self._input_paths:
            stem, binding_stem, is_event, header_dir, files = self._binding_info(p)
            paths |= {f for f in files if f}

        for interface in self._example_interfaces:
            for p in self._example_paths(interface):
                paths.add(p)

        return paths

    # Parallelization of the generation step relies on this method not changing
    # the internal state of the object
    def _generate_build_files_for_webidl(self, filename):
        """Generate binding (and, if applicable, event) files for one .webidl.

        Returns ((created, updated, unchanged), deps) where deps is the list
        of files this .webidl depends on, per the parser.
        """
        from Codegen import CGBindingRoot, CGEventRoot

        # NOTE(review): as in generate_example_files, the format string
        # ignores the "filename" argument; presumably meant to be
        # "... derived from {filename}".
        self.log(
            logging.INFO,
            "webidl_generate_build_for_input",
            {"filename": filename},
            "Generating WebIDL files derived from (unknown)",
        )

        stem, binding_stem, is_event, header_dir, files = self._binding_info(filename)
        root = CGBindingRoot(self._config, binding_stem, filename)

        result = self._maybe_write_codegen(root, files[0], files[1])

        if is_event:
            generated_event = CGEventRoot(self._config, stem)
            result = self._maybe_write_codegen(
                generated_event, files[2], files[3], result
            )

        return result, root.deps()

    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed.

        Returns (changed, current_hashes) where current_hashes maps each
        global dependency file to the SHA-1 of its current content.
        """
        current_files = set(iter_modules_in_path(mozpath.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(iter_modules_in_path(mozpath.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with io.open(f, "rb") as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state["global_depends"].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state["global_depends"][f]:
                return True, current_hashes

        return False, current_hashes

    def _save_state(self):
        """Serialize the current state as JSON to self._state_path."""
        with io.open(self._state_path, "w", newline="\n") as fh:
            self._state.dump(fh)

    def _maybe_write_codegen(self, obj, declare_path, define_path, result=None):
        """Write obj's declare()/define() output, tracking outcomes in result.

        result is a (created, updated, unchanged) tuple of path sets; a new
        one is allocated if not supplied. Returns result.
        """
        assert declare_path and define_path
        if not result:
            result = (set(), set(), set())

        self._maybe_write_file(declare_path, obj.declare(), result)
        self._maybe_write_file(define_path, obj.define(), result)

        return result

    def _maybe_write_file(self, path, content, result):
        """Write content to path only if it changed, recording the outcome.

        The path is added to result[0] (created), result[1] (updated) or
        result[2] (unchanged) depending on what FileAvoidWrite reports.
        """
        fh = FileAvoidWrite(path)
        fh.write(content)
        existed, updated = fh.close()

        if not existed:
            result[0].add(path)
        elif updated:
            result[1].add(path)
        else:
            result[2].add(path)


def create_build_system_manager(topsrcdir=None, topobjdir=None, dist_dir=None):
    """Create a WebIDLCodegenManager for use by the build system.

    If topsrcdir is None, all three directories are derived from the
    buildconfig module (in which case topobjdir and dist_dir must also be
    None). Raises OSError if the objdir's file-lists.json is missing or the
    cache directory cannot be created.
    """
    if topsrcdir is None:
        assert topobjdir is None and dist_dir is None
        import buildconfig

        topsrcdir = buildconfig.topsrcdir
        topobjdir = buildconfig.topobjdir
        dist_dir = buildconfig.substs["DIST"]

    src_dir = os.path.join(topsrcdir, "dom", "bindings")
    obj_dir = os.path.join(topobjdir, "dom", "bindings")
    webidl_root = os.path.join(topsrcdir, "dom", "webidl")

    # file-lists.json is emitted by the build backend and enumerates the
    # WebIDL inputs and how each should be processed.
    with io.open(os.path.join(obj_dir, "file-lists.json"), "r") as fh:
        files = json.load(fh)

    inputs = (
        files["webidls"],
        files["exported_stems"],
        files["generated_events_stems"],
        files["example_interfaces"],
    )

    cache_dir = os.path.join(obj_dir, "_cache")
    # exist_ok replaces the old try/except errno.EEXIST dance; any other
    # OSError still propagates.
    os.makedirs(cache_dir, exist_ok=True)

    return WebIDLCodegenManager(
        os.path.join(src_dir, "Bindings.conf"),
        webidl_root,
        inputs,
        os.path.join(dist_dir, "include", "mozilla", "dom"),
        obj_dir,
        os.path.join(obj_dir, "codegen.json"),
        cache_dir=cache_dir,
        # The make rules include a codegen.pp file containing dependencies.
        make_deps_path=os.path.join(obj_dir, "codegen.pp"),
        make_deps_target="webidl.stub",
    )