path: root/python/mozbuild/mozbuild/test/test_artifact_cache.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import time
import unittest
from shutil import rmtree
from tempfile import mkdtemp

import mozunit

from mozbuild import artifact_cache
from mozbuild.artifact_cache import ArtifactCache

CONTENTS = {
    "http://server/foo": b"foo",
    "http://server/bar": b"bar" * 400,
    "http://server/qux": b"qux" * 400,
    "http://server/fuga": b"fuga" * 300,
    "http://server/hoge": b"hoge" * 300,
    "http://server/larger": b"larger" * 3000,
}
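
# The payload sizes above range from 3 bytes (foo) through 1200 bytes (bar,
# qux, fuga, hoge) to 18000 bytes (larger), straddling the 4096-byte cache
# limit installed in TestArtifactCache.setUp below.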


class FakeResponse(object):
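    """Minimal stand-in for a streaming HTTP response (requests-style).

    Implements only what the cache's download machinery uses: a
    Content-length header, chunked iteration over the body, and no-op
    raise_for_status()/close().
    """
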
    def __init__(self, content):
        self._content = content

    @property
    def headers(self):
        return {"Content-length": str(len(self._content))}

    def iter_content(self, chunk_size):
        content = memoryview(self._content)
        while content:
            yield content[:chunk_size]
            content = content[chunk_size:]

    def raise_for_status(self):
        pass

    def close(self):
        pass


class FakeSession(object):
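    """Stand-in session that serves the canned CONTENTS payloads instead of
    hitting the network."""
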
    def get(self, url, stream=True):
        assert stream is True
        return FakeResponse(CONTENTS[url])


class TestArtifactCache(unittest.TestCase):
    def setUp(self):
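        # Shrink the cache limits so purging can be exercised with tiny
        # payloads; the real values are restored in tearDown.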
        self.min_cached_artifacts = artifact_cache.MIN_CACHED_ARTIFACTS
        self.max_cached_artifacts_size = artifact_cache.MAX_CACHED_ARTIFACTS_SIZE
        artifact_cache.MIN_CACHED_ARTIFACTS = 2
        artifact_cache.MAX_CACHED_ARTIFACTS_SIZE = 4096

        self._real_utime = os.utime
        os.utime = self.utime
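        # Seed mtimes a day in the past; utime() below hands out strictly
        # increasing times, so the purge order (oldest first) is well-defined.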
        self.timestamp = time.time() - 86400

        self.tmpdir = mkdtemp()

    def tearDown(self):
        rmtree(self.tmpdir)
        artifact_cache.MIN_CACHED_ARTIFACTS = self.min_cached_artifacts
        artifact_cache.MAX_CACHED_ARTIFACTS_SIZE = self.max_cached_artifacts_size
        os.utime = self._real_utime

    def utime(self, path, times):
        if times is None:
            # Ensure all downloaded files have a different timestamp
            times = (self.timestamp, self.timestamp)
            self.timestamp += 2
        self._real_utime(path, times)

    def listtmpdir(self):
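        """Return the cache directory contents, ignoring the
        .metadata_never_index marker the cache creates (presumably to keep
        macOS Spotlight from indexing it)."""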
        return [p for p in os.listdir(self.tmpdir) if p != ".metadata_never_index"]

    def test_artifact_cache_persistence(self):
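        """Run the cache through several simulated download sessions and
        check which files survive each purge."""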
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch("http://server/foo")
        expected = [os.path.basename(path)]
        self.assertEqual(self.listtmpdir(), expected)

        path = cache.fetch("http://server/bar")
        expected.append(os.path.basename(path))
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # We're now downloading more than the cache size allows, but since it
        # all happens in the same session, no purge takes place.
        path = cache.fetch("http://server/qux")
        expected.append(os.path.basename(path))
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        path = cache.fetch("http://server/fuga")
        expected.append(os.path.basename(path))
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        # Downloading a new file in a new session purges the oldest files in
        # the cache.
        path = cache.fetch("http://server/hoge")
        expected.append(os.path.basename(path))
        expected = expected[2:]
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # Downloading a file already in the cache leaves the cache untouched
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch("http://server/qux")
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # bar was purged earlier, so re-downloading it should purge the least
        # recently used file. That would be qux, but we just re-downloaded it
        # above, so the next oldest one (fuga) is purged instead.
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch("http://server/bar")
        expected.append(os.path.basename(path))
        expected = [p for p in expected if "fuga" not in p]
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))

        # Downloading one file larger than the cache size should still leave
        # MIN_CACHED_ARTIFACTS files.
        cache = ArtifactCache(self.tmpdir)
        cache._download_manager.session = FakeSession()

        path = cache.fetch("http://server/larger")
        expected.append(os.path.basename(path))
        expected = expected[-2:]
        self.assertEqual(sorted(self.listtmpdir()), sorted(expected))


if __name__ == "__main__":
    mozunit.main()
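
# For reference, a minimal usage sketch (illustrative only; the URL and cache
# directory below are made up):
#
#     cache = ArtifactCache("/path/to/cache")
#     local_path = cache.fetch("https://example.com/artifact.zip")
#
# fetch() downloads on a miss, returns the local file path, and evicts the
# least recently used entries once MAX_CACHED_ARTIFACTS_SIZE is exceeded,
# while never dropping below MIN_CACHED_ARTIFACTS cached files.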