Diffstat (limited to 'taskcluster/docker/funsize-update-generator')
-rw-r--r-- taskcluster/docker/funsize-update-generator/Dockerfile               |  55
-rw-r--r-- taskcluster/docker/funsize-update-generator/Makefile                 |   9
-rw-r--r-- taskcluster/docker/funsize-update-generator/README                   |   7
-rw-r--r-- taskcluster/docker/funsize-update-generator/dep1.pubkey              |  14
-rw-r--r-- taskcluster/docker/funsize-update-generator/nightly.pubkey           |  14
-rw-r--r-- taskcluster/docker/funsize-update-generator/release.pubkey           |  14
-rw-r--r-- taskcluster/docker/funsize-update-generator/requirements.in          |   7
-rw-r--r-- taskcluster/docker/funsize-update-generator/requirements.txt         | 338
-rw-r--r-- taskcluster/docker/funsize-update-generator/runme.sh                 |  61
-rw-r--r-- taskcluster/docker/funsize-update-generator/scripts/funsize.py       | 472
-rwxr-xr-x taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh  | 157
11 files changed, 1148 insertions(+), 0 deletions(-)
diff --git a/taskcluster/docker/funsize-update-generator/Dockerfile b/taskcluster/docker/funsize-update-generator/Dockerfile
new file mode 100644
index 0000000000..8c63a05df3
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/Dockerfile
@@ -0,0 +1,55 @@
+FROM ubuntu:18.04
+MAINTAINER Simon Fraser <sfraser@mozilla.com>
+
+# Required software
+ENV DEBIAN_FRONTEND noninteractive
+# Chain apt-get commands with apt-get clean in a single docker RUN
+# to make sure that files are removed within a single docker layer
+RUN apt-get update -q && \
+ apt-get install -yyq --no-install-recommends \
+ bzip2 \
+ ca-certificates \
+ curl \
+ gcc \
+ jq \
+ libdpkg-perl \
+ libgetopt-simple-perl \
+ liblzma-dev \
+ locales \
+ python3.8 \
+ libpython3.8-dev \
+ python3-dev \
+ xz-utils \
+ && apt-get clean
+RUN useradd -d /home/worker -s /bin/bash -m worker
+COPY requirements.txt /
+
+RUN locale-gen en_CA.UTF-8
+ENV LANG en_CA.UTF-8
+ENV LANGUAGE en_CA.UTF-8
+ENV LANG_ALL en_CA.UTF-8
+ENV LC_ALL en_CA.UTF-8
+
+# python-pip installs a lot of dependencies, drastically increasing the size
+# of the image. Installing it this way saves us almost 200M.
+RUN bash -c "curl -L https://bootstrap.pypa.io/get-pip.py | python3.8"
+
+RUN ["pip", "install", "-r", "/requirements.txt"]
+
+# scripts
+RUN mkdir /home/worker/bin
+COPY scripts/* /home/worker/bin/
+
+COPY runme.sh /runme.sh
+RUN chmod 755 /home/worker/bin/* /*.sh
+RUN mkdir /home/worker/keys
+COPY *.pubkey /home/worker/keys/
+
+ENV HOME /home/worker
+ENV SHELL /bin/bash
+ENV USER worker
+ENV LOGNAME worker
+
+USER worker
+
+CMD ["/runme.sh"]
diff --git a/taskcluster/docker/funsize-update-generator/Makefile b/taskcluster/docker/funsize-update-generator/Makefile
new file mode 100644
index 0000000000..6b67f0ed90
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/Makefile
@@ -0,0 +1,9 @@
+IMAGE_NAME = funsize-update-generator
+
+build:
+ docker build -t $(IMAGE_NAME) --no-cache --rm .
+
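+# Refresh the checked-in public keys by extracting them from the DER
+# certificates in mozilla-central.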
+update_pubkeys:
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey
+ curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey
diff --git a/taskcluster/docker/funsize-update-generator/README b/taskcluster/docker/funsize-update-generator/README
new file mode 100644
index 0000000000..5e9507be71
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/README
@@ -0,0 +1,7 @@
+
+To run this locally for testing/development purposes:
+
+1. Find a funsize generating task ID
+2. docker run -t -e TASKCLUSTER_ROOT_URL="https://firefox-ci-tc.services.mozilla.com" -e SIGNING_CERT='nightly' -e MAR_CHANNEL_ID='firefox-mozilla-central' -e TASK_ID="${TASK_ID}" -e EXTRA_PARAMS="--arch=x86_64" funsize-update-generator /runme.sh
+
+The TASK_ID should be a recent "partials" Task.
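+
+As a worked example (the task ID is an illustrative placeholder, not a real
+one), first build the image with the Makefile in this directory, then run it:
+
+  make build
+  TASK_ID=AAAAAAAAAAAAAAAAAAAAAA   # substitute a recent partials task ID
+  docker run -t \
+    -e TASKCLUSTER_ROOT_URL="https://firefox-ci-tc.services.mozilla.com" \
+    -e SIGNING_CERT='nightly' \
+    -e MAR_CHANNEL_ID='firefox-mozilla-central' \
+    -e TASK_ID="${TASK_ID}" \
+    -e EXTRA_PARAMS="--arch=x86_64" \
+    funsize-update-generator /runme.sh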
diff --git a/taskcluster/docker/funsize-update-generator/dep1.pubkey b/taskcluster/docker/funsize-update-generator/dep1.pubkey
new file mode 100644
index 0000000000..927b2cc947
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/dep1.pubkey
@@ -0,0 +1,14 @@
+-----BEGIN PUBLIC KEY-----
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA8Y6AS+xwKoXZl0X5qOKr
+0I00xC4UN+IMjA1LIQoZ2GBkiqQF3q8v2nWTFE0+47+3NtP0l8tvsQY+LSYR4Fek
+v2Vx4m/CAMKmWzW6Vtlj80y6rQ04V19l41bZXvCIBW5fm9sAvPgc7CngkcLySNqk
+8vf57cUEpOmbsjSOCmK0j8hh03I1eWogpbAVEchSm1xN2sUJaVTvz5j8BfE6Vm0i
+nN7V0zF+AOxzvntZIpfUqMZbHRiMkGn4l9rjia1Rz0qUc9RNCJkNocyKtQ2N2wnN
+FjHpmK9x2V71cS1JQGhgLegrswPCAWY1lTmiLk9LweqGoVL0rqR4LCkb0VCaeSRe
+6bUEYcU1ZQedE80zGKB3AfoC5br1shYY0xjmyRSCQ8m8WE60HzXhL8wczKrn5yoJ
+iF6BxFwcYsvrWBPgIYVZLcqjODfR/M62o8yIfTC7yBcIdycJ0sWhB47dHAFxv1kc
+wv8Ik9ftvDyupE8kwcl58fNOXz93j7IxMry/ey27NyYpESPOUNcjT8TP26FdGebg
+4iJx0/LaYmaNUdchfBBlaYqGdH6ZGK0OeVxzHstGuG0gebm/igYcpaFxiQzvWijX
+MIAU56s4g+yj7pSzT5/s9r8Gv+YhsNHKm4hnwLZaITV0lLMT5h/OZGseQTPMBnAR
+hK3CIfcqG0I23hdwI29ZuUMCAwEAAQ==
+-----END PUBLIC KEY-----
diff --git a/taskcluster/docker/funsize-update-generator/nightly.pubkey b/taskcluster/docker/funsize-update-generator/nightly.pubkey
new file mode 100644
index 0000000000..e51049844c
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/nightly.pubkey
@@ -0,0 +1,14 @@
+-----BEGIN PUBLIC KEY-----
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAth151NGY8PBzn0bii9Yc
+AjYHZDwP9Lj1c3owG0zLqW2kPcdp86QTAcoYunHGYFFakNG3tooZhzwkMjZ1OrXc
+ERjD6AuVSGIBdsKtKP4vLtMjDUteFN4K2+rveozcnYFZuTWEajGu8uoYsv4QgdEA
+nTBC39j0J33xlfUR+XKuxzhxNrFX+fRFWuLDJrPziMcVA/mzf0gXlhtEsfV0HYyg
+yWpHdIWww+llysD1QOQAHk94Ss8c/4BFXFxlwlLeNlB1ZqLm1LsNy0jUy9EHeO3C
+H6eqmiFEbpdjlrkJdgR1NcTzeY/Qf/nhWH6BAZrSapQycF7OSLU+rFWMQUElSPLc
+NVl7oNAAfSYLTvRjPGi+mJK3wGFQw1EpwQl+elE1oj4+sHvIVpDrLb6btpxfr1cZ
+pR4Di/hkOIymxEDWvtUhOxUXnYbDKQSDcAHKM/xR3sdIAiVtVuL4hyBwlAqkQc2j
+H+SmnCbazgnq5+dN4y5DjoOgbZQ/koE3s3bUzzMeIxaul9v4gMtGROw3PQ3OZcP0
+lgjPRhY+NeTnWMo2nGb4/eS6Cn2qFLfbEQjsj6pJJBNKfvK/gm1jXb3PgXXdf8+d
+2xTPOX8QNpSK7C0w4vYlvSpYZlsx2cznEOV6LDqP0QHUnmd/k1xWRRGiQ7gtT+BV
+Fn0h7JyTGmEdFu6l4OhS8hMCAwEAAQ==
+-----END PUBLIC KEY-----
diff --git a/taskcluster/docker/funsize-update-generator/release.pubkey b/taskcluster/docker/funsize-update-generator/release.pubkey
new file mode 100644
index 0000000000..ec1103d828
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/release.pubkey
@@ -0,0 +1,14 @@
+-----BEGIN PUBLIC KEY-----
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxCHbY+fP3dvaP9XVbmK6
+i4rbqo72INEWgDSYbr/DIYfCSzHC9H8pU8dyjt+Nd8OtoUZtBD1N9fP7SlrvPZSI
+ZSW4k0e9Ky5aV3Uy+ivamSvYszkhqdeP2y7MBu73XHKYONR9PnKa+ovmREwSEI+h
+1e0ebm8zvF7Ndwx0mOeZkDu9SDkDGg4aj2xrJyBBOuGVjuctMZ6l1davANI5xiJ0
+GBEU3tR1gJs1T4vLBis5mEFn9y4kgyw/HrxmRYGnZL4fLb2fTI+pNW0Twu3KWwwi
+LgLkkVrNWiHSk7YWqxjcg5IA3pQETQ17paTHoB5Mnkvuh6MkDXvRG5VgAHZAigr6
+fJMsasOUaBeos/cD1LDQEIObpetlxc0Fiu/lvUts0755otkhI+yv35+wUa6GJrsE
+CsT7c/LaFtQXg06aGXbMLDn0bE/e+nw9KWT/rE1iYXMFkzrqoTeYJ+v7/fD/ywU8
+m8l4CZmXxzd/RogMrM3xl+j4ucAAltDQyL4yLySaIT05w5U8z2zJDEXFvpFDSRfF
+K3kjLwGub7wNwaQDuh/msIUdavu4g+GNikCXAJ8AssLuYatyHoltd2tf+EIIDW3U
+zzLpymnLo3cAz3IPfXyqVB+mcLcpqbHjl3hWms6l1wGtz6S4WqdrWs/KfzS5EyDK
+r63xn1Rg/XFmR57EsFEXAZ8CAwEAAQ==
+-----END PUBLIC KEY-----
diff --git a/taskcluster/docker/funsize-update-generator/requirements.in b/taskcluster/docker/funsize-update-generator/requirements.in
new file mode 100644
index 0000000000..3360c71690
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/requirements.in
@@ -0,0 +1,7 @@
+aiohttp
+awscli
+mar
+redo
+requests
+scriptworker
+sh
diff --git a/taskcluster/docker/funsize-update-generator/requirements.txt b/taskcluster/docker/funsize-update-generator/requirements.txt
new file mode 100644
index 0000000000..bbc462f6e5
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/requirements.txt
@@ -0,0 +1,338 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --generate-hashes requirements.in
+#
+aiohttp==3.6.2 \
+ --hash=sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e \
+ --hash=sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326 \
+ --hash=sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a \
+ --hash=sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654 \
+ --hash=sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a \
+ --hash=sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4 \
+ --hash=sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17 \
+ --hash=sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec \
+ --hash=sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd \
+ --hash=sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48 \
+ --hash=sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59 \
+ --hash=sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965
+ # via
+ # -r requirements.in
+ # scriptworker
+ # taskcluster
+aiomemoizettl==0.0.3 \
+ --hash=sha256:07a6becac60f6cd2604b9f2b73bcd9a50079a0b7b55e2a4e45b1eec5a3ea9659 \
+ --hash=sha256:0a80d2dc765e545263f515363b6700ec8cf86fa3968b529f56390b28e34f743d
+ # via scriptworker
+arrow==0.15.5 \
+ --hash=sha256:5390e464e2c5f76971b60ffa7ee29c598c7501a294bc9f5e6dadcb251a5d027b \
+ --hash=sha256:70729bcc831da496ca3cb4b7e89472c8e2d27d398908155e0796179f6d2d41ee
+ # via scriptworker
+asn1crypto==1.3.0 \
+ --hash=sha256:5a215cb8dc12f892244e3a113fe05397ee23c5c4ca7a69cd6e69811755efc42d \
+ --hash=sha256:831d2710d3274c8a74befdddaf9f17fcbf6e350534565074818722d6d615b315
+ # via mar
+async-timeout==3.0.1 \
+ --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \
+ --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3
+ # via
+ # aiohttp
+ # taskcluster
+attrs==19.3.0 \
+ --hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \
+ --hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72
+ # via
+ # aiohttp
+ # jsonschema
+awscli==1.18.13 \
+ --hash=sha256:197a1168a2c979752bb290b09cbbf8bf836507de1b99a19431ca9b0cde8a5f81 \
+ --hash=sha256:d74c5b097fe9288147c6045974eca9dc369055aa6ccce05fb551b0b95e813213
+ # via -r requirements.in
+backports.lzma==0.0.14 \
+ --hash=sha256:16d8b68e4d3cd4e6c9ddb059850452946da3914c8a8e197a7f2b0954559f2df4
+ # via mar
+botocore==1.15.13 \
+ --hash=sha256:265bf5e902e2f0854f12032304f2559f5d042e03c10a59e4f3ec43353be5b88f \
+ --hash=sha256:2fce3cc8fae3bbf89984f3065d9eecf0a906b96bee491a4fb54ad7ec7f4261dc
+ # via
+ # awscli
+ # s3transfer
+certifi==2019.11.28 \
+ --hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \
+ --hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f
+ # via requests
+cffi==1.14.0 \
+ --hash=sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff \
+ --hash=sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b \
+ --hash=sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac \
+ --hash=sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0 \
+ --hash=sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384 \
+ --hash=sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26 \
+ --hash=sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6 \
+ --hash=sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b \
+ --hash=sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e \
+ --hash=sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd \
+ --hash=sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2 \
+ --hash=sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66 \
+ --hash=sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc \
+ --hash=sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8 \
+ --hash=sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55 \
+ --hash=sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4 \
+ --hash=sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5 \
+ --hash=sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d \
+ --hash=sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78 \
+ --hash=sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa \
+ --hash=sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793 \
+ --hash=sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f \
+ --hash=sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a \
+ --hash=sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f \
+ --hash=sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30 \
+ --hash=sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f \
+ --hash=sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3 \
+ --hash=sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c
+ # via cryptography
+chardet==3.0.4 \
+ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
+ --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
+ # via
+ # aiohttp
+ # requests
+click==7.0 \
+ --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \
+ --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7
+ # via mar
+colorama==0.4.3 \
+ --hash=sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff \
+ --hash=sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1
+ # via awscli
+construct==2.10.56 \
+ --hash=sha256:97ba13edcd98546f10f7555af41c8ce7ae9d8221525ec4062c03f9adbf940661
+ # via mar
+cryptography==2.8 \
+ --hash=sha256:02079a6addc7b5140ba0825f542c0869ff4df9a69c360e339ecead5baefa843c \
+ --hash=sha256:1df22371fbf2004c6f64e927668734070a8953362cd8370ddd336774d6743595 \
+ --hash=sha256:369d2346db5934345787451504853ad9d342d7f721ae82d098083e1f49a582ad \
+ --hash=sha256:3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651 \
+ --hash=sha256:44ff04138935882fef7c686878e1c8fd80a723161ad6a98da31e14b7553170c2 \
+ --hash=sha256:4b1030728872c59687badcca1e225a9103440e467c17d6d1730ab3d2d64bfeff \
+ --hash=sha256:58363dbd966afb4f89b3b11dfb8ff200058fbc3b947507675c19ceb46104b48d \
+ --hash=sha256:6ec280fb24d27e3d97aa731e16207d58bd8ae94ef6eab97249a2afe4ba643d42 \
+ --hash=sha256:7270a6c29199adc1297776937a05b59720e8a782531f1f122f2eb8467f9aab4d \
+ --hash=sha256:73fd30c57fa2d0a1d7a49c561c40c2f79c7d6c374cc7750e9ac7c99176f6428e \
+ --hash=sha256:7f09806ed4fbea8f51585231ba742b58cbcfbfe823ea197d8c89a5e433c7e912 \
+ --hash=sha256:90df0cc93e1f8d2fba8365fb59a858f51a11a394d64dbf3ef844f783844cc793 \
+ --hash=sha256:971221ed40f058f5662a604bd1ae6e4521d84e6cad0b7b170564cc34169c8f13 \
+ --hash=sha256:a518c153a2b5ed6b8cc03f7ae79d5ffad7315ad4569b2d5333a13c38d64bd8d7 \
+ --hash=sha256:b0de590a8b0979649ebeef8bb9f54394d3a41f66c5584fff4220901739b6b2f0 \
+ --hash=sha256:b43f53f29816ba1db8525f006fa6f49292e9b029554b3eb56a189a70f2a40879 \
+ --hash=sha256:d31402aad60ed889c7e57934a03477b572a03af7794fa8fb1780f21ea8f6551f \
+ --hash=sha256:de96157ec73458a7f14e3d26f17f8128c959084931e8997b9e655a39c8fde9f9 \
+ --hash=sha256:df6b4dca2e11865e6cfbfb708e800efb18370f5a46fd601d3755bc7f85b3a8a2 \
+ --hash=sha256:ecadccc7ba52193963c0475ac9f6fa28ac01e01349a2ca48509667ef41ffd2cf \
+ --hash=sha256:fb81c17e0ebe3358486cd8cc3ad78adbae58af12fc2bf2bc0bb84e8090fa5ce8
+ # via
+ # jwcrypto
+ # mar
+ # scriptworker
+dictdiffer==0.8.1 \
+ --hash=sha256:1adec0d67cdf6166bda96ae2934ddb5e54433998ceab63c984574d187cc563d2 \
+ --hash=sha256:d79d9a39e459fe33497c858470ca0d2e93cb96621751de06d631856adfd9c390
+ # via scriptworker
+docutils==0.15.2 \
+ --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
+ --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
+ --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99
+ # via
+ # awscli
+ # botocore
+github3.py==1.3.0 \
+ --hash=sha256:15a115c18f7bfcf934dfef7ab103844eb9f620c586bad65967708926da47cbda \
+ --hash=sha256:50833b5da35546b8cced0e8d7ff4c50a9afc2c8e46cc4d07dc4b66d26467c708
+ # via scriptworker
+idna-ssl==1.1.0 \
+ --hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c
+ # via aiohttp
+idna==2.9 \
+ --hash=sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb \
+ --hash=sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa
+ # via
+ # idna-ssl
+ # requests
+ # yarl
+immutabledict==0.2.0 \
+ --hash=sha256:43dde3e55dcb539537ae6189fb6b09a1d01e94db304e4506e94ca2d45ec14c47 \
+ --hash=sha256:7881e44098f13dd12d6fec00551d564433cb46776e8b2f3453128f715df4376a
+ # via scriptworker
+importlib-metadata==1.5.0 \
+ --hash=sha256:06f5b3a99029c7134207dd882428a66992a9de2bef7c2b699b5641f9886c3302 \
+ --hash=sha256:b97607a1a18a5100839aec1dc26a1ea17ee0d93b20b0f008d80a5a050afb200b
+ # via jsonschema
+jmespath==0.9.5 \
+ --hash=sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec \
+ --hash=sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9
+ # via botocore
+json-e==3.0.2 \
+ --hash=sha256:35dc96ce04f0caece560dc18266af7f0ffb780955273ffa20814be65447cad49
+ # via scriptworker
+jsonschema==3.2.0 \
+ --hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \
+ --hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a
+ # via scriptworker
+jwcrypto==0.7 \
+ --hash=sha256:618ded1d25d3f806a1ab05cee42633a5a2787af33fca8d8f539b0aa1478b3728 \
+ --hash=sha256:adbe1f6266cde35d40d5de6d1419612b3bd4c869b9332c88c9d7a9163d305100
+ # via github3.py
+mar==3.1.0 \
+ --hash=sha256:1939df482f2d3f6221405da00d6286d77d2dd60d372a0fd37532a8f00544f64f \
+ --hash=sha256:5d2904a063f0da625e37515fa9eb340082e69fa1c00fdbeb82d28b7ff3c51e28
+ # via -r requirements.in
+mohawk==1.1.0 \
+ --hash=sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09 \
+ --hash=sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723
+ # via taskcluster
+multidict==4.7.5 \
+ --hash=sha256:317f96bc0950d249e96d8d29ab556d01dd38888fbe68324f46fd834b430169f1 \
+ --hash=sha256:42f56542166040b4474c0c608ed051732033cd821126493cf25b6c276df7dd35 \
+ --hash=sha256:4b7df040fb5fe826d689204f9b544af469593fb3ff3a069a6ad3409f742f5928 \
+ --hash=sha256:544fae9261232a97102e27a926019100a9db75bec7b37feedd74b3aa82f29969 \
+ --hash=sha256:620b37c3fea181dab09267cd5a84b0f23fa043beb8bc50d8474dd9694de1fa6e \
+ --hash=sha256:6e6fef114741c4d7ca46da8449038ec8b1e880bbe68674c01ceeb1ac8a648e78 \
+ --hash=sha256:7774e9f6c9af3f12f296131453f7b81dabb7ebdb948483362f5afcaac8a826f1 \
+ --hash=sha256:85cb26c38c96f76b7ff38b86c9d560dea10cf3459bb5f4caf72fc1bb932c7136 \
+ --hash=sha256:a326f4240123a2ac66bb163eeba99578e9d63a8654a59f4688a79198f9aa10f8 \
+ --hash=sha256:ae402f43604e3b2bc41e8ea8b8526c7fa7139ed76b0d64fc48e28125925275b2 \
+ --hash=sha256:aee283c49601fa4c13adc64c09c978838a7e812f85377ae130a24d7198c0331e \
+ --hash=sha256:b51249fdd2923739cd3efc95a3d6c363b67bbf779208e9f37fd5e68540d1a4d4 \
+ --hash=sha256:bb519becc46275c594410c6c28a8a0adc66fe24fef154a9addea54c1adb006f5 \
+ --hash=sha256:c2c37185fb0af79d5c117b8d2764f4321eeb12ba8c141a95d0aa8c2c1d0a11dd \
+ --hash=sha256:dc561313279f9d05a3d0ffa89cd15ae477528ea37aa9795c4654588a3287a9ab \
+ --hash=sha256:e439c9a10a95cb32abd708bb8be83b2134fa93790a4fb0535ca36db3dda94d20 \
+ --hash=sha256:fc3b4adc2ee8474cb3cd2a155305d5f8eda0a9c91320f83e55748e1fcb68f8e3
+ # via
+ # aiohttp
+ # yarl
+pyasn1==0.4.8 \
+ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
+ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
+ # via rsa
+pycparser==2.19 \
+ --hash=sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3
+ # via cffi
+pyrsistent==0.15.7 \
+ --hash=sha256:cdc7b5e3ed77bed61270a47d35434a30617b9becdf2478af76ad2c6ade307280
+ # via jsonschema
+python-dateutil==2.8.1 \
+ --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \
+ --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a
+ # via
+ # arrow
+ # botocore
+ # github3.py
+pyyaml==5.3 \
+ --hash=sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6 \
+ --hash=sha256:1cf708e2ac57f3aabc87405f04b86354f66799c8e62c28c5fc5f88b5521b2dbf \
+ --hash=sha256:24521fa2890642614558b492b473bee0ac1f8057a7263156b02e8b14c88ce6f5 \
+ --hash=sha256:4fee71aa5bc6ed9d5f116327c04273e25ae31a3020386916905767ec4fc5317e \
+ --hash=sha256:70024e02197337533eef7b85b068212420f950319cc8c580261963aefc75f811 \
+ --hash=sha256:74782fbd4d4f87ff04159e986886931456a1894c61229be9eaf4de6f6e44b99e \
+ --hash=sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d \
+ --hash=sha256:cb1f2f5e426dc9f07a7681419fe39cee823bb74f723f36f70399123f439e9b20 \
+ --hash=sha256:dbbb2379c19ed6042e8f11f2a2c66d39cceb8aeace421bfc29d085d93eda3689 \
+ --hash=sha256:e3a057b7a64f1222b56e47bcff5e4b94c4f61faac04c7c4ecb1985e18caa3994 \
+ --hash=sha256:e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615
+ # via
+ # awscli
+ # scriptworker
+redo==2.0.3 \
+ --hash=sha256:36784bf8ae766e14f9db0e377ccfa02835d648321d2007b6ae0bf4fd612c0f94 \
+ --hash=sha256:71161cb0e928d824092a5f16203939bbc0867ce4c4685db263cf22c3ae7634a8
+ # via -r requirements.in
+requests==2.23.0 \
+ --hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \
+ --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6
+ # via
+ # -r requirements.in
+ # github3.py
+ # taskcluster
+rsa==3.4.2 \
+ --hash=sha256:25df4e10c263fb88b5ace923dd84bf9aa7f5019687b5e55382ffcdb8bede9db5 \
+ --hash=sha256:43f682fea81c452c98d09fc316aae12de6d30c4b5c84226642cf8f8fd1c93abd
+ # via awscli
+s3transfer==0.3.3 \
+ --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \
+ --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db
+ # via awscli
+scriptworker==32.0.3 \
+ --hash=sha256:a4d428a2c9db1b0dbabb86c0badf850e919ed83ffaba701185370ebe3dd5f7aa \
+ --hash=sha256:d60976a94ba86d0c4b9f23536ce3cb026281c726d00da12b6546a8dd80cb4d1b
+ # via -r requirements.in
+sh==1.12.14 \
+ --hash=sha256:ae3258c5249493cebe73cb4e18253a41ed69262484bad36fdb3efcb8ad8870bb \
+ --hash=sha256:b52bf5833ed01c7b5c5fb73a7f71b3d98d48e9b9b8764236237bdc7ecae850fc
+ # via -r requirements.in
+six==1.14.0 \
+ --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \
+ --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c
+ # via
+ # cryptography
+ # jsonschema
+ # mohawk
+ # pyrsistent
+ # python-dateutil
+ # taskcluster
+slugid==2.0.0 \
+ --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
+ --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
+ # via taskcluster
+taskcluster-urls==12.1.0 \
+ --hash=sha256:1dc740c32c7beb31e11ed7ccf9da2d47a504acdb3170c8900649433b0fd16fb2 \
+ --hash=sha256:4a62c776aeba6d45044789a8845ec4d8521bc1bb6ebfc86d79ee759bcdd4f2f7
+ # via taskcluster
+taskcluster==25.4.0 \
+ --hash=sha256:017e626ec7211cc250f6c59ac4fbeb3ca7d7ecf0a253eb4f21a694bc5d02ae36 \
+ --hash=sha256:0a8fc965441ab60aafb83bcf2b670afd3f8817b13067561e084b4d2528f1e75c \
+ --hash=sha256:a9d10a794a065ea17a11fa8ee4861e57ea702362f70d00dc97d5a4f685a70d4f
+ # via scriptworker
+typing-extensions==3.7.4.1 \
+ --hash=sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2 \
+ --hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \
+ --hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575
+ # via aiohttp
+uritemplate==3.0.1 \
+ --hash=sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f \
+ --hash=sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae
+ # via github3.py
+urllib3==1.25.8 \
+ --hash=sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc \
+ --hash=sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc
+ # via
+ # botocore
+ # requests
+yarl==1.4.2 \
+ --hash=sha256:0c2ab325d33f1b824734b3ef51d4d54a54e0e7a23d13b86974507602334c2cce \
+ --hash=sha256:0ca2f395591bbd85ddd50a82eb1fde9c1066fafe888c5c7cc1d810cf03fd3cc6 \
+ --hash=sha256:2098a4b4b9d75ee352807a95cdf5f10180db903bc5b7270715c6bbe2551f64ce \
+ --hash=sha256:25e66e5e2007c7a39541ca13b559cd8ebc2ad8fe00ea94a2aad28a9b1e44e5ae \
+ --hash=sha256:26d7c90cb04dee1665282a5d1a998defc1a9e012fdca0f33396f81508f49696d \
+ --hash=sha256:308b98b0c8cd1dfef1a0311dc5e38ae8f9b58349226aa0533f15a16717ad702f \
+ --hash=sha256:3ce3d4f7c6b69c4e4f0704b32eca8123b9c58ae91af740481aa57d7857b5e41b \
+ --hash=sha256:58cd9c469eced558cd81aa3f484b2924e8897049e06889e8ff2510435b7ef74b \
+ --hash=sha256:5b10eb0e7f044cf0b035112446b26a3a2946bca9d7d7edb5e54a2ad2f6652abb \
+ --hash=sha256:6faa19d3824c21bcbfdfce5171e193c8b4ddafdf0ac3f129ccf0cdfcb083e462 \
+ --hash=sha256:944494be42fa630134bf907714d40207e646fd5a94423c90d5b514f7b0713fea \
+ --hash=sha256:a161de7e50224e8e3de6e184707476b5a989037dcb24292b391a3d66ff158e70 \
+ --hash=sha256:a4844ebb2be14768f7994f2017f70aca39d658a96c786211be5ddbe1c68794c1 \
+ --hash=sha256:c2b509ac3d4b988ae8769901c66345425e361d518aecbe4acbfc2567e416626a \
+ --hash=sha256:c9959d49a77b0e07559e579f38b2f3711c2b8716b8410b320bf9713013215a1b \
+ --hash=sha256:d8cdee92bc930d8b09d8bd2043cedd544d9c8bd7436a77678dd602467a993080 \
+ --hash=sha256:e15199cdb423316e15f108f51249e44eb156ae5dba232cb73be555324a1d49c2
+ # via aiohttp
+zipp==3.1.0 \
+ --hash=sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b \
+ --hash=sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96
+ # via importlib-metadata
+
+# WARNING: The following packages were not pinned, but pip requires them to be
+# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
+# setuptools
diff --git a/taskcluster/docker/funsize-update-generator/runme.sh b/taskcluster/docker/funsize-update-generator/runme.sh
new file mode 100644
index 0000000000..99d80c09e2
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/runme.sh
@@ -0,0 +1,61 @@
+#!/bin/sh
+
+set -xe
+
+test "$TASK_ID"
+test "$SIGNING_CERT"
+
+ARTIFACTS_DIR="/home/worker/artifacts"
+mkdir -p "$ARTIFACTS_DIR"
+
+# Strip trailing / if present
+TASKCLUSTER_ROOT_URL="${TASKCLUSTER_ROOT_URL%/}"
+export TASKCLUSTER_ROOT_URL
+
+# Duplicate the functionality of taskcluster-lib-urls, but in bash.
+queue_base="${TASKCLUSTER_ROOT_URL%/}/api/queue/v1"
+
+curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
+
+# auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/
+# -> bucket of tc-gp-private-1d-us-east-1, path of releng/mbsdiff-cache/
+# Trailing slash is important, due to prefix permissions in S3.
+S3_BUCKET_AND_PATH=$(jq -r '.scopes[] | select(contains ("auth:aws-s3"))' /home/worker/task.json | awk -F: '{print $4}')
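+# For example, the scope in the comment above splits on ":" into four fields,
+# leaving S3_BUCKET_AND_PATH="tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/".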
+
+# Will be empty if there's no scope for AWS S3.
+if [ -n "${S3_BUCKET_AND_PATH}" ] && getent hosts taskcluster
+then
+ # Does this parse as we expect?
+ S3_PATH=${S3_BUCKET_AND_PATH#*/}
+ AWS_BUCKET_NAME=${S3_BUCKET_AND_PATH%/${S3_PATH}*}
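+  # With the example value above this yields S3_PATH="releng/mbsdiff-cache/"
+  # and AWS_BUCKET_NAME="tc-gp-private-1d-us-east-1".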
+ test "${S3_PATH}"
+ test "${AWS_BUCKET_NAME}"
+
+ set +x # Don't echo these.
+ secret_url="${TASKCLUSTER_PROXY_URL}/api/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}"
+ AUTH=$(curl "${secret_url}")
+ AWS_ACCESS_KEY_ID=$(echo "${AUTH}" | jq -r '.credentials.accessKeyId')
+ AWS_SECRET_ACCESS_KEY=$(echo "${AUTH}" | jq -r '.credentials.secretAccessKey')
+ AWS_SESSION_TOKEN=$(echo "${AUTH}" | jq -r '.credentials.sessionToken')
+ export AWS_ACCESS_KEY_ID
+ export AWS_SECRET_ACCESS_KEY
+ export AWS_SESSION_TOKEN
+ AUTH=
+
+ if [ -n "$AWS_ACCESS_KEY_ID" ] && [ -n "$AWS_SECRET_ACCESS_KEY" ]; then
+ # Pass the full bucket/path prefix, as the script just appends local files.
+ export MBSDIFF_HOOK="/home/worker/bin/mbsdiff_hook.sh -S ${S3_BUCKET_AND_PATH}"
+ fi
+ set -x
+else
+ # disable caching
+ export MBSDIFF_HOOK=
+fi
+
+# EXTRA_PARAMS is optional
+# shellcheck disable=SC2086
+python3.8 /home/worker/bin/funsize.py \
+ --artifacts-dir "$ARTIFACTS_DIR" \
+ --task-definition /home/worker/task.json \
+ --signing-cert "/home/worker/keys/${SIGNING_CERT}.pubkey" \
+ $EXTRA_PARAMS
diff --git a/taskcluster/docker/funsize-update-generator/scripts/funsize.py b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
new file mode 100644
index 0000000000..ab35b78806
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
@@ -0,0 +1,472 @@
+#!/usr/bin/env python3
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, division, print_function
+
+import argparse
+import asyncio
+import configparser
+import json
+import logging
+import os
+import shutil
+import tempfile
+import time
+from distutils.util import strtobool
+from contextlib import AsyncExitStack
+from pathlib import Path
+
+import aiohttp
+from mardor.reader import MarReader
+from mardor.signing import get_keysize
+from scriptworker.utils import retry_async, get_hash
+
+log = logging.getLogger(__name__)
+
+
+ROOT_URL = os.environ.get(
+ "TASKCLUSTER_ROOT_URL", "https://firefox-ci-tc.services.mozilla.com"
+)
+QUEUE_PREFIX = f"{ROOT_URL}/api/queue/"
+ALLOWED_URL_PREFIXES = (
+ "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
+ "http://download.cdn.mozilla.net/pub/firefox/nightly/",
+ "http://ftp.mozilla.org/",
+ "http://download.mozilla.org/",
+ "https://archive.mozilla.org/",
+ "http://archive.mozilla.org/",
+ QUEUE_PREFIX,
+)
+STAGING_URL_PREFIXES = (
+ "http://ftp.stage.mozaws.net/",
+ "https://ftp.stage.mozaws.net/",
+)
+
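+# Per-architecture options exported as BCJ_OPTIONS for the partial-generation
+# tooling (see generate_partial). "--x86" is apparently the xz BCJ filter
+# switch, used for both 32- and 64-bit x86; the ARM targets pass no filter.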
+BCJ_OPTIONS = {
+ "x86": ["--x86"],
+ "x86_64": ["--x86"],
+ "aarch64": [],
+ # macOS Universal Builds
+ "macos-x86_64-aarch64": [],
+}
+
+
+def verify_signature(mar, cert):
+ log.info("Checking %s signature", mar)
+ with open(mar, "rb") as mar_fh:
+ m = MarReader(mar_fh)
+ if not m.verify(verify_key=cert):
+            raise ValueError(
+                "MAR Signature invalid: %s (%s) against %s"
+                % (mar, m.signature_type, cert)
+            )
+
+
+def process_arguments():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--artifacts-dir", required=True)
+ parser.add_argument("--signing-cert", type=argparse.FileType("rb"), required=True)
+ parser.add_argument("--task-definition", required=True, type=argparse.FileType("r"))
+ parser.add_argument(
+ "--allow-staging-prefixes",
+ action="store_true",
+ default=strtobool(os.environ.get("FUNSIZE_ALLOW_STAGING_PREFIXES", "false")),
+ help="Allow files from staging buckets.",
+ )
+ parser.add_argument(
+ "-q",
+ "--quiet",
+ dest="log_level",
+ action="store_const",
+ const=logging.INFO,
+ default=logging.DEBUG,
+ )
+ parser.add_argument(
+ "--arch",
+ type=str,
+ required=True,
+ choices=BCJ_OPTIONS.keys(),
+ help="The archtecture you are building.",
+ )
+ return parser.parse_args()
+
+
+def validate_mar_channel_id(mar, channel_ids):
+ log.info("Checking %s for MAR_CHANNEL_ID %s", mar, channel_ids)
+ # We may get a string with a list representation, or a single entry string.
+ channel_ids = set(channel_ids.split(","))
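+    # e.g. MAR_CHANNEL_ID="firefox-mozilla-central", or a comma-separated
+    # list such as "firefox-mozilla-beta,firefox-mozilla-release"
+    # (illustrative values).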
+
+    with open(mar, "rb") as mar_fh:
+        product_info = MarReader(mar_fh).productinfo
+ if not isinstance(product_info, tuple):
+ raise ValueError(
+ "Malformed product information in mar: {}".format(product_info)
+ )
+
+ found_channel_ids = set(product_info[1].split(","))
+
+ if not found_channel_ids.issubset(channel_ids):
+ raise ValueError(
+ "MAR_CHANNEL_ID mismatch, {} not in {}".format(product_info[1], channel_ids)
+ )
+
+ log.info("%s channel %s in %s", mar, product_info[1], channel_ids)
+
+
+async def retry_download(*args, semaphore=None, **kwargs): # noqa: E999
+ """Retry download() calls."""
+ async with AsyncExitStack() as stack:
+ if semaphore:
+ await stack.enter_async_context(semaphore)
+ await retry_async(
+ download,
+ retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError),
+ args=args,
+ kwargs=kwargs,
+ )
+
+
+def verify_allowed_url(mar, allowed_url_prefixes):
+ if not any(mar.startswith(prefix) for prefix in allowed_url_prefixes):
+ raise ValueError(
+ "{mar} is not in allowed URL prefixes: {p}".format(
+ mar=mar, p=allowed_url_prefixes
+ )
+ )
+
+
+async def download(url, dest, mode=None): # noqa: E999
+ log.info("Downloading %s to %s", url, dest)
+ chunk_size = 4096
+ bytes_downloaded = 0
+ async with aiohttp.ClientSession(raise_for_status=True) as session:
+ start = time.time()
+ async with session.get(url, timeout=120) as resp:
+ # Additional early logging for download timeouts.
+ log.debug("Fetching from url %s", resp.url)
+ for history in resp.history:
+ log.debug("Redirection history: %s", history.url)
+ log.debug("Headers for %s: %s", resp.url, resp.headers)
+ if "Content-Length" in resp.headers:
+ log.debug(
+ "Content-Length expected for %s: %s",
+ url,
+ resp.headers["Content-Length"],
+ )
+ log_interval = chunk_size * 1024
+ with open(dest, "wb") as fd:
+ while True:
+ chunk = await resp.content.read(chunk_size)
+ if not chunk:
+ break
+ fd.write(chunk)
+ bytes_downloaded += len(chunk)
+ log_interval -= len(chunk)
+ if log_interval <= 0:
+ log.debug("Bytes downloaded for %s: %d", url, bytes_downloaded)
+ log_interval = chunk_size * 1024
+ end = time.time()
+ log.info(
+ "Downloaded %s, %s bytes in %s seconds",
+ url,
+ bytes_downloaded,
+ int(end - start),
+ )
+ if mode:
+ log.info("chmod %o %s", mode, dest)
+ os.chmod(dest, mode)
+
+
+async def download_buildsystem_bits(partials_config, downloads, tools_dir):
+ """Download external tools needed to make partials."""
+
+ # We're making the assumption that the "to" mar is the same for all,
+ # as that's the way this task is currently used.
+ to_url = extract_download_urls(partials_config, mar_type="to").pop()
+
+ repo = get_option(
+ downloads[to_url]["extracted_path"],
+ filename="platform.ini",
+ section="Build",
+ option="SourceRepository",
+ )
+ revision = get_option(
+ downloads[to_url]["extracted_path"],
+ filename="platform.ini",
+ section="Build",
+ option="SourceStamp",
+ )
+
+ urls = {
+ "make_incremental_update.sh": f"{repo}/raw-file/{revision}/tools/"
+ "update-packaging/make_incremental_update.sh",
+ "common.sh": f"{repo}/raw-file/{revision}/tools/update-packaging/common.sh",
+ "mar": "https://archive.mozilla.org/pub/mozilla.org/firefox/nightly/"
+ "latest-mozilla-central/mar-tools/linux64/mar",
+ "mbsdiff": "https://archive.mozilla.org/pub/mozilla.org/firefox/nightly/"
+ "latest-mozilla-central/mar-tools/linux64/mbsdiff",
+ }
+ for filename, url in urls.items():
+ filename = tools_dir / filename
+ await retry_download(url, dest=filename, mode=0o755)
+
+
+def find_file(directory, filename):
+ log.debug("Searching for %s in %s", filename, directory)
+ return next(Path(directory).rglob(filename))
+
+
+def get_option(directory, filename, section, option):
+ log.info("Extracting [%s]: %s from %s/**/%s", section, option, directory, filename)
+ f = find_file(directory, filename)
+ config = configparser.ConfigParser()
+ config.read(f)
+ rv = config.get(section, option)
+ log.info("Found %s", rv)
+ return rv
+
+
+def extract_download_urls(partials_config, mar_type):
+ """Extract a set of urls to download from the task configuration.
+
+ mar_type should be one of "from", "to"
+ """
+ return {definition[f"{mar_type}_mar"] for definition in partials_config}
+
+
+async def download_and_verify_mars(partials_config, allowed_url_prefixes, signing_cert):
+ """Download, check signature, channel ID and unpack MAR files."""
+ # Separate these categories so we can opt to perform checks on only 'to' downloads.
+ from_urls = extract_download_urls(partials_config, mar_type="from")
+ to_urls = extract_download_urls(partials_config, mar_type="to")
+ tasks = list()
+ downloads = dict()
+
+ semaphore = asyncio.Semaphore(2) # Magic 2 to reduce network timeout errors.
+ for url in from_urls.union(to_urls):
+ verify_allowed_url(url, allowed_url_prefixes)
+ downloads[url] = {
+ "download_path": Path(tempfile.mkdtemp()) / Path(url).name,
+ }
+ tasks.append(
+ retry_download(url, downloads[url]["download_path"], semaphore=semaphore)
+ )
+
+ await asyncio.gather(*tasks)
+
+ for url in downloads:
+ # Verify signature, but not from an artifact as we don't
+ # depend on the signing task
+ if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION") and not url.startswith(
+ QUEUE_PREFIX
+ ):
+ verify_signature(downloads[url]["download_path"], signing_cert)
+
+ # Only validate the target channel ID, as we update from beta->release
+ if url in to_urls:
+ validate_mar_channel_id(
+ downloads[url]["download_path"], os.environ["MAR_CHANNEL_ID"]
+ )
+
+ downloads[url]["extracted_path"] = tempfile.mkdtemp()
+ with open(downloads[url]["download_path"], "rb") as mar_fh:
+ log.info(
+ "Unpacking %s into %s",
+ downloads[url]["download_path"],
+ downloads[url]["extracted_path"],
+ )
+ m = MarReader(mar_fh)
+ m.extract(downloads[url]["extracted_path"])
+
+ return downloads
+
+
+async def run_command(cmd, cwd="/", env=None, label=None, silent=False):
+ log.info("Running: %s", cmd)
+ if not env:
+ env = dict()
+ process = await asyncio.create_subprocess_shell(
+ cmd,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE,
+ cwd=cwd,
+ env=env,
+ )
+ if label:
+ label = "{}: ".format(label)
+ else:
+ label = ""
+
+ async def read_output(stream, label, printcmd):
+ while True:
+ line = await stream.readline()
+ if line == b"":
+ break
+ printcmd("%s%s", label, line.decode("utf-8").rstrip())
+
+ if silent:
+ await process.wait()
+ else:
+ await asyncio.gather(
+ read_output(process.stdout, label, log.info),
+            read_output(process.stderr, label, log.warning),
+ )
+ await process.wait()
+
+
+async def generate_partial(from_dir, to_dir, dest_mar, mar_data, tools_dir, arch):
+ log.info("Generating partial %s", dest_mar)
+ env = os.environ.copy()
+ env["LC_ALL"] = "C"
+ env["MAR"] = tools_dir / "mar"
+ env["MBSDIFF"] = tools_dir / "mbsdiff"
+ if arch:
+ env["BCJ_OPTIONS"] = " ".join(BCJ_OPTIONS[arch])
+ env["MOZ_PRODUCT_VERSION"] = mar_data["version"]
+ env["MAR_CHANNEL_ID"] = mar_data["MAR_CHANNEL_ID"]
+ env["BRANCH"] = mar_data["branch"]
+
+ make_incremental_update = tools_dir / "make_incremental_update.sh"
+ cmd = f"{make_incremental_update} {dest_mar} {from_dir} {to_dir}"
+
+ await run_command(cmd, cwd=dest_mar.parent, env=env, label=dest_mar.name)
+ validate_mar_channel_id(dest_mar, mar_data["MAR_CHANNEL_ID"])
+
+
+async def manage_partial(
+ partial_def, artifacts_dir, tools_dir, downloads, semaphore, arch=None
+):
+ from_url = partial_def["from_mar"]
+ to_url = partial_def["to_mar"]
+ from_path = downloads[from_url]["extracted_path"]
+ to_path = downloads[to_url]["extracted_path"]
+
+ mar_data = {
+ "MAR_CHANNEL_ID": os.environ["MAR_CHANNEL_ID"],
+ "version": get_option(
+ to_path, filename="application.ini", section="App", option="Version"
+ ),
+ "appName": get_option(
+ from_path, filename="application.ini", section="App", option="Name"
+ ),
+ # Use Gecko repo and rev from platform.ini, not application.ini
+ "repo": get_option(
+ to_path, filename="platform.ini", section="Build", option="SourceRepository"
+ ),
+ "revision": get_option(
+ to_path, filename="platform.ini", section="Build", option="SourceStamp"
+ ),
+ "locale": partial_def["locale"],
+ "from_mar": partial_def["from_mar"],
+ "from_size": os.path.getsize(downloads[from_url]["download_path"]),
+ "from_hash": get_hash(downloads[from_url]["download_path"], hash_alg="sha512"),
+ "from_buildid": get_option(
+ from_path, filename="application.ini", section="App", option="BuildID"
+ ),
+ "to_mar": partial_def["to_mar"],
+ "to_size": os.path.getsize(downloads[to_url]["download_path"]),
+ "to_hash": get_hash(downloads[to_url]["download_path"], hash_alg="sha512"),
+ "to_buildid": get_option(
+ to_path, filename="application.ini", section="App", option="BuildID"
+ ),
+ "mar": partial_def["dest_mar"],
+ }
+    # If "branch" is not set explicitly, use the repo name.
+ mar_data["branch"] = partial_def.get("branch", Path(mar_data["repo"]).name)
+
+ for field in (
+ "update_number",
+ "previousVersion",
+ "previousBuildNumber",
+ "toVersion",
+ "toBuildNumber",
+ ):
+ if field in partial_def:
+ mar_data[field] = partial_def[field]
+
+ dest_mar = Path(artifacts_dir) / mar_data["mar"]
+
+ async with semaphore:
+ await generate_partial(from_path, to_path, dest_mar, mar_data, tools_dir, arch)
+
+ mar_data["size"] = os.path.getsize(dest_mar)
+ mar_data["hash"] = get_hash(dest_mar, hash_alg="sha512")
+ return mar_data
+
+
+async def async_main(args, signing_cert):
+ tasks = []
+
+ allowed_url_prefixes = list(ALLOWED_URL_PREFIXES)
+ if args.allow_staging_prefixes:
+ allowed_url_prefixes += STAGING_URL_PREFIXES
+
+ task = json.load(args.task_definition)
+
+ downloads = await download_and_verify_mars(
+ task["extra"]["funsize"]["partials"], allowed_url_prefixes, signing_cert
+ )
+
+ tools_dir = Path(tempfile.mkdtemp())
+ await download_buildsystem_bits(
+ partials_config=task["extra"]["funsize"]["partials"],
+ downloads=downloads,
+ tools_dir=tools_dir,
+ )
+
+ # May want to consider os.cpu_count() if we ever run on osx/win.
+ # sched_getaffinity is the list of cores we can run on, not the total.
+ semaphore = asyncio.Semaphore(len(os.sched_getaffinity(0)))
+ for definition in task["extra"]["funsize"]["partials"]:
+ tasks.append(
+ asyncio.ensure_future(
+ retry_async(
+ manage_partial,
+ retry_exceptions=(aiohttp.ClientError, asyncio.TimeoutError),
+ kwargs=dict(
+ partial_def=definition,
+ artifacts_dir=args.artifacts_dir,
+ tools_dir=tools_dir,
+ arch=args.arch,
+ downloads=downloads,
+ semaphore=semaphore,
+ ),
+ )
+ )
+ )
+ manifest = await asyncio.gather(*tasks)
+
+ for url in downloads:
+ downloads[url]["download_path"].unlink()
+ shutil.rmtree(downloads[url]["extracted_path"])
+ shutil.rmtree(tools_dir)
+
+ return manifest
+
+
+def main():
+ args = process_arguments()
+
+ logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s")
+ log.setLevel(args.log_level)
+
+ signing_cert = args.signing_cert.read()
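+    # Sanity check: the MAR signing keys this image verifies against are
+    # expected to be 4096-bit.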
+ assert get_keysize(signing_cert) == 4096
+
+ artifacts_dir = Path(args.artifacts_dir)
+ if not artifacts_dir.exists():
+ artifacts_dir.mkdir()
+
+ loop = asyncio.get_event_loop()
+ manifest = loop.run_until_complete(async_main(args, signing_cert))
+ loop.close()
+
+ manifest_file = artifacts_dir / "manifest.json"
+ with open(manifest_file, "w") as fp:
+ json.dump(manifest, fp, indent=2, sort_keys=True)
+
+ log.debug("{}".format(json.dumps(manifest, indent=2, sort_keys=True)))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh b/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
new file mode 100755
index 0000000000..965d938247
--- /dev/null
+++ b/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
@@ -0,0 +1,157 @@
+#!/bin/bash
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#
+# This tool contains the functions used to handle/enable funsize patch caching
+# Author: Mihai Tabara
+#
+
+HOOK=
+AWS_BUCKET_NAME=
+LOCAL_CACHE_DIR=
+
+# Don't cache files smaller than this, as it's slower with S3
+# Bug 1437473
+CACHE_THRESHOLD=500000
+
+S3_CACHE_HITS=0
+S3_CACHE_MISSES=0
+
+getsha512(){
+ openssl sha512 "${1}" | awk '{print $2}'
+}
+
+print_usage(){
+ echo "$(basename "$0") [-S S3-BUCKET-NAME] [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] PATH-FROM-URL PATH-TO-URL PATH-PATCH"
+ echo "Script that saves/retrieves from cache presumptive patches as args"
+ echo ""
+ echo "-A SERVER-URL - host where to send the files"
+ echo "-c LOCAL-CACHE-DIR-PATH local path to which patches are cached"
+ echo "-g pre hook - tests whether patch already in cache"
+ echo "-u post hook - upload patch to cache for future use"
+ echo ""
+ echo "PATH-FROM-URL : path on disk for source file"
+ echo "PATH-TO-URL : path on disk for destination file"
+ echo "PATH-PATCH : path on disk for patch between source and destination"
+}
+
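+# Cache layout: patches are keyed by the SHA-512 of the source and
+# destination files, i.e.
+#   local: $LOCAL_CACHE_DIR/<sha_from>/<sha_to>
+#   s3:    s3://${AWS_BUCKET_NAME}<sha_from>/<sha_to>/<patch filename>
+# (AWS_BUCKET_NAME already includes any path prefix and a trailing slash.)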
+upload_patch(){
+ if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
+ then
+ return 0
+ fi
+ sha_from=$(getsha512 "$1")
+ sha_to=$(getsha512 "$2")
+ patch_path="$3"
+ patch_filename="$(basename "$3")"
+
+ # save to local cache first
+ if [ -n "$LOCAL_CACHE_DIR" ]; then
+ local_cmd="mkdir -p "$LOCAL_CACHE_DIR/$sha_from""
+ if $local_cmd >&2; then
+ cp -avf "${patch_path}" "$LOCAL_CACHE_DIR/$sha_from/$sha_to"
+ echo "${patch_path} saved on local cache."
+ fi
+ fi
+
+ if [ -n "${AWS_BUCKET_NAME}" ]; then
+ BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${patch_filename}"
+ if aws s3 cp "${patch_path}" "${BUCKET_PATH}"; then
+ echo "${patch_path} saved on s://${AWS_BUCKET_NAME}"
+ return 0
+ fi
+ echo "${patch_path} failed to be uploaded to s3://${AWS_BUCKET_NAME}"
+ return 1
+ fi
+ return 0
+}
+
+get_patch(){
+ # $1 and $2 are the /path/to/filename
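+    # Returns 0 when a cached patch is found and copied into place (so the
+    # caller can skip regenerating it); returns 1 on a cache miss.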
+ if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
+ then
+ return 1
+ fi
+ sha_from=$(getsha512 "$1")
+ sha_to=$(getsha512 "$2")
+ destination_file="$3"
+ s3_filename="$(basename "$3")"
+
+ # Try to retrieve from local cache first.
+ if [ -n "$LOCAL_CACHE_DIR" ]; then
+ if [ -r "$LOCAL_CACHE_DIR/$sha_from/$sha_to" ]; then
+ cp -avf "$LOCAL_CACHE_DIR/$sha_from/$sha_to" "$destination_file"
+ echo "Successful retrieved ${destination_file} from local cache."
+ return 0
+ fi
+ fi
+ # If not in the local cache, we might find it remotely.
+
+ if [ -n "${AWS_BUCKET_NAME}" ]; then
+ BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${s3_filename}"
+ if aws s3 ls "${BUCKET_PATH}"; then
+ ((S3_CACHE_HITS++))
+ echo "s3 cache hit for ${s3_filename} (${S3_CACHE_HITS} total hits)"
+ if aws s3 cp "${BUCKET_PATH}" "${destination_file}"; then
+ echo "Successful retrieved ${destination_file} from s3://${AWS_BUCKET_NAME}"
+ return 0
+ else
+ echo "Failed to retrieve ${destination_file} from s3://${AWS_BUCKET_NAME}"
+ return 1
+ fi
+ # Not found, fall through to default error
+ else
+ ((S3_CACHE_MISSES++))
+ echo "s3 cache miss for ${s3_filename} (${S3_CACHE_MISSES} total misses)"
+ fi
+ fi
+ return 1
+}
+
+OPTIND=1
+
+while getopts ":S:c:gu" option; do
+ case $option in
+ S)
+ # This will probably be bucketname/path/prefix but we can use it either way
+ AWS_BUCKET_NAME="$OPTARG"
+ # Ensure trailing slash is there.
+ if [[ ! $AWS_BUCKET_NAME =~ .*/$ ]]; then
+ AWS_BUCKET_NAME="${AWS_BUCKET_NAME}/"
+ fi
+ ;;
+ c)
+ LOCAL_CACHE_DIR="$OPTARG"
+ ;;
+ g)
+ HOOK="PRE"
+ ;;
+ u)
+ HOOK="POST"
+ ;;
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ print_usage
+ exit 1
+ ;;
+ :)
+ echo "Option -$OPTARG requires an argument." >&2
+ print_usage
+ exit 1
+ ;;
+ *)
+ echo "Unimplemented option: -$OPTARG" >&2
+ print_usage
+ exit 1
+ ;;
+ esac
+done
+shift $((OPTIND-1))
+
+if [ "$HOOK" == "PRE" ]; then
+ get_patch "$1" "$2" "$3"
+elif [ "$HOOK" == "POST" ]; then
+ upload_patch "$1" "$2" "$3"
+fi