blob: 8f9ff76d031fe1a204e3ec754ae71c14c353d27e (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
|
#!/bin/sh
# Task entry point: sync Apple software-update packages with reposado and
# dump crash-reporter symbols from them.
# -v echoes script lines as read, -e aborts on the first failing command,
# -x traces expanded commands -- together they make task logs self-explanatory.
set -v -e -x
# Absolute directory containing this script; used below to invoke the
# helper scripts (list-packages.py, PackageSymbolDumper.py) shipped with it.
base="$(realpath "$(dirname "$0")")"
export PATH="$PATH:/builds/worker/bin:$base"
# All relative paths below (processed-packages, symbols, artifacts) are
# rooted in the worker home directory.
cd /builds/worker
# Derive the processed-packages artifact URL from the Taskcluster index when
# all three pieces of its location are supplied via the environment.
if test -n "$PROCESSED_PACKAGES_INDEX" && test -n "$PROCESSED_PACKAGES_PATH" && test -n "$TASKCLUSTER_ROOT_URL"; then
  PROCESSED_PACKAGES="$TASKCLUSTER_ROOT_URL/api/index/v1/task/$PROCESSED_PACKAGES_INDEX/artifacts/$PROCESSED_PACKAGES_PATH"
fi
if test -n "$PROCESSED_PACKAGES"; then
  rm -f processed-packages
  # PROCESSED_PACKAGES may be either a URL or a local file path; probe with a
  # HEAD request first.  Use $(...) instead of backticks and quote the
  # substitution so an empty curl result cannot break the test expression.
  if test "$(curl --output /dev/null --silent --head --location "$PROCESSED_PACKAGES" -w "%{http_code}")" = 200; then
    curl -L "$PROCESSED_PACKAGES" | gzip -dc > processed-packages
  elif test -f "$PROCESSED_PACKAGES"; then
    gzip -dc "$PROCESSED_PACKAGES" > processed-packages
  fi
  if test -f processed-packages; then
    # Prevent reposado from downloading packages that have previously been
    # dumped.  Read the file line-by-line instead of word-splitting
    # $(cat ...) so package paths containing spaces are handled correctly.
    while IFS= read -r f; do
      mkdir -p "$(dirname "$f")"
      touch "$f"
    done < processed-packages
  fi
fi
# Directories reposado expects for downloaded packages and catalog metadata.
mkdir -p /opt/data-reposado/html /opt/data-reposado/metadata
# First, just fetch all the update info.
python3 /usr/local/bin/repo_sync --no-download
# Next, fetch just the update packages we're interested in.
packages=$(python3 "${base}/list-packages.py")
# $packages is intentionally left unquoted: it is a whitespace-separated list
# of package identifiers that must be passed as separate arguments.
# shellcheck disable=SC2086
python3 /usr/local/bin/repo_sync $packages
du -sh /opt/data-reposado
# Now scrape symbols out of anything that was downloaded.
mkdir -p symbols artifacts
# --tracking-file records each package as it is processed, so later runs can
# skip already-dumped packages (see the processed-packages handling above).
python3 "${base}/PackageSymbolDumper.py" --tracking-file=/builds/worker/processed-packages --dump_syms=/builds/worker/bin/dump_syms_mac /opt/data-reposado/html/content/downloads /builds/worker/symbols
# Hand out artifacts
gzip -c processed-packages > artifacts/processed-packages.gz
cd symbols
# '|| echo' keeps the task green under 'set -e' when nothing was dumped
# (zip exits non-zero when the file list is empty) -- intentional best-effort.
zip -r9 /builds/worker/artifacts/target.crashreporter-symbols.zip ./* || echo "No symbols dumped"
|