base="$(realpath "$(dirname "$0")")"
export PATH="$PATH:/builds/worker/bin:$base"

DUMP_SYMS_PATH="${MOZ_FETCHES_DIR}/dump_syms/dump_syms"

cd /builds/worker

if test "$PROCESSED_PACKAGES_INDEX" && test "$PROCESSED_PACKAGES_PATH" && test "$TASKCLUSTER_ROOT_URL"; then
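  # The Taskcluster index API serves the latest artifact for an index path:
  #   <root-url>/api/index/v1/task/<index-path>/artifacts/<artifact-path>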
  PROCESSED_PACKAGES="$TASKCLUSTER_ROOT_URL/api/index/v1/task/$PROCESSED_PACKAGES_INDEX/artifacts/$PROCESSED_PACKAGES_PATH"
fi

if test "$PROCESSED_PACKAGES"; then
  rm -f processed-packages
  if test "$(curl --output /dev/null --silent --head --location "$PROCESSED_PACKAGES" -w "%{http_code}")" = 200; then
    curl -L "$PROCESSED_PACKAGES" | gzip -dc > processed-packages
  elif test -f "$PROCESSED_PACKAGES"; then
    gzip -dc "$PROCESSED_PACKAGES" > processed-packages
  fi
  if test -f processed-packages; then
    # Prevent reposado from downloading packages that have previously been
    # dumped.
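    # (Each line of processed-packages is the path of a previously-downloaded
    # file; recreating it as an empty placeholder makes repo_sync skip it.)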
    for f in $(cat processed-packages); do
      mkdir -p "$(dirname "$f")"
      touch "$f"
    done
  fi
fi

mkdir -p /opt/data-reposado/html /opt/data-reposado/metadata
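# (reposado keeps the replicated packages under html/ and its catalog state
# under metadata/, so both roots must exist before the first sync.)
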
# First, just fetch all the update info.
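# (repo_sync is reposado's replication tool; with --no-download it should
# fetch only the update catalogs, not the packages themselves.)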
python3 /usr/local/bin/repo_sync --no-download

# Next, fetch just the update packages we're interested in.
packages=$(python3 "${base}/list-packages.py")
# shellcheck disable=SC2086
python3 /usr/local/bin/repo_sync $packages

du -sh /opt/data-reposado

# Now scrape symbols out of anything that was downloaded.
mkdir -p symbols artifacts
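# PackageSymbolDumper.py runs dump_syms over the binaries inside the downloaded
# packages, writes the symbol files under /builds/worker/symbols, and records
# each finished package in the tracking file, which feeds the placeholder step
# above on the next run.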
python3 "${base}/PackageSymbolDumper.py" \
  --tracking-file=/builds/worker/processed-packages \
  --dump_syms="${DUMP_SYMS_PATH}" \
  /opt/data-reposado/html/content/downloads \
  /builds/worker/symbols

gzip -c processed-packages > artifacts/processed-packages.gz

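# Package the dumped symbols for upload; the `|| echo` keeps the script from
# failing when nothing new was dumped and zip finds nothing to add.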
cd symbols
zip -r9 /builds/worker/artifacts/target.crashreporter-symbols.zip ./* || echo "No symbols dumped"