# Directory that funsize writes its outputs into; make sure it exists.
ARTIFACTS_DIR="/home/worker/artifacts"
mkdir -p -- "$ARTIFACTS_DIR"
# Normalise TASKCLUSTER_ROOT_URL: drop a single trailing "/" if present,
# then export the cleaned value for child processes.
TASKCLUSTER_ROOT_URL="${TASKCLUSTER_ROOT_URL%/}"
export TASKCLUSTER_ROOT_URL
# Bash stand-in for taskcluster-lib-urls: build the queue service base URL
# from the (already slash-stripped) root URL.
queue_base="${TASKCLUSTER_ROOT_URL%/}/api/queue/v1"
# Download this task's definition from the queue service.
# --fail: exit non-zero on an HTTP error instead of saving the error page
#   as task.json (which jq would then silently fail to find scopes in).
# --location: follow redirects; --retry: ride out transient queue hiccups.
curl --location --fail --retry 10 --retry-delay 10 \
    -o /home/worker/task.json "$queue_base/task/$TASK_ID"
# A scope of the form
#   auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/
# yields bucket "tc-gp-private-1d-us-east-1" and path "releng/mbsdiff-cache/";
# the 4th ":"-field is the "bucket/path" part.
# The trailing slash is important, due to prefix permissions in S3.
S3_BUCKET_AND_PATH=$(
  jq -r '.scopes[] | select(contains ("auth:aws-s3"))' /home/worker/task.json \
    | awk -F: '{print $4}'
)
# Will be empty if there's no scope for AWS S3.  Only fetch credentials when
# a scope exists AND the taskcluster proxy host resolves (i.e. the worker is
# running with the proxy enabled).
if [ -n "${S3_BUCKET_AND_PATH}" ] && getent hosts taskcluster
then
  # Does this parse as we expect?  "bucket/path/" -> path, bucket.
  S3_PATH=${S3_BUCKET_AND_PATH#*/}
  AWS_BUCKET_NAME=${S3_BUCKET_AND_PATH%/"${S3_PATH}"*}
  test "${AWS_BUCKET_NAME}"

  set +x  # Don't echo these.
  secret_url="${TASKCLUSTER_PROXY_URL}/api/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}"
  # --fail keeps an HTTP error page from being treated as a credential
  # payload; --silent keeps curl's progress noise out of the log while
  # tracing is off.
  AUTH=$(curl --fail --silent "${secret_url}")
  # "// empty" makes jq emit nothing (rather than the literal string
  # "null") when the key is missing, so the -n guards below stay honest.
  AWS_ACCESS_KEY_ID=$(printf '%s' "${AUTH}" | jq -r '.credentials.accessKeyId // empty')
  AWS_SECRET_ACCESS_KEY=$(printf '%s' "${AUTH}" | jq -r '.credentials.secretAccessKey // empty')
  AWS_SESSION_TOKEN=$(printf '%s' "${AUTH}" | jq -r '.credentials.sessionToken // empty')
  export AWS_ACCESS_KEY_ID
  export AWS_SECRET_ACCESS_KEY
  export AWS_SESSION_TOKEN
  set -x  # NOTE(review): restore tracing after the secret section — the
          # original restore falls in a gap of this chunk; confirm placement.

  if [ -n "$AWS_ACCESS_KEY_ID" ] && [ -n "$AWS_SECRET_ACCESS_KEY" ]; then
    # Pass the full bucket/path prefix, as the script just appends local files.
    export MBSDIFF_HOOK="/home/worker/bin/mbsdiff_hook.sh -S ${S3_BUCKET_AND_PATH}"
  fi
fi
# Hand off to the partial-update generator.  EXTRA_PARAMS is optional and
# intentionally word-split when present (hence the shellcheck waiver).
# shellcheck disable=SC2086
python3 /home/worker/bin/funsize.py \
    --artifacts-dir "$ARTIFACTS_DIR" \
    --task-definition /home/worker/task.json \
    --signing-cert "/home/worker/keys/${SIGNING_CERT}.pubkey" \