# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import concurrent.futures as futures
import io
import mimetypes
import os
import sys
from pprint import pprint

import boto3
import botocore
import requests
from mozbuild.util import memoize


@memoize
def create_aws_session():
    """
    Create the AWS session that is shared by both the upload and the
    delete operations.
    """
    region = "us-west-2"
    level = os.environ.get("MOZ_SCM_LEVEL", "1")
    bucket = {
        "1": "gecko-docs.mozilla.org-l1",
        "2": "gecko-docs.mozilla.org-l2",
        "3": "gecko-docs.mozilla.org",
    }[level]
    secrets_url = "http://taskcluster/secrets/v1/secret/"
    secrets_url += "project/releng/gecko/build/level-{}/gecko-docs-upload".format(level)

    # Get the credentials from the TC secrets service. Note that these
    # differ per SCM level.
    if "TASK_ID" in os.environ:
        print("Using AWS credentials from the secrets service")
        session = requests.Session()
        res = session.get(secrets_url)
        res.raise_for_status()
        secret = res.json()["secret"]
        session = boto3.session.Session(
            aws_access_key_id=secret["AWS_ACCESS_KEY_ID"],
            aws_secret_access_key=secret["AWS_SECRET_ACCESS_KEY"],
            region_name=region,
        )
    else:
        print("Trying to use your AWS credentials...")
        session = boto3.session.Session(region_name=region)

    s3 = session.client("s3", config=botocore.client.Config(max_pool_connections=20))

    return s3, bucket
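

# Illustrative sketch (not called anywhere in this module): because of
# @memoize, every caller gets back the same client and bucket, so the upload
# and delete paths share a single 20-connection pool.
#
#   s3, bucket = create_aws_session()
#   s3_again, _ = create_aws_session()
#   assert s3 is s3_again  # memoized: one shared client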


@memoize
def get_s3_keys(s3, bucket):
    """Return every key in the bucket, following list_objects_v2 pagination."""
    kwargs = {"Bucket": bucket}
    all_keys = []
    while True:
        response = s3.list_objects_v2(**kwargs)
        for obj in response["Contents"]:
            all_keys.append(obj["Key"])

        # list_objects_v2 returns at most 1000 keys per call; keep fetching
        # until there is no continuation token left.
        try:
            kwargs["ContinuationToken"] = response["NextContinuationToken"]
        except KeyError:
            break

    return all_keys
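

# Note: an equivalent formulation (a sketch, not used above) relies on
# boto3's built-in paginator, which hides the continuation-token loop:
#
#   paginator = s3.get_paginator("list_objects_v2")
#   all_keys = [
#       obj["Key"]
#       for page in paginator.paginate(Bucket=bucket)
#       for obj in page.get("Contents", [])
#   ]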


def s3_set_redirects(redirects):
    """Install website routing rules from a ``{path_prefix: new_prefix}`` mapping."""
    s3, bucket = create_aws_session()

    configuration = {"IndexDocument": {"Suffix": "index.html"}, "RoutingRules": []}

    for path, redirect in redirects.items():
        rule = {
            "Condition": {"KeyPrefixEquals": path},
            "Redirect": {"ReplaceKeyPrefixWith": redirect},
        }
        if os.environ.get("MOZ_SCM_LEVEL") == "3":
            rule["Redirect"]["HostName"] = "firefox-source-docs.mozilla.org"

        configuration["RoutingRules"].append(rule)

    s3.put_bucket_website(
        Bucket=bucket,
        WebsiteConfiguration=configuration,
    )
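

# Usage sketch (the mapping here is hypothetical; real redirects come from
# the docs configuration): requests for any key starting with "old-docs/"
# would be redirected to the same key under "new-docs/".
#
#   s3_set_redirects({"old-docs/": "new-docs/"})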


def s3_delete_missing(files, key_prefix=None):
    """Delete stale files from the S3 bucket.

    Delete files in the S3 bucket that do not appear in ``files``. If
    ``key_prefix`` is not specified, only missing files outside the
    ``main/`` prefix are removed; otherwise, only files whose keys start
    with ``key_prefix`` are considered for removal.
    """
    s3, bucket = create_aws_session()
    files_on_server = get_s3_keys(s3, bucket)
    if key_prefix:
        files_on_server = [
            path for path in files_on_server if path.startswith(key_prefix)
        ]
    else:
        files_on_server = [
            path for path in files_on_server if not path.startswith("main/")
        ]
    files = [key_prefix + "/" + path if key_prefix else path for path, f in files]
    files_to_delete = [path for path in files_on_server if path not in files]

    # DeleteObjects accepts at most 1000 keys per request, so delete in
    # batches of that size.
    query_size = 1000
    while files_to_delete:
        keys_to_remove = [{"Key": key} for key in files_to_delete[:query_size]]
        response = s3.delete_objects(
            Bucket=bucket,
            Delete={
                "Objects": keys_to_remove,
            },  # NOQA
        )
        pprint(response, indent=2)
        files_to_delete = files_to_delete[query_size:]
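

# Usage sketch (the prefix and file pair are hypothetical): prune everything
# under "projects/example/" that is not part of the freshly generated set.
#
#   s3_delete_missing([("index.html", html_file)], key_prefix="projects/example")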


def s3_upload(files, key_prefix=None):
    """Upload files to an S3 bucket.

    ``files`` is an iterable of ``(path, BaseFile)`` (typically from a
    mozpack Finder).

    Keys in the bucket correspond to source filenames. If ``key_prefix`` is
    defined, key names will be ``<key_prefix>/<path>``.
    """
    s3, bucket = create_aws_session()

    def upload(f, path, bucket, key, extra_args):
        # Need to flush to avoid buffering/interleaving from multiple threads.
        sys.stdout.write("uploading %s to %s\n" % (path, key))
        sys.stdout.flush()
        s3.upload_fileobj(f, bucket, key, ExtraArgs=extra_args)

    fs = []
    with futures.ThreadPoolExecutor(20) as e:
        for path, f in files:
            content_type, content_encoding = mimetypes.guess_type(path)
            extra_args = {}
            if content_type:
                if content_type.startswith("text/"):
                    content_type += '; charset="utf-8"'
                extra_args["ContentType"] = content_type
            if content_encoding:
                extra_args["ContentEncoding"] = content_encoding

            if key_prefix:
                key = "%s/%s" % (key_prefix, path)
            else:
                key = path

            # The file types returned by mozpack behave like file objects. But
            # they don't accept an argument to read(). So we wrap in a BytesIO.
            fs.append(
                e.submit(upload, io.BytesIO(f.read()), path, bucket, key, extra_args)
            )

    s3_delete_missing(files, key_prefix)
    # Need to do this to catch any exceptions.
    for f in fs:
        f.result()
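

# Usage sketch (the build directory and prefix are hypothetical; FileFinder
# is the mozpack helper the docstring above refers to). Note the list():
# ``files`` is iterated twice, once for the uploads and once inside
# s3_delete_missing, so a one-shot generator would not work here.
#
#   from mozpack.files import FileFinder
#
#   files = list(FileFinder("docs/_build/html").find("**"))
#   s3_upload(files, key_prefix="main/latest")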