# Bug 1914411 - shouldInlineCallDirect: disallow inlining of imported functions. r...
# [gecko.git] / taskcluster / gecko_taskgraph / decision.py
# blob 0b71799ecb1d96eede560d137093a41de00b5558
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import logging
import os
import shutil
import sys
import time
from collections import defaultdict

import yaml
from redo import retry
from taskgraph import create
from taskgraph.create import create_tasks

# TODO: Let standalone taskgraph generate parameters instead of calling internals
from taskgraph.decision import (
    _determine_more_accurate_base_ref,
    _determine_more_accurate_base_rev,
    _get_env_prefix,
)
from taskgraph.generator import TaskGraphGenerator
from taskgraph.parameters import Parameters
from taskgraph.taskgraph import TaskGraph
from taskgraph.util.python_path import find_object
from taskgraph.util.taskcluster import get_artifact
from taskgraph.util.vcs import get_repository
from taskgraph.util.yaml import load_yaml

from . import GECKO
from .actions import render_actions_json
from .files_changed import get_changed_files
from .parameters import get_app_version, get_version
from .try_option_syntax import parse_message
from .util.backstop import ANDROID_PERFTEST_BACKSTOP_INDEX, BACKSTOP_INDEX, is_backstop
from .util.bugbug import push_schedules
from .util.chunking import resolver
from .util.hg import get_hg_commit_message, get_hg_revision_branch
from .util.partials import populate_release_history
from .util.taskcluster import insert_index
from .util.taskgraph import find_decision_task, find_existing_tasks_from_previous_kinds
logger = logging.getLogger(__name__)

ARTIFACTS_DIR = "artifacts"

# For each project, this gives a set of parameters specific to the project.
# See `taskcluster/docs/parameters.rst` for information on parameters.
PER_PROJECT_PARAMETERS = {
    "try": {
        "enable_always_target": True,
        "target_tasks_method": "try_tasks",
        "release_type": "nightly",
    },
    "kaios-try": {
        "target_tasks_method": "try_tasks",
    },
    "ash": {
        "target_tasks_method": "default",
    },
    "cedar": {
        "target_tasks_method": "default",
    },
    "holly": {
        "enable_always_target": True,
        "target_tasks_method": "holly_tasks",
    },
    "oak": {
        "target_tasks_method": "default",
        "release_type": "nightly-oak",
    },
    "graphics": {
        "target_tasks_method": "graphics_tasks",
    },
    "autoland": {
        "optimize_strategies": "gecko_taskgraph.optimize:project.autoland",
        "target_tasks_method": "autoland_tasks",
        "test_manifest_loader": "bugbug",  # Remove this line to disable "manifest scheduling".
    },
    "mozilla-central": {
        "target_tasks_method": "mozilla_central_tasks",
        "release_type": "nightly",
    },
    "mozilla-beta": {
        "target_tasks_method": "mozilla_beta_tasks",
        "release_type": "beta",
    },
    "mozilla-release": {
        "target_tasks_method": "mozilla_release_tasks",
        "release_type": "release",
    },
    "mozilla-esr128": {
        "target_tasks_method": "mozilla_esr128_tasks",
        "release_type": "esr128",
    },
    "pine": {
        "target_tasks_method": "pine_tasks",
        "release_type": "nightly-pine",
    },
    "cypress": {
        "target_tasks_method": "cypress_tasks",
    },
    "larch": {
        "target_tasks_method": "larch_tasks",
        "release_type": "nightly-larch",
    },
    "kaios": {
        "target_tasks_method": "kaios_tasks",
    },
    "toolchains": {
        "target_tasks_method": "mozilla_central_tasks",
    },
    # the default parameters are used for projects that do not match above.
    "default": {
        "target_tasks_method": "default",
    },
}
def full_task_graph_to_runnable_jobs(full_task_json):
    """Reduce a full task-graph JSON dump to its treeherder-visible jobs.

    Returns a mapping of task label to a small dict containing the
    treeherder ``symbol`` (always), plus ``groupName``, ``groupSymbol``,
    ``collection`` and ``platform`` when present. Tasks without treeherder
    metadata are omitted.
    """
    runnable_jobs = {}
    for label, node in full_task_json.items():
        extra = node["task"].get("extra", {})
        if "treeherder" not in extra:
            continue

        th = extra["treeherder"]
        job = {"symbol": th["symbol"]}
        for key in ("groupName", "groupSymbol", "collection"):
            if key in th:
                job[key] = th[key]
        platform = th.get("machine", {}).get("platform")
        if platform:
            job["platform"] = platform
        runnable_jobs[label] = job
    return runnable_jobs
def full_task_graph_to_manifests_by_task(full_task_json):
    """Map each task label to the list of test manifests it runs.

    Tasks whose attributes have no (or an empty) ``test_manifests`` entry
    are left out of the resulting mapping.
    """
    manifests_by_task = defaultdict(list)
    for label, node in full_task_json.items():
        task_manifests = node["attributes"].get("test_manifests")
        if task_manifests:
            manifests_by_task[label].extend(task_manifests)
    return manifests_by_task
def try_syntax_from_message(message):
    """
    Parse the try syntax out of a commit message, returning '' if none is
    found.
    """
    marker_pos = message.find("try:")
    if marker_pos < 0:
        return ""
    # Everything from "try:" through the end of that line.
    return message[marker_pos:].split("\n", 1)[0]
def taskgraph_decision(options, parameters=None):
    """
    Run the decision task. This function implements `mach taskgraph decision`,
    and is responsible for

     * processing decision task command-line options into parameters
     * running task-graph generation exactly the same way the other `mach
       taskgraph` commands do
     * generating a set of artifacts to memorialize the graph
     * calling TaskCluster APIs to create the graph

    Args:
        options (dict): decision task command-line options.
        parameters: optional pre-built parameters; when None they are derived
            from ``options`` via get_decision_parameters.
    """

    parameters = parameters or (
        lambda graph_config: get_decision_parameters(graph_config, options)
    )

    decision_task_id = os.environ["TASK_ID"]

    # create a TaskGraphGenerator instance
    tgg = TaskGraphGenerator(
        root_dir=options.get("root"),
        parameters=parameters,
        decision_task_id=decision_task_id,
        write_artifacts=True,
    )

    if not create.testing:
        # set additional index paths for the decision task
        set_decision_indexes(decision_task_id, tgg.parameters, tgg.graph_config)

    # write out the parameters used to generate this graph
    write_artifact("parameters.yml", dict(**tgg.parameters))

    # write out the public/actions.json file
    write_artifact(
        "actions.json",
        render_actions_json(tgg.parameters, tgg.graph_config, decision_task_id),
    )

    # write out the full graph for reference
    full_task_json = tgg.full_task_graph.to_json()
    write_artifact("full-task-graph.json", full_task_json)

    # write out the public/runnable-jobs.json file
    write_artifact(
        "runnable-jobs.json", full_task_graph_to_runnable_jobs(full_task_json)
    )

    # write out the public/manifests-by-task.json file
    write_artifact(
        "manifests-by-task.json.gz",
        full_task_graph_to_manifests_by_task(full_task_json),
    )

    # write out the public/tests-by-manifest.json file
    write_artifact("tests-by-manifest.json.gz", resolver.tests_by_manifest)

    # this is just a test to check whether the from_json() function is working
    _, _ = TaskGraph.from_json(full_task_json)

    # write out the target task set to allow reproducing this as input
    write_artifact("target-tasks.json", list(tgg.target_task_set.tasks.keys()))

    # write out the optimized task graph to describe what will actually happen,
    # and the map of labels to taskids
    write_artifact("task-graph.json", tgg.morphed_task_graph.to_json())
    write_artifact("label-to-taskid.json", tgg.label_to_taskid)

    # write bugbug scheduling information if it was invoked
    if len(push_schedules) > 0:
        write_artifact("bugbug-push-schedules.json", push_schedules.popitem()[1])

    # cache run-task, misc/fetch-content & robustcheckout.py
    scripts_root_dir = os.path.join(GECKO, "taskcluster/scripts")
    run_task_file_path = os.path.join(scripts_root_dir, "run-task")
    fetch_content_file_path = os.path.join(
        GECKO,
        "third_party",
        "python",
        "taskcluster_taskgraph",
        "taskgraph",
        "run-task",
        "fetch-content",
    )
    robustcheckout_path = os.path.join(
        GECKO,
        "testing/mozharness/external_tools/robustcheckout.py",
    )
    shutil.copy2(run_task_file_path, ARTIFACTS_DIR)
    shutil.copy2(fetch_content_file_path, ARTIFACTS_DIR)
    shutil.copy2(robustcheckout_path, ARTIFACTS_DIR)

    # actually create the graph
    create_tasks(
        tgg.graph_config,
        tgg.morphed_task_graph,
        tgg.label_to_taskid,
        tgg.parameters,
        decision_task_id=decision_task_id,
    )
def get_decision_parameters(graph_config, options):
    """
    Load parameters from the command-line options for 'taskgraph decision'.
    This also applies per-project parameters, based on the given project.

    Args:
        graph_config: the graph configuration; supplies ``product-dir`` and
            the optional ``taskgraph.decision-parameters`` callback.
        options (dict): parsed decision command-line options.

    Returns:
        Parameters: a validated Parameters instance.
    """
    product_dir = graph_config["product-dir"]

    parameters = {
        n: options[n]
        for n in [
            "base_repository",
            "base_ref",
            "base_rev",
            "head_repository",
            "head_rev",
            "head_ref",
            "head_tag",
            "project",
            "pushlog_id",
            "pushdate",
            "owner",
            "level",
            "repository_type",
            "target_tasks_method",
            "tasks_for",
        ]
        if n in options
    }

    commit_message = get_hg_commit_message(os.path.join(GECKO, product_dir))

    repo_path = os.getcwd()
    repo = get_repository(repo_path)
    parameters["base_ref"] = _determine_more_accurate_base_ref(
        repo,
        candidate_base_ref=options.get("base_ref"),
        head_ref=options.get("head_ref"),
        base_rev=options.get("base_rev"),
    )

    parameters["base_rev"] = _determine_more_accurate_base_rev(
        repo,
        base_ref=parameters["base_ref"],
        candidate_base_rev=options.get("base_rev"),
        head_rev=options.get("head_rev"),
        env_prefix=_get_env_prefix(graph_config),
    )

    # Define default filter list, as most configurations shouldn't need
    # custom filters.
    parameters["filters"] = [
        "target_tasks_method",
    ]
    parameters["enable_always_target"] = ["docker-image"]
    parameters["existing_tasks"] = {}
    parameters["do_not_optimize"] = []
    parameters["build_number"] = 1
    parameters["version"] = get_version(product_dir)
    parameters["app_version"] = get_app_version(product_dir)
    parameters["message"] = try_syntax_from_message(commit_message)
    parameters["hg_branch"] = get_hg_revision_branch(
        GECKO, revision=parameters["head_rev"]
    )
    parameters["files_changed"] = sorted(
        get_changed_files(parameters["head_repository"], parameters["head_rev"])
    )
    parameters["next_version"] = None
    parameters["optimize_strategies"] = None
    parameters["optimize_target_tasks"] = True
    parameters["phabricator_diff"] = None
    parameters["release_type"] = ""
    parameters["release_eta"] = ""
    parameters["release_enable_partner_repack"] = False
    parameters["release_enable_partner_attribution"] = False
    parameters["release_partners"] = []
    parameters["release_partner_config"] = {}
    parameters["release_partner_build_number"] = 1
    parameters["release_enable_emefree"] = False
    parameters["release_product"] = None
    parameters["required_signoffs"] = []
    parameters["signoff_urls"] = {}
    parameters["test_manifest_loader"] = "default"
    parameters["try_mode"] = None
    parameters["try_task_config"] = {}
    parameters["try_options"] = None

    # owner must be an email, but sometimes (e.g., for ffxbld) it is not, in which
    # case, fake it
    if "@" not in parameters["owner"]:
        parameters["owner"] += "@noreply.mozilla.org"

    # use the pushdate as build_date if given, else use current time
    parameters["build_date"] = parameters["pushdate"] or int(time.time())
    # moz_build_date is the build identifier based on build_date
    parameters["moz_build_date"] = time.strftime(
        "%Y%m%d%H%M%S", time.gmtime(parameters["build_date"])
    )

    project = parameters["project"]
    try:
        parameters.update(PER_PROJECT_PARAMETERS[project])
    except KeyError:
        logger.warning(
            "using default project parameters; add {} to "
            "PER_PROJECT_PARAMETERS in {} to customize behavior "
            "for this project".format(project, __file__)
        )
        parameters.update(PER_PROJECT_PARAMETERS["default"])

    # `target_tasks_method` has higher precedence than `project` parameters
    if options.get("target_tasks_method"):
        parameters["target_tasks_method"] = options["target_tasks_method"]

    # ..but can be overridden by the commit message: if it contains the special
    # string "DONTBUILD" and this is an on-push decision task, then use the
    # special 'nothing' target task method.
    if "DONTBUILD" in commit_message and options["tasks_for"] == "hg-push":
        parameters["target_tasks_method"] = "nothing"

    if options.get("include_push_tasks"):
        get_existing_tasks(options.get("rebuild_kinds", []), parameters, graph_config)

    # If the target method is nightly, we should build partials. This means
    # knowing what has been released previously.
    # An empty release_history is fine, it just means no partials will be built
    parameters.setdefault("release_history", dict())
    if "nightly" in parameters.get("target_tasks_method", ""):
        parameters["release_history"] = populate_release_history("Firefox", project)

    if options.get("try_task_config_file"):
        task_config_file = os.path.abspath(options.get("try_task_config_file"))
    else:
        # if try_task_config.json is present, load it
        task_config_file = os.path.join(os.getcwd(), "try_task_config.json")

    # load try settings
    if "try" in project and options["tasks_for"] == "hg-push":
        set_try_config(parameters, task_config_file)

    if options.get("optimize_target_tasks") is not None:
        parameters["optimize_target_tasks"] = options["optimize_target_tasks"]

    # Determine if this should be a backstop push.
    parameters["backstop"] = is_backstop(parameters)

    # For the android perf tasks, run them half as often
    parameters["android_perftest_backstop"] = is_backstop(
        parameters,
        push_interval=40,
        time_interval=60 * 8,
        backstop_strategy="android_perftest_backstop",
    )

    if "decision-parameters" in graph_config["taskgraph"]:
        find_object(graph_config["taskgraph"]["decision-parameters"])(
            graph_config, parameters
        )

    result = Parameters(**parameters)
    result.check()
    return result
def get_existing_tasks(rebuild_kinds, parameters, graph_config):
    """
    Find the decision task corresponding to the on-push graph, and store in
    ``parameters["existing_tasks"]`` a mapping of labels to task-ids from it.
    This will skip the kinds specified by `rebuild_kinds`.

    Exits the process (status 1) if no existing push decision task can be
    found after retrying.
    """
    try:
        # The on-push decision task may not be indexed yet; retry for a while.
        decision_task = retry(
            find_decision_task,
            args=(parameters, graph_config),
            attempts=4,
            sleeptime=5 * 60,
        )
    except Exception:
        logger.exception("Didn't find existing push task.")
        sys.exit(1)
    _, task_graph = TaskGraph.from_json(
        get_artifact(decision_task, "public/full-task-graph.json")
    )
    parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
        task_graph, [decision_task], rebuild_kinds
    )
def set_try_config(parameters, task_config_file):
    """Apply try-push settings to *parameters* in place.

    If ``task_config_file`` exists it is loaded as a try_task_config.json
    (schema version 1 or 2) and sets ``try_mode``/``try_task_config``.
    Afterwards, any ``try:`` syntax in the commit message takes precedence
    and switches ``try_mode`` to ``try_option_syntax``.

    Raises:
        Exception: for an unknown try_task_config.json version.
    """
    if os.path.isfile(task_config_file):
        logger.info(f"using try tasks from {task_config_file}")
        with open(task_config_file) as fh:
            task_config = json.load(fh)
        task_config_version = task_config.pop("version", 1)
        if task_config_version == 1:
            parameters["try_mode"] = "try_task_config"
            parameters["try_task_config"] = task_config
        elif task_config_version == 2:
            parameters.update(task_config["parameters"])
            parameters["try_mode"] = "try_task_config"
        else:
            raise Exception(
                f"Unknown `try_task_config.json` version: {task_config_version}"
            )

    if "try:" in parameters["message"]:
        parameters["try_mode"] = "try_option_syntax"
        parameters.update(parse_message(parameters["message"]))
    else:
        parameters["try_options"] = None
def set_decision_indexes(decision_task_id, params, graph_config):
    """Insert extra index routes that point at this decision task."""
    index_paths = []
    if params["backstop"]:
        # When two Decision tasks run at nearly the same time, it's possible
        # they both end up being backstops if the second checks the backstop
        # index before the first inserts it. Insert this index first to reduce
        # the chances of that happening.
        index_paths.append(BACKSTOP_INDEX)
    if params["android_perftest_backstop"]:
        index_paths.append(ANDROID_PERFTEST_BACKSTOP_INDEX)

    substitutions = dict(params)
    substitutions["trust-domain"] = graph_config["trust-domain"]

    for template in index_paths:
        insert_index(template.format(**substitutions), decision_task_id, use_proxy=True)
def write_artifact(filename, data):
    """Serialize *data* into ``ARTIFACTS_DIR/filename``.

    The format is chosen from the extension: ``.yml`` (YAML), ``.json``
    (pretty-printed JSON) or ``.json.gz`` (gzip-compressed JSON).

    Raises:
        TypeError: for any other extension.
    """
    # NOTE: the f-strings here had lost their {filename} placeholder in the
    # scraped copy; restored so logs and errors name the actual file.
    logger.info(f"writing artifact file `{filename}`")
    if not os.path.isdir(ARTIFACTS_DIR):
        os.mkdir(ARTIFACTS_DIR)
    path = os.path.join(ARTIFACTS_DIR, filename)
    if filename.endswith(".yml"):
        with open(path, "w") as f:
            yaml.safe_dump(data, f, allow_unicode=True, default_flow_style=False)
    elif filename.endswith(".json"):
        with open(path, "w") as f:
            json.dump(data, f, sort_keys=True, indent=2, separators=(",", ": "))
    elif filename.endswith(".json.gz"):
        import gzip

        with gzip.open(path, "wb") as f:
            f.write(json.dumps(data).encode("utf-8"))
    else:
        raise TypeError(f"Don't know how to write to {filename}")
def read_artifact(filename):
    """Deserialize ``ARTIFACTS_DIR/filename``; format chosen by extension.

    Supports ``.yml``, ``.json`` and ``.json.gz`` (mirroring write_artifact).

    Raises:
        TypeError: for any other extension.
    """
    path = os.path.join(ARTIFACTS_DIR, filename)
    if filename.endswith(".yml"):
        return load_yaml(path, filename)
    if filename.endswith(".json"):
        with open(path) as f:
            return json.load(f)
    if filename.endswith(".json.gz"):
        import gzip

        with gzip.open(path, "rb") as f:
            # Fix: the original called json.load(f.decode("utf-8")), but file
            # objects have no decode() — that raised AttributeError. json.load
            # accepts a binary file object directly (Python 3.6+).
            return json.load(f)
    raise TypeError(f"Don't know how to read {filename}")
def rename_artifact(src, dest):
    """Rename an artifact file within ARTIFACTS_DIR."""
    src_path = os.path.join(ARTIFACTS_DIR, src)
    dest_path = os.path.join(ARTIFACTS_DIR, dest)
    os.rename(src_path, dest_path)