# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.


import json
import logging
import os
import shutil
import sys
import time
from collections import defaultdict

import yaml
from redo import retry
from taskgraph import create
from taskgraph.create import create_tasks

# TODO: Let standalone taskgraph generate parameters instead of calling internals
from taskgraph.decision import (
    _determine_more_accurate_base_ref,
    _determine_more_accurate_base_rev,
    _get_env_prefix,
)
from taskgraph.generator import TaskGraphGenerator
from taskgraph.parameters import Parameters
from taskgraph.taskgraph import TaskGraph
from taskgraph.util.python_path import find_object
from taskgraph.util.taskcluster import get_artifact
from taskgraph.util.vcs import get_repository
from taskgraph.util.yaml import load_yaml

from . import GECKO
from .actions import render_actions_json
from .parameters import get_app_version, get_version
from .try_option_syntax import parse_message
from .util.backstop import BACKSTOP_INDEX, is_backstop
from .util.bugbug import push_schedules
from .util.chunking import resolver
from .util.hg import get_hg_commit_message, get_hg_revision_branch
from .util.partials import populate_release_history
from .util.taskcluster import insert_index
from .util.taskgraph import find_decision_task, find_existing_tasks_from_previous_kinds

logger = logging.getLogger(__name__)

ARTIFACTS_DIR = "artifacts"

# For each project, this gives a set of parameters specific to the project.
# See `taskcluster/docs/parameters.rst` for information on parameters.
PER_PROJECT_PARAMETERS = {
    "try": {
        "enable_always_target": True,
        "target_tasks_method": "try_tasks",
        "release_type": "nightly",
    },
    "kaios-try": {
        "target_tasks_method": "try_tasks",
    },
    "ash": {
        "target_tasks_method": "default",
    },
    "cedar": {
        "target_tasks_method": "default",
    },
    "holly": {
        "enable_always_target": True,
        "target_tasks_method": "holly_tasks",
    },
    "oak": {
        "target_tasks_method": "default",
        "release_type": "nightly-oak",
    },
    "graphics": {
        "target_tasks_method": "graphics_tasks",
    },
    "autoland": {
        "optimize_strategies": "gecko_taskgraph.optimize:project.autoland",
        "target_tasks_method": "autoland_tasks",
        "test_manifest_loader": "bugbug",  # Remove this line to disable "manifest scheduling".
    },
    "mozilla-central": {
        "target_tasks_method": "mozilla_central_tasks",
        "release_type": "nightly",
    },
    "mozilla-beta": {
        "target_tasks_method": "mozilla_beta_tasks",
        "release_type": "beta",
    },
    "mozilla-release": {
        "target_tasks_method": "mozilla_release_tasks",
        "release_type": "release",
    },
    "mozilla-esr115": {
        "target_tasks_method": "mozilla_esr115_tasks",
        "release_type": "esr115",
    },
    "pine": {
        "target_tasks_method": "pine_tasks",
        "release_type": "nightly-pine",
    },
    "larch": {
        "target_tasks_method": "larch_tasks",
        "release_type": "nightly-larch",
    },
    "kaios": {
        "target_tasks_method": "kaios_tasks",
    },
    "toolchains": {
        "target_tasks_method": "mozilla_central_tasks",
    },
    # the default parameters are used for projects that do not match above.
    "default": {
        "target_tasks_method": "default",
    },
}


def full_task_graph_to_runnable_jobs(full_task_json):
    runnable_jobs = {}
    for label, node in full_task_json.items():
        if not ("extra" in node["task"] and "treeherder" in node["task"]["extra"]):
            continue

        th = node["task"]["extra"]["treeherder"]
        runnable_jobs[label] = {"symbol": th["symbol"]}

        for i in ("groupName", "groupSymbol", "collection"):
            if i in th:
                runnable_jobs[label][i] = th[i]
        if th.get("machine", {}).get("platform"):
            runnable_jobs[label]["platform"] = th["machine"]["platform"]

    return runnable_jobs


def full_task_graph_to_manifests_by_task(full_task_json):
    manifests_by_task = defaultdict(list)
    for label, node in full_task_json.items():
        manifests = node["attributes"].get("test_manifests")
        if not manifests:
            continue

        manifests_by_task[label].extend(manifests)
    return manifests_by_task


def try_syntax_from_message(message):
    """
    Parse the try syntax out of a commit message, returning '' if none is
    found.
    """
    try_idx = message.find("try:")
    if try_idx == -1:
        return ""
    return message[try_idx:].split("\n", 1)[0]


def taskgraph_decision(options, parameters=None):
    """
    Run the decision task. This function implements `mach taskgraph decision`,
    and is responsible for

     * processing decision task command-line options into parameters
     * running task-graph generation exactly the same way the other `mach
       taskgraph` commands do
     * generating a set of artifacts to memorialize the graph
     * calling TaskCluster APIs to create the graph
    """

    parameters = parameters or (
        lambda graph_config: get_decision_parameters(graph_config, options)
    )

    decision_task_id = os.environ["TASK_ID"]

    # create a TaskGraphGenerator instance
    tgg = TaskGraphGenerator(
        root_dir=options.get("root"),
        parameters=parameters,
        decision_task_id=decision_task_id,
        write_artifacts=True,
    )

    if not create.testing:
        # set additional index paths for the decision task
        set_decision_indexes(decision_task_id, tgg.parameters, tgg.graph_config)

    # write out the parameters used to generate this graph
    write_artifact("parameters.yml", dict(**tgg.parameters))

    # write out the public/actions.json file
    write_artifact(
        "actions.json",
        render_actions_json(tgg.parameters, tgg.graph_config, decision_task_id),
    )

    # write out the full graph for reference
    full_task_json = tgg.full_task_graph.to_json()
    write_artifact("full-task-graph.json", full_task_json)

    # write out the public/runnable-jobs.json file
    write_artifact(
        "runnable-jobs.json", full_task_graph_to_runnable_jobs(full_task_json)
    )

    # write out the public/manifests-by-task.json file
    write_artifact(
        "manifests-by-task.json.gz",
        full_task_graph_to_manifests_by_task(full_task_json),
    )

    # write out the public/tests-by-manifest.json file
    write_artifact("tests-by-manifest.json.gz", resolver.tests_by_manifest)

    # this is just a test to check whether the from_json() function is working
    _, _ = TaskGraph.from_json(full_task_json)

    # write out the target task set to allow reproducing this as input
    write_artifact("target-tasks.json", list(tgg.target_task_set.tasks.keys()))

    # write out the optimized task graph to describe what will actually happen,
    # and the map of labels to taskids
    write_artifact("task-graph.json", tgg.morphed_task_graph.to_json())
    write_artifact("label-to-taskid.json", tgg.label_to_taskid)

    # write bugbug scheduling information if it was invoked
    if len(push_schedules) > 0:
        write_artifact("bugbug-push-schedules.json", push_schedules.popitem()[1])

    # cache run-task & misc/fetch-content
    scripts_root_dir = os.path.join(GECKO, "taskcluster/scripts")
    run_task_file_path = os.path.join(scripts_root_dir, "run-task")
    fetch_content_file_path = os.path.join(scripts_root_dir, "misc/fetch-content")
    shutil.copy2(run_task_file_path, ARTIFACTS_DIR)
    shutil.copy2(fetch_content_file_path, ARTIFACTS_DIR)

    # actually create the graph
    create_tasks(
        tgg.graph_config,
        tgg.morphed_task_graph,
        tgg.label_to_taskid,
        tgg.parameters,
        decision_task_id=decision_task_id,
    )
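
# Note on the `parameters` argument above: it is normally left as None so that
# get_decision_parameters() derives the parameters from the decision task's
# command-line options, but callers may also pass a callable taking the graph
# config. A minimal sketch (assuming `prebuilt` is an existing Parameters
# instance):
#
#     taskgraph_decision(options, parameters=lambda graph_config: prebuilt)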


def get_decision_parameters(graph_config, options):
    """
    Load parameters from the command-line options for 'taskgraph decision'.
    This also applies per-project parameters, based on the given project.

    """
    product_dir = graph_config["product-dir"]

    parameters = {
        n: options[n]
        for n in [
            "base_repository",
            "base_ref",
            "base_rev",
            "head_repository",
            "head_rev",
            "head_ref",
            "head_tag",
            "project",
            "pushlog_id",
            "pushdate",
            "repository_type",
            "owner",
            "level",
            "target_tasks_method",
            "tasks_for",
        ]
        if n in options
    }

    commit_message = get_hg_commit_message(os.path.join(GECKO, product_dir))

    repo_path = os.getcwd()
    repo = get_repository(repo_path)
    parameters["base_ref"] = _determine_more_accurate_base_ref(
        repo,
        candidate_base_ref=options.get("base_ref"),
        head_ref=options.get("head_ref"),
        base_rev=options.get("base_rev"),
    )

    parameters["base_rev"] = _determine_more_accurate_base_rev(
        repo,
        base_ref=parameters["base_ref"],
        candidate_base_rev=options.get("base_rev"),
        head_rev=options.get("head_rev"),
        env_prefix=_get_env_prefix(graph_config),
    )

    # Define default filter list, as most configurations shouldn't need
    # custom filters.
    parameters["filters"] = [
        "target_tasks_method",
    ]
    parameters["enable_always_target"] = ["docker-image"]
    parameters["existing_tasks"] = {}
    parameters["do_not_optimize"] = []
    parameters["build_number"] = 1
    parameters["version"] = get_version(product_dir)
    parameters["app_version"] = get_app_version(product_dir)
    parameters["message"] = try_syntax_from_message(commit_message)
    parameters["hg_branch"] = get_hg_revision_branch(
        GECKO, revision=parameters["head_rev"]
    )
    parameters["next_version"] = None
    parameters["optimize_strategies"] = None
    parameters["optimize_target_tasks"] = True
    parameters["phabricator_diff"] = None
    parameters["release_type"] = ""
    parameters["release_eta"] = ""
    parameters["release_enable_partner_repack"] = False
    parameters["release_enable_partner_attribution"] = False
    parameters["release_partners"] = []
    parameters["release_partner_config"] = {}
    parameters["release_partner_build_number"] = 1
    parameters["release_enable_emefree"] = False
    parameters["release_product"] = None
    parameters["required_signoffs"] = []
    parameters["signoff_urls"] = {}
    parameters["test_manifest_loader"] = "default"
    parameters["try_mode"] = None
    parameters["try_task_config"] = {}
    parameters["try_options"] = None

    # owner must be an email, but sometimes (e.g., for ffxbld) it is not, in which
    # case, fake it
    if "@" not in parameters["owner"]:
        parameters["owner"] += "@noreply.mozilla.org"

    # use the pushdate as build_date if given, else use current time
    parameters["build_date"] = parameters["pushdate"] or int(time.time())
    # moz_build_date is the build identifier based on build_date
    parameters["moz_build_date"] = time.strftime(
        "%Y%m%d%H%M%S", time.gmtime(parameters["build_date"])
    )

    project = parameters["project"]
    try:
        parameters.update(PER_PROJECT_PARAMETERS[project])
    except KeyError:
        logger.warning(
            "using default project parameters; add {} to "
            "PER_PROJECT_PARAMETERS in {} to customize behavior "
            "for this project".format(project, __file__)
        )
        parameters.update(PER_PROJECT_PARAMETERS["default"])

    # `target_tasks_method` has higher precedence than `project` parameters
    if options.get("target_tasks_method"):
        parameters["target_tasks_method"] = options["target_tasks_method"]

    # ..but can be overridden by the commit message: if it contains the special
    # string "DONTBUILD" and this is an on-push decision task, then use the
    # special 'nothing' target task method.
    if "DONTBUILD" in commit_message and options["tasks_for"] == "hg-push":
        parameters["target_tasks_method"] = "nothing"

    if options.get("include_push_tasks"):
        get_existing_tasks(options.get("rebuild_kinds", []), parameters, graph_config)

    # If the target method is nightly, we should build partials. This means
    # knowing what has been released previously.
    # An empty release_history is fine, it just means no partials will be built
    parameters.setdefault("release_history", dict())
    if "nightly" in parameters.get("target_tasks_method", ""):
        parameters["release_history"] = populate_release_history("Firefox", project)

    if options.get("try_task_config_file"):
        task_config_file = os.path.abspath(options.get("try_task_config_file"))
    else:
        # if try_task_config.json is present, load it
        task_config_file = os.path.join(os.getcwd(), "try_task_config.json")

    # load try settings
    if "try" in project and options["tasks_for"] == "hg-push":
        set_try_config(parameters, task_config_file)

    if options.get("optimize_target_tasks") is not None:
        parameters["optimize_target_tasks"] = options["optimize_target_tasks"]

    # Determine if this should be a backstop push.
    parameters["backstop"] = is_backstop(parameters)

    if "decision-parameters" in graph_config["taskgraph"]:
        find_object(graph_config["taskgraph"]["decision-parameters"])(
            graph_config, parameters
        )

    result = Parameters(**parameters)
    result.check()
    return result
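
# The graph config hook used at the end of get_decision_parameters(): when
# graph_config["taskgraph"]["decision-parameters"] names a function, it is
# resolved with find_object() and called with (graph_config, parameters) so it
# can adjust the parameters dict in place before validation. A minimal sketch
# of such a hook (hypothetical module and behaviour):
#
#     def decision_parameters(graph_config, parameters):
#         parameters.setdefault("do_not_optimize", [])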


def get_existing_tasks(rebuild_kinds, parameters, graph_config):
    """
    Find the decision task corresponding to the on-push graph, and return
    a mapping of labels to task-ids from it. This will skip the kinds specified
    by `rebuild_kinds`.
    """
    try:
        decision_task = retry(
            find_decision_task,
            args=(parameters, graph_config),
        )
    except Exception:
        logger.exception("Didn't find existing push task.")
        sys.exit(1)

    _, task_graph = TaskGraph.from_json(
        get_artifact(decision_task, "public/full-task-graph.json")
    )
    parameters["existing_tasks"] = find_existing_tasks_from_previous_kinds(
        task_graph, [decision_task], rebuild_kinds
    )


def set_try_config(parameters, task_config_file):
    if os.path.isfile(task_config_file):
        logger.info(f"using try tasks from {task_config_file}")
        with open(task_config_file) as fh:
            task_config = json.load(fh)
        task_config_version = task_config.pop("version", 1)
        if task_config_version == 1:
            parameters["try_mode"] = "try_task_config"
            parameters["try_task_config"] = task_config
        elif task_config_version == 2:
            parameters.update(task_config["parameters"])
            parameters["try_mode"] = "try_task_config"
        else:
            raise Exception(
                f"Unknown `try_task_config.json` version: {task_config_version}"
            )

    if "try:" in parameters["message"]:
        parameters["try_mode"] = "try_option_syntax"
        parameters.update(parse_message(parameters["message"]))
    else:
        parameters["try_options"] = None


def set_decision_indexes(decision_task_id, params, graph_config):
    index_paths = []
    if params["backstop"]:
        index_paths.append(BACKSTOP_INDEX)

    subs = params.copy()
    subs["trust-domain"] = graph_config["trust-domain"]

    index_paths = [i.format(**subs) for i in index_paths]
    for index_path in index_paths:
        insert_index(index_path, decision_task_id, use_proxy=True)
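
# The entries in index_paths above are format templates expanded with the push
# parameters plus the graph config's trust-domain. For instance, a template of
# the general shape "{trust-domain}.v2.{project}.latest.taskgraph.backstop"
# (illustrative; the real value is BACKSTOP_INDEX in util/backstop.py) would
# expand to "gecko.v2.autoland.latest.taskgraph.backstop" for an autoland
# backstop push.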


def write_artifact(filename, data):
    logger.info(f"writing artifact file `{filename}`")
    if not os.path.isdir(ARTIFACTS_DIR):
        os.mkdir(ARTIFACTS_DIR)
    path = os.path.join(ARTIFACTS_DIR, filename)
    if filename.endswith(".yml"):
        with open(path, "w") as f:
            yaml.safe_dump(data, f, allow_unicode=True, default_flow_style=False)
    elif filename.endswith(".json"):
        with open(path, "w") as f:
            json.dump(data, f, sort_keys=True, indent=2, separators=(",", ": "))
    elif filename.endswith(".json.gz"):
        import gzip

        with gzip.open(path, "wb") as f:
            f.write(json.dumps(data).encode("utf-8"))
    else:
        raise TypeError(f"Don't know how to write to {filename}")


def read_artifact(filename):
    path = os.path.join(ARTIFACTS_DIR, filename)
    if filename.endswith(".yml"):
        return load_yaml(path, filename)
    if filename.endswith(".json"):
        with open(path) as f:
            return json.load(f)
    if filename.endswith(".json.gz"):
        import gzip

        with gzip.open(path, "rb") as f:
            return json.load(f)

    raise TypeError(f"Don't know how to read {filename}")


def rename_artifact(src, dest):
    os.rename(os.path.join(ARTIFACTS_DIR, src), os.path.join(ARTIFACTS_DIR, dest))