Bug 1890689 accumulate input in LargerReceiverBlockSizeThanDesiredBuffering GTest...
[gecko.git] / taskcluster / gecko_taskgraph / main.py
blobe261f26c8008eca65061799a013d972ccfa66ee1
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
5 import argparse
6 import atexit
7 import json
8 import logging
9 import os
10 import re
11 import shutil
12 import subprocess
13 import sys
14 import tempfile
15 import traceback
16 from collections import namedtuple
17 from concurrent.futures import ProcessPoolExecutor, as_completed
18 from pathlib import Path
19 from typing import Any, List
21 import appdirs
22 import yaml
24 from gecko_taskgraph import GECKO
25 from gecko_taskgraph.files_changed import get_locally_changed_files
# Registry of CLI subcommands, keyed by subcommand name and populated by the
# @command decorator below. `create_parser` consumes this to build argparse.
Command = namedtuple("Command", ["func", "args", "kwargs", "defaults"])
commands = {}


def command(*args, **kwargs):
    """Decorator registering a function as the subcommand named ``args[0]``.

    ``args``/``kwargs`` are forwarded to ``subparsers.add_parser``; the
    optional ``defaults`` keyword supplies extra values injected into the
    parsed options via ``set_defaults``.
    """
    defaults = kwargs.pop("defaults", {})

    def decorator(func):
        commands[args[0]] = Command(func, args, kwargs, defaults)
        return func

    return decorator
def argument(*args, **kwargs):
    """Decorator attaching an argparse argument spec to a command function.

    Specs accumulate on ``func.args`` (created on first use) and are later
    replayed into ``subparser.add_argument`` by ``create_parser``.
    """

    def decorator(func):
        if not hasattr(func, "args"):
            func.args = []
        func.args.append((args, kwargs))
        return func

    return decorator
def format_taskgraph_labels(taskgraph):
    """Return the graph's task labels, one per line, sorted alphabetically."""
    return "\n".join(
        sorted(
            taskgraph.tasks[index].label for index in taskgraph.graph.visit_postorder()
        )
    )
def format_taskgraph_json(taskgraph):
    """Return the taskgraph serialized as pretty-printed, key-sorted JSON."""
    return json.dumps(
        taskgraph.to_json(), sort_keys=True, indent=2, separators=(",", ": ")
    )
def format_taskgraph_yaml(taskgraph):
    """Return the taskgraph serialized as a YAML document."""
    from mozbuild.util import ReadOnlyDict

    class TGDumper(yaml.SafeDumper):
        # Anchors/aliases make the output harder to read; always expand.
        def ignore_aliases(self, data):
            return True

        # Serialize ReadOnlyDict like a plain mapping.
        def represent_ro_dict(self, data):
            return self.represent_dict(dict(data))

    TGDumper.add_representer(ReadOnlyDict, TGDumper.represent_ro_dict)

    return yaml.dump(taskgraph.to_json(), Dumper=TGDumper, default_flow_style=False)
def get_filtered_taskgraph(taskgraph, tasksregex, exclude_keys):
    """
    Filter all the tasks on basis of a regular expression
    and returns a new TaskGraph object.

    ``tasksregex`` keeps only tasks whose label matches (edges are kept only
    when both endpoints match); ``exclude_keys`` removes dotted-path keys from
    each task's JSON representation.
    """
    from taskgraph.graph import Graph
    from taskgraph.task import Task
    from taskgraph.taskgraph import TaskGraph

    if tasksregex:
        named_links_dict = taskgraph.graph.named_links_dict()
        filteredtasks = {}
        filterededges = set()
        regexprogram = re.compile(tasksregex)

        for key in taskgraph.graph.visit_postorder():
            task = taskgraph.tasks[key]
            if regexprogram.match(task.label):
                filteredtasks[key] = task
                # Only keep edges whose dependency also matches the filter.
                for depname, dep in named_links_dict[key].items():
                    if regexprogram.match(dep):
                        filterededges.add((key, dep, depname))

        taskgraph = TaskGraph(filteredtasks, Graph(set(filteredtasks), filterededges))

    if exclude_keys:
        for label, task in taskgraph.tasks.items():
            task_dict = task.to_json()
            for key in exclude_keys:
                obj = task_dict
                attrs = key.split(".")
                # Walk the dotted path as far as it exists; delete the leaf.
                while attrs[0] in obj:
                    if len(attrs) == 1:
                        del obj[attrs[0]]
                        break
                    obj = obj[attrs[0]]
                    attrs = attrs[1:]
            taskgraph.tasks[label] = Task.from_json(task_dict)

    return taskgraph
# Dispatch table mapping the --format option value to a renderer.
FORMAT_METHODS = {
    "labels": format_taskgraph_labels,
    "json": format_taskgraph_json,
    "yaml": format_taskgraph_yaml,
}
def get_taskgraph_generator(root, parameters):
    """Helper function to make testing a little easier.

    Returns a ``TaskGraphGenerator`` rooted at ``root`` (the taskgraph
    definition directory) using the given ``parameters``.
    """
    from taskgraph.generator import TaskGraphGenerator

    return TaskGraphGenerator(root_dir=root, parameters=parameters)
def format_taskgraph(options, parameters, overrides, logfile=None):
    """Generate one taskgraph and render it with the requested formatter.

    When ``logfile`` is given, root logging is redirected there (reusing the
    previous handler's formatter) so parallel generations don't interleave
    on stderr.
    """
    import taskgraph
    from taskgraph.parameters import parameters_loader

    if logfile:
        handler = logging.FileHandler(logfile, mode="w")
        if logging.root.handlers:
            oldhandler = logging.root.handlers[-1]
            logging.root.removeHandler(oldhandler)
            handler.setFormatter(oldhandler.formatter)
        logging.root.addHandler(handler)

    if options["fast"]:
        taskgraph.fast = True

    if isinstance(parameters, str):
        parameters = parameters_loader(
            parameters,
            overrides=overrides,
            strict=False,
        )

    tgg = get_taskgraph_generator(options.get("root"), parameters)

    tg = getattr(tgg, options["graph_attr"])
    tg = get_filtered_taskgraph(tg, options["tasks_regex"], options["exclude_keys"])
    format_method = FORMAT_METHODS[options["format"] or "labels"]
    return format_method(tg)
def dump_output(out, path=None, params_spec=None):
    """Write ``out`` to ``path`` (templated with the params name) or stdout.

    When ``path`` is None, the result goes to stdout with a note on stderr
    identifying which parameter spec produced it.
    """
    from taskgraph.parameters import Parameters

    params_name = Parameters.format_spec(params_spec)
    fh = None
    if path:
        # Substitute params name into file path if necessary
        if params_spec and "{params}" not in path:
            name, ext = os.path.splitext(path)
            name += "_{params}"
            path = name + ext

        path = path.format(params=params_name)
        fh = open(path, "w")
    else:
        print(
            "Dumping result with parameters from {}:".format(params_name),
            file=sys.stderr,
        )
    try:
        print(out + "\n", file=fh)
    finally:
        # Bug fix: the file handle was previously never closed.
        if fh is not None:
            fh.close()
def generate_taskgraph(options, parameters, overrides, logdir):
    """Generate and dump one taskgraph per parameter spec.

    A single spec runs in-process; multiple specs fan out over a process
    pool, each logging to its own file under ``logdir``.
    """
    from taskgraph.parameters import Parameters

    def logfile(spec):
        """Determine logfile given a parameters specification."""
        if logdir is None:
            return None
        return os.path.join(
            logdir,
            "{}_{}.log".format(options["graph_attr"], Parameters.format_spec(spec)),
        )

    # Don't bother using futures if there's only one parameter. This can make
    # tracebacks a little more readable and avoids additional process overhead.
    if len(parameters) == 1:
        spec = parameters[0]
        out = format_taskgraph(options, spec, overrides, logfile(spec))
        dump_output(out, options["output_file"])
        return

    futures = {}
    with ProcessPoolExecutor(max_workers=options["max_workers"]) as executor:
        for spec in parameters:
            f = executor.submit(
                format_taskgraph, options, spec, overrides, logfile(spec)
            )
            futures[f] = spec

        for future in as_completed(futures):
            output_file = options["output_file"]
            spec = futures[future]
            e = future.exception()
            if e:
                out = "".join(traceback.format_exception(type(e), e, e.__traceback__))
                if options["diff"]:
                    # Dump to console so we don't accidentally diff the tracebacks.
                    output_file = None
            else:
                out = future.result()

            dump_output(
                out,
                path=output_file,
                params_spec=spec if len(parameters) > 1 else None,
            )
@command(
    "tasks",
    help="Show all tasks in the taskgraph.",
    defaults={"graph_attr": "full_task_set"},
)
@command(
    "full", help="Show the full taskgraph.", defaults={"graph_attr": "full_task_graph"}
)
@command(
    "target",
    help="Show the set of target tasks.",
    defaults={"graph_attr": "target_task_set"},
)
@command(
    "target-graph",
    help="Show the target graph.",
    defaults={"graph_attr": "target_task_graph"},
)
@command(
    "optimized",
    help="Show the optimized graph.",
    defaults={"graph_attr": "optimized_task_graph"},
)
@command(
    "morphed",
    help="Show the morphed graph.",
    defaults={"graph_attr": "morphed_task_graph"},
)
@argument("--root", "-r", help="root of the taskgraph definition relative to topsrcdir")
@argument("--quiet", "-q", action="store_true", help="suppress all logging output")
@argument(
    "--verbose", "-v", action="store_true", help="include debug-level logging output"
)
@argument(
    "--json",
    "-J",
    action="store_const",
    dest="format",
    const="json",
    help="Output task graph as a JSON object",
)
@argument(
    "--yaml",
    "-Y",
    action="store_const",
    dest="format",
    const="yaml",
    help="Output task graph as a YAML object",
)
@argument(
    "--labels",
    "-L",
    action="store_const",
    dest="format",
    const="labels",
    help="Output the label for each task in the task graph (default)",
)
@argument(
    "--parameters",
    "-p",
    default=None,
    action="append",
    help="Parameters to use for the generation. Can be a path to file (.yml or "
    ".json; see `taskcluster/docs/parameters.rst`), a directory (containing "
    "parameters files), a url, of the form `project=mozilla-central` to download "
    "latest parameters file for the specified project from CI, or of the form "
    "`task-id=<decision task id>` to download parameters from the specified "
    "decision task. Can be specified multiple times, in which case multiple "
    "generations will happen from the same invocation (one per parameters "
    "specified).",
)
@argument(
    "--force-local-files-changed",
    default=False,
    action="store_true",
    help="Compute the 'files-changed' parameter from local version control, "
    "even when explicitly using a parameter set that already has it defined. "
    "Note that this is already the default behaviour when no parameters are "
    "specified.",
)
@argument(
    "--no-optimize",
    dest="optimize",
    action="store_false",
    default="true",
    help="do not remove tasks from the graph that are found in the "
    "index (a.k.a. optimize the graph)",
)
@argument(
    "-o",
    "--output-file",
    default=None,
    help="file path to store generated output.",
)
@argument(
    "--tasks-regex",
    "--tasks",
    default=None,
    help="only return tasks with labels matching this regular " "expression.",
)
@argument(
    "--exclude-key",
    default=None,
    dest="exclude_keys",
    action="append",
    help="Exclude the specified key (using dot notation) from the final result. "
    "This is mainly useful with '--diff' to filter out expected differences.",
)
@argument(
    "-k",
    "--target-kind",
    dest="target_kinds",
    action="append",
    default=[],
    help="only return tasks that are of the given kind, or their dependencies.",
)
@argument(
    "-F",
    "--fast",
    default=False,
    action="store_true",
    help="enable fast task generation for local debugging.",
)
@argument(
    "--diff",
    const="default",
    nargs="?",
    default=None,
    help="Generate and diff the current taskgraph against another revision. "
    "Without args the base revision will be used. A revision specifier such as "
    "the hash or `.~1` (hg) or `HEAD~1` (git) can be used as well.",
)
@argument(
    "-j",
    "--max-workers",
    dest="max_workers",
    default=None,
    type=int,
    help="The maximum number of workers to use for parallel operations such as"
    "when multiple parameters files are passed.",
)
def show_taskgraph(options):
    """Generate and display a taskgraph, optionally diffing it against
    another revision (``--diff``)."""
    from mozversioncontrol import get_repository_object as get_repository
    from taskgraph.parameters import Parameters, parameters_loader

    if options.pop("verbose", False):
        logging.root.setLevel(logging.DEBUG)

    repo = None
    cur_ref = None
    diffdir = None
    output_file = options["output_file"]

    if options["diff"]:
        # --root argument is taskgraph's config at <repo>/taskcluster/ci
        repo_root = os.getcwd()
        if options["root"]:
            repo_root = f"{options['root']}/../.."
        repo = get_repository(repo_root)

        if not repo.working_directory_clean():
            print(
                "abort: can't diff taskgraph with dirty working directory",
                file=sys.stderr,
            )
            return 1

        # We want to return the working directory to the current state
        # as best we can after we're done. In all known cases, using
        # branch or bookmark (which are both available on the VCS object)
        # as `branch` is preferable to a specific revision.
        cur_ref = repo.branch or repo.head_ref[:12]

        diffdir = tempfile.mkdtemp()
        atexit.register(
            shutil.rmtree, diffdir
        )  # make sure the directory gets cleaned up
        options["output_file"] = os.path.join(
            diffdir, f"{options['graph_attr']}_{cur_ref}"
        )
        print(f"Generating {options['graph_attr']} @ {cur_ref}", file=sys.stderr)

    overrides = {
        "target-kinds": options.get("target_kinds"),
    }
    # Each entry is a str spec or a Parameters object. (The previous
    # annotation `List[Any[str, Parameters]]` was not a valid type
    # expression.)
    parameters: List[Any] = options.pop("parameters")
    if not parameters:
        parameters = [
            parameters_loader(None, strict=False, overrides=overrides)
        ]  # will use default values

        # This is the default behaviour anyway, so no need to re-compute.
        options["force_local_files_changed"] = False

    elif options["force_local_files_changed"]:
        overrides["files-changed"] = sorted(get_locally_changed_files(GECKO))

    # Expand any directory specs into the parameter files they contain.
    for param in parameters[:]:
        if isinstance(param, str) and os.path.isdir(param):
            parameters.remove(param)
            parameters.extend(
                [
                    p.as_posix()
                    for p in Path(param).iterdir()
                    if p.suffix in (".yml", ".json")
                ]
            )

    logdir = None
    if len(parameters) > 1:
        # Log to separate files for each process instead of stderr to
        # avoid interleaving.
        basename = os.path.basename(os.getcwd())
        logdir = os.path.join(appdirs.user_log_dir("taskgraph"), basename)
        if not os.path.isdir(logdir):
            os.makedirs(logdir)
    else:
        # Only setup logging if we have a single parameter spec. Otherwise
        # logging will go to files. This is also used as a hook for Gecko
        # to setup its `mach` based logging.
        setup_logging()

    generate_taskgraph(options, parameters, overrides, logdir)

    if options["diff"]:
        assert diffdir is not None
        assert repo is not None

        # Reload taskgraph modules to pick up changes and clear global state.
        for mod in sys.modules.copy():
            if mod != __name__ and mod.split(".", 1)[0].endswith(
                ("taskgraph", "mozbuild")
            ):
                del sys.modules[mod]

        # Ensure gecko_taskgraph is ahead of taskcluster_taskgraph in sys.path.
        # Without this, we may end up validating some things against the wrong
        # schema.
        import gecko_taskgraph  # noqa

        if options["diff"] == "default":
            base_ref = repo.base_ref
        else:
            base_ref = options["diff"]

        try:
            repo.update(base_ref)
            base_ref = repo.head_ref[:12]
            options["output_file"] = os.path.join(
                diffdir, f"{options['graph_attr']}_{base_ref}"
            )
            print(f"Generating {options['graph_attr']} @ {base_ref}", file=sys.stderr)
            generate_taskgraph(options, parameters, overrides, logdir)
        finally:
            # Always restore the working directory to its original ref.
            repo.update(cur_ref)

        # Generate diff(s)
        diffcmd = [
            "diff",
            "-U20",
            "--report-identical-files",
            f"--label={options['graph_attr']}@{base_ref}",
            f"--label={options['graph_attr']}@{cur_ref}",
        ]

        non_fatal_failures = []
        for spec in parameters:
            base_path = os.path.join(diffdir, f"{options['graph_attr']}_{base_ref}")
            cur_path = os.path.join(diffdir, f"{options['graph_attr']}_{cur_ref}")

            params_name = None
            if len(parameters) > 1:
                params_name = Parameters.format_spec(spec)
                base_path += f"_{params_name}"
                cur_path += f"_{params_name}"

            # If the base or cur files are missing it means that generation
            # failed. If one of them failed but not the other, the failure is
            # likely due to the patch making changes to taskgraph in modules
            # that don't get reloaded (safe to ignore). If both generations
            # failed, there's likely a real issue.
            base_missing = not os.path.isfile(base_path)
            cur_missing = not os.path.isfile(cur_path)
            if base_missing != cur_missing:  # != is equivalent to XOR for booleans
                non_fatal_failures.append(os.path.basename(base_path))
                continue

            try:
                # If the output file(s) are missing, this command will raise
                # CalledProcessError with a returncode > 1.
                proc = subprocess.run(
                    diffcmd + [base_path, cur_path],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    universal_newlines=True,
                    check=True,
                )
                diff_output = proc.stdout
                returncode = 0
            except subprocess.CalledProcessError as e:
                # returncode 1 simply means diffs were found
                if e.returncode != 1:
                    print(e.stderr, file=sys.stderr)
                    raise
                diff_output = e.output
                returncode = e.returncode

            dump_output(
                diff_output,
                # Don't bother saving file if no diffs were found. Log to
                # console in this case instead.
                path=None if returncode == 0 else output_file,
                params_spec=spec if len(parameters) > 1 else None,
            )

        if non_fatal_failures:
            failstr = "\n ".join(sorted(non_fatal_failures))
            print(
                "WARNING: Diff skipped for the following generation{s} "
                "due to failures:\n {failstr}".format(
                    s="s" if len(non_fatal_failures) > 1 else "", failstr=failstr
                ),
                file=sys.stderr,
            )

        if options["format"] != "json":
            print(
                "If you were expecting differences in task bodies "
                'you should pass "-J"\n',
                file=sys.stderr,
            )

    if len(parameters) > 1:
        print("See '{}' for logs".format(logdir), file=sys.stderr)
@command("build-image", help="Build a Docker image")
@argument("image_name", help="Name of the image to build")
@argument(
    "-t", "--tag", help="tag that the image should be built as.", metavar="name:tag"
)
@argument(
    "--context-only",
    help="File name the context tarball should be written to."
    "with this option it will only build the context.tar.",
    metavar="context.tar",
)
def build_image(args):
    """Build the named in-tree Docker image, or just its context tarball
    when --context-only is given."""
    from gecko_taskgraph.docker import build_context, build_image

    if args["context_only"] is None:
        build_image(args["image_name"], args["tag"], os.environ)
    else:
        build_context(args["image_name"], args["context_only"], os.environ)
@command(
    "load-image",
    help="Load a pre-built Docker image. Note that you need to "
    "have docker installed and running for this to work.",
)
@argument(
    "--task-id",
    help="Load the image at public/image.tar.zst in this task, "
    "rather than searching the index",
)
@argument(
    "-t",
    "--tag",
    help="tag that the image should be loaded as. If not "
    "image will be loaded with tag from the tarball",
    metavar="name:tag",
)
@argument(
    "image_name",
    nargs="?",
    help="Load the image of this name based on the current "
    "contents of the tree (as built for mozilla-central "
    "or mozilla-inbound)",
)
def load_image(args):
    """Load a pre-built Docker image either by task id or by image name."""
    from gecko_taskgraph.docker import load_image_by_name, load_image_by_task_id

    if not args.get("image_name") and not args.get("task_id"):
        print("Specify either IMAGE-NAME or TASK-ID")
        sys.exit(1)
    try:
        if args["task_id"]:
            ok = load_image_by_task_id(args["task_id"], args.get("tag"))
        else:
            ok = load_image_by_name(args["image_name"], args.get("tag"))
        if not ok:
            sys.exit(1)
    except Exception:
        traceback.print_exc()
        sys.exit(1)
@command("image-digest", help="Print the digest of a docker image.")
@argument(
    "image_name",
    help="Print the digest of the image of this name based on the current "
    "contents of the tree.",
)
def image_digest(args):
    """Print the content digest of the named in-tree Docker image."""
    from gecko_taskgraph.docker import get_image_digest

    try:
        digest = get_image_digest(args["image_name"])
        print(digest)
    except Exception:
        traceback.print_exc()
        sys.exit(1)
@command("decision", help="Run the decision task")
@argument("--root", "-r", help="root of the taskgraph definition relative to topsrcdir")
@argument(
    "--message",
    required=False,
    help=argparse.SUPPRESS,
)
@argument(
    "--project",
    required=True,
    help="Project to use for creating task graph. Example: --project=try",
)
@argument("--pushlog-id", dest="pushlog_id", required=True, default="0")
@argument("--pushdate", dest="pushdate", required=True, type=int, default=0)
@argument("--owner", required=True, help="email address of who owns this graph")
@argument("--level", required=True, help="SCM level of this repository")
@argument(
    "--target-tasks-method", help="method for selecting the target tasks to generate"
)
@argument(
    "--repository-type",
    required=True,
    help='Type of repository, either "hg" or "git"',
)
@argument("--base-repository", required=True, help='URL for "base" repository to clone')
@argument(
    "--base-ref", default="", help='Reference of the revision in the "base" repository'
)
@argument(
    "--base-rev",
    default="",
    help="Taskgraph decides what to do based on the revision range between "
    "`--base-rev` and `--head-rev`. Value is determined automatically if not provided",
)
@argument(
    "--head-repository",
    required=True,
    help='URL for "head" repository to fetch revision from',
)
@argument(
    "--head-ref", required=True, help="Reference (this is same as rev usually for hg)"
)
@argument(
    "--head-rev", required=True, help="Commit revision to use from head repository"
)
@argument("--head-tag", help="Tag attached to the revision", default="")
@argument(
    "--tasks-for", required=True, help="the tasks_for value used to generate this task"
)
@argument("--try-task-config-file", help="path to try task configuration file")
def decision(options):
    """Run the decision task: generate the graph and create the real tasks."""
    from gecko_taskgraph.decision import taskgraph_decision

    taskgraph_decision(options)
@command("action-callback", description="Run action callback used by action tasks")
@argument(
    "--root",
    "-r",
    default="taskcluster/ci",
    help="root of the taskgraph definition relative to topsrcdir",
)
def action_callback(options):
    """Run an action callback, reading its context from ACTION_* environment
    variables set by the action task."""
    from gecko_taskgraph.actions import trigger_action_callback
    from gecko_taskgraph.actions.util import get_parameters

    try:
        # the target task for this action (or null if it's a group action)
        task_id = json.loads(os.environ.get("ACTION_TASK_ID", "null"))
        # the target task group for this action
        task_group_id = os.environ.get("ACTION_TASK_GROUP_ID", None)
        input = json.loads(os.environ.get("ACTION_INPUT", "null"))
        callback = os.environ.get("ACTION_CALLBACK", None)
        root = options["root"]

        parameters = get_parameters(task_group_id)

        return trigger_action_callback(
            task_group_id=task_group_id,
            task_id=task_id,
            input=input,
            callback=callback,
            parameters=parameters,
            root=root,
            test=False,
        )
    except Exception:
        traceback.print_exc()
        sys.exit(1)
@command("test-action-callback", description="Run an action callback in a testing mode")
@argument(
    "--root",
    "-r",
    default="taskcluster/ci",
    help="root of the taskgraph definition relative to topsrcdir",
)
@argument(
    "--parameters",
    "-p",
    default="",
    help="parameters file (.yml or .json; see " "`taskcluster/docs/parameters.rst`)`",
)
@argument("--task-id", default=None, help="TaskId to which the action applies")
@argument(
    "--task-group-id", default=None, help="TaskGroupId to which the action applies"
)
@argument("--input", default=None, help="Action input (.yml or .json)")
@argument("callback", default=None, help="Action callback name (Python function name)")
def test_action_callback(options):
    """Run an action callback locally in test mode, with parameters and input
    loaded from files instead of the live environment."""
    import taskgraph.parameters
    from taskgraph.config import load_graph_config
    from taskgraph.util import yaml

    import gecko_taskgraph.actions

    def load_data(filename):
        # Parse a .yml or .json data file by extension.
        with open(filename) as f:
            if filename.endswith(".yml"):
                return yaml.load_stream(f)
            if filename.endswith(".json"):
                return json.load(f)
            # Bug fix: the f-string had lost its placeholder and always
            # printed a literal "(unknown)".
            raise Exception(f"unknown filename {filename}")

    try:
        task_id = options["task_id"]

        if options["input"]:
            input = load_data(options["input"])
        else:
            input = None

        root = options["root"]
        graph_config = load_graph_config(root)
        trust_domain = graph_config["trust-domain"]
        graph_config.register()

        parameters = taskgraph.parameters.load_parameters_file(
            options["parameters"], strict=False, trust_domain=trust_domain
        )
        parameters.check()

        return gecko_taskgraph.actions.trigger_action_callback(
            task_group_id=options["task_group_id"],
            task_id=task_id,
            input=input,
            callback=options["callback"],
            parameters=parameters,
            root=root,
            test=True,
        )
    except Exception:
        traceback.print_exc()
        sys.exit(1)
def create_parser():
    """Build the top-level argparse parser from the registered commands.

    Each entry in ``commands`` becomes a subparser; the argument specs the
    @argument decorator stashed on the command function are replayed into it.
    """
    parser = argparse.ArgumentParser(description="Interact with taskgraph")
    subparsers = parser.add_subparsers()
    for _, (func, args, kwargs, defaults) in commands.items():
        subparser = subparsers.add_parser(*args, **kwargs)
        for arg in func.args:
            subparser.add_argument(*arg[0], **arg[1])
        subparser.set_defaults(command=func, **defaults)
    return parser
def setup_logging():
    """Configure root logging with a timestamped format at INFO level."""
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO
    )
def main(args=None):
    """CLI entry point: parse ``args`` (default: ``sys.argv[1:]``) and
    dispatch to the selected subcommand.

    Any exception from the command is printed and converted to exit code 1.
    """
    # Bug fix: the default was previously `sys.argv[1:]` evaluated once at
    # import time; resolve it at call time instead.
    if args is None:
        args = sys.argv[1:]
    setup_logging()
    parser = create_parser()
    args = parser.parse_args(args)
    try:
        args.command(vars(args))
    except Exception:
        traceback.print_exc()
        sys.exit(1)