Bug 1828523 [wpt PR 39579] - Only restore dialog focus if focus is in the dialog...
[gecko.git] / tools / moztreedocs / mach_commands.py
blob6f44c65c209e7ee2ab59074d8a0b56ffee4b55d4
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
5 import fnmatch
6 import json
7 import multiprocessing
8 import os
9 import re
10 import subprocess
11 import sys
12 import tempfile
13 import time
14 import uuid
15 from functools import partial
16 from pprint import pprint
18 import mozpack.path as mozpath
19 import sentry_sdk
20 import yaml
21 from mach.decorators import Command, CommandArgument, SubCommand
22 from mach.registrar import Registrar
23 from mozbuild.util import memoize
25 here = os.path.abspath(os.path.dirname(__file__))
26 topsrcdir = os.path.abspath(os.path.dirname(os.path.dirname(here)))
27 DOC_ROOT = os.path.join(topsrcdir, "docs")
28 BASE_LINK = "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
31 # Helps manage in-tree documentation.
@Command(
    "doc",
    category="devenv",
    virtualenv_name="docs",
    description="Generate and serve documentation from the tree.",
)
@CommandArgument(
    "path",
    default=None,
    metavar="DIRECTORY",
    nargs="?",
    help="Path to documentation to build and display.",
)
@CommandArgument(
    "--format", default="html", dest="fmt", help="Documentation format to write."
)
@CommandArgument(
    "--outdir", default=None, metavar="DESTINATION", help="Where to write output."
)
@CommandArgument(
    "--archive",
    action="store_true",
    help="Write a gzipped tarball of generated docs.",
)
@CommandArgument(
    "--no-open",
    dest="auto_open",
    default=True,
    action="store_false",
    help="Don't automatically open HTML docs in a browser.",
)
@CommandArgument(
    "--no-serve",
    dest="serve",
    default=True,
    action="store_false",
    help="Don't serve the generated docs after building.",
)
@CommandArgument(
    "--http",
    default="localhost:5500",
    metavar="ADDRESS",
    help="Serve documentation on the specified host and port, "
    'default "localhost:5500".',
)
@CommandArgument("--upload", action="store_true", help="Upload generated files to S3.")
@CommandArgument(
    "-j",
    "--jobs",
    default=str(multiprocessing.cpu_count()),
    dest="jobs",
    help="Distribute the build over N processes in parallel.",
)
@CommandArgument("--write-url", default=None, help="Write S3 Upload URL to text file")
@CommandArgument(
    "--linkcheck", action="store_true", help="Check if the links are still valid"
)
@CommandArgument(
    "--dump-trees", default=None, help="Dump the Sphinx trees to specified file."
)
@CommandArgument(
    "--fatal-warnings",
    dest="enable_fatal_warnings",
    action="store_true",
    help="Enable fatal warnings.",
)
@CommandArgument(
    "--check-num-warnings",
    action="store_true",
    help="Check that the upper bound on the number of warnings is respected.",
)
@CommandArgument("--verbose", action="store_true", help="Run Sphinx in verbose mode")
@CommandArgument(
    "--no-autodoc",
    action="store_true",
    help="Disable generating Python/JS API documentation",
)
def build_docs(
    command_context,
    path=None,
    fmt="html",
    outdir=None,
    auto_open=True,
    serve=True,
    http=None,
    archive=False,
    upload=False,
    jobs=None,
    write_url=None,
    linkcheck=None,
    dump_trees=None,
    enable_fatal_warnings=False,
    check_num_warnings=False,
    verbose=None,
    no_autodoc=False,
):
    """Build the in-tree Sphinx documentation.

    Depending on the flags, the generated docs are then served via a
    livereload server, archived to a tarball, and/or uploaded to S3.
    Returns a non-zero exit code (via ``die``) on failure, ``None``/0 on
    success.
    """
    # TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
    import setup_helper

    setup_helper.set_project_root(command_context.topsrcdir)

    if not setup_helper.check_node_executables_valid():
        return 1

    setup_helper.eslint_maybe_setup()

    # Set the path so that Sphinx can find jsdoc, unfortunately there isn't
    # a way to pass this to Sphinx itself at the moment.
    os.environ["PATH"] = (
        mozpath.join(command_context.topsrcdir, "node_modules", ".bin")
        + os.pathsep
        + _node_path()
        + os.pathsep
        + os.environ["PATH"]
    )

    # Defer heavy imports until after the early-exit checks above.
    import webbrowser

    from livereload import Server

    from moztreedocs.package import create_tarball

    # Unique S3 key prefix for this build, used for the shareable link.
    unique_id = "%s/%s" % (project(), str(uuid.uuid1()))

    outdir = outdir or os.path.join(command_context.topobjdir, "docs")
    savedir = os.path.join(outdir, fmt)

    if path is None:
        path = command_context.topsrcdir
        if os.environ.get("MOZ_AUTOMATION") != "1":
            print(
                "\nBuilding the full documentation tree.\n"
                "Did you mean to only build part of the documentation?\n"
                "For a faster command, consider running:\n"
                "  ./mach doc path/to/docs\n"
            )
    path = os.path.normpath(os.path.abspath(path))

    docdir = _find_doc_dir(path)
    if not docdir:
        print(_dump_sphinx_backtrace())
        return die(
            "failed to generate documentation:\n"
            "%s: could not find docs at this location" % path
        )

    if linkcheck:
        # We want to verify if the links are valid or not
        fmt = "linkcheck"
    if no_autodoc:
        if check_num_warnings:
            return die(
                "'--no-autodoc' flag may not be used with '--check-num-warnings'"
            )
        toggle_no_autodoc()

    status, warnings = _run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs, verbose=verbose)
    if status != 0:
        print(_dump_sphinx_backtrace())
        return die(
            "failed to generate documentation:\n"
            "%s: sphinx return code %d" % (path, status)
        )
    else:
        print("\nGenerated documentation:\n%s" % savedir)
    msg = ""

    if enable_fatal_warnings:
        fatal_warnings = _check_sphinx_fatal_warnings(warnings)
        if fatal_warnings:
            msg += f"Error: Got fatal warnings:\n{''.join(fatal_warnings)}"
    if check_num_warnings:
        num_new = _check_sphinx_num_warnings(warnings)
        if num_new:
            msg += f"Error: {num_new} new warnings"
    if msg:
        return die(f"failed to generate documentation:\n {msg}")

    # Upload the artifact containing the link to S3
    # This would be used by code-review to post the link to Phabricator
    if write_url is not None:
        unique_link = BASE_LINK + unique_id + "/index.html"
        with open(write_url, "w") as fp:
            fp.write(unique_link)
            fp.flush()
        print("Generated " + write_url)

    if dump_trees is not None:
        parent = os.path.dirname(dump_trees)
        if parent and not os.path.isdir(parent):
            os.makedirs(parent)
        with open(dump_trees, "w") as fh:
            json.dump(manager().trees, fh)

    if archive:
        archive_path = os.path.join(outdir, "%s.tar.gz" % project())
        create_tarball(archive_path, savedir)
        print("Archived to %s" % archive_path)

    if upload:
        _s3_upload(savedir, project(), unique_id, version())

    if not serve:
        index_path = os.path.join(savedir, "index.html")
        if auto_open and os.path.isfile(index_path):
            webbrowser.open(index_path)
        return

    # Create livereload server. Any files modified in the specified docdir
    # will cause a re-build and refresh of the browser (if open).
    try:
        host, port = http.split(":", 1)
        port = int(port)
    except ValueError:
        return die("invalid address: %s" % http)

    server = Server()

    sphinx_trees = manager().trees or {savedir: docdir}
    for _, src in sphinx_trees.items():
        run_sphinx = partial(
            _run_sphinx, src, savedir, fmt=fmt, jobs=jobs, verbose=verbose
        )
        server.watch(src, run_sphinx)
    server.serve(
        host=host,
        port=port,
        root=savedir,
        open_url_delay=0.1 if auto_open else None,
    )
266 def _dump_sphinx_backtrace():
268 If there is a sphinx dump file, read and return
269 its content.
270 By default, it isn't displayed.
272 pattern = "sphinx-err-*"
273 output = ""
274 tmpdir = "/tmp"
276 if not os.path.isdir(tmpdir):
277 # Only run it on Linux
278 return
279 files = os.listdir(tmpdir)
280 for name in files:
281 if fnmatch.fnmatch(name, pattern):
282 pathFile = os.path.join(tmpdir, name)
283 stat = os.stat(pathFile)
284 output += "Name: {0} / Creation date: {1}\n".format(
285 pathFile, time.ctime(stat.st_mtime)
287 with open(pathFile) as f:
288 output += f.read()
289 return output
def _run_sphinx(docdir, savedir, config=None, fmt="html", jobs=None, verbose=None):
    """Run a single Sphinx build.

    Args:
        docdir: source directory containing the docs to build.
        savedir: output directory for the rendered docs.
        config: path to conf.py; defaults to the tree manager's conf.
        fmt: Sphinx builder name (e.g. "html", "linkcheck").
        jobs: parallelism for Sphinx's -j flag (string or None).
        verbose: when truthy, pass -v -v to Sphinx.

    Returns:
        (status, warnings): Sphinx's exit status and the list of lines
        written to the warnings file.
    """
    import sphinx.cmd.build

    config = config or manager().conf_py_path
    # When running sphinx with sentry, it adds significant overhead
    # and makes the build generation very very very slow
    # So, disable it to generate the doc faster
    sentry_sdk.init(None)
    # Capture warnings via Sphinx's -w flag into a temp file we can read back.
    warn_fd, warn_path = tempfile.mkstemp()
    os.close(warn_fd)
    try:
        args = [
            "-T",
            "-b",
            fmt,
            "-c",
            os.path.dirname(config),
            "-w",
            warn_path,
            docdir,
            savedir,
        ]
        if jobs:
            args.extend(["-j", jobs])
        if verbose:
            args.extend(["-v", "-v"])
        print("Run sphinx with:")
        print(args)
        status = sphinx.cmd.build.build_main(args)
        with open(warn_path) as warn_file:
            warnings = warn_file.readlines()
        return status, warnings
    finally:
        # Best-effort cleanup of the temp warnings file.
        try:
            os.unlink(warn_path)
        except Exception as ex:
            print(ex)
def _check_sphinx_fatal_warnings(warnings):
    """Return the warnings that match a "fatal warnings" pattern.

    The regex patterns come from the "fatal warnings" list in
    docs/config.yml; any warning line matching at least one pattern is
    returned.
    """
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        patterns = [re.compile(src) for src in yaml.safe_load(fh)["fatal warnings"]]
    return [
        line
        for line in warnings
        if any(pattern.search(line) for pattern in patterns)
    ]
def _check_sphinx_num_warnings(warnings):
    """Check the warning count against the configured upper bound.

    Returns the number of warnings over the "max_num_warnings" limit from
    docs/config.yml, or None when within bounds.
    """
    # The warnings file contains other strings as well; only count real ones.
    count = sum(1 for line in warnings if "WARNING" in line)
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        limit = yaml.safe_load(fh)["max_num_warnings"]
    if count <= limit:
        return None
    return count - limit
def manager():
    """Return the shared moztreedocs Sphinx manager (lazily imported)."""
    from moztreedocs import manager

    return manager
def toggle_no_autodoc():
    """Disable Python/JS API doc generation (backs the --no-autodoc flag)."""
    import moztreedocs

    # Flip the class-level switch so every manager instance sees it.
    moztreedocs._SphinxManager.NO_AUTODOC = True
@memoize
def _read_project_properties():
    """Load the Sphinx conf.py and extract project metadata.

    Returns:
        dict with "project" (the Mozilla project name, or the Sphinx
        ``project`` with spaces replaced by underscores) and "version"
        (``conf.version`` or None).
    """
    # The `imp` module was deprecated since Python 3.4 and removed in 3.12;
    # use importlib to execute conf.py as a module instead.
    import importlib.util

    path = os.path.normpath(manager().conf_py_path)
    spec = importlib.util.spec_from_file_location("doc_conf", path)
    conf = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(conf)

    # Prefer the Mozilla project name, falling back to Sphinx's
    # default variable if it isn't defined.
    project = getattr(conf, "moz_project_name", None)
    if not project:
        project = conf.project.replace(" ", "_")

    return {"project": project, "version": getattr(conf, "version", None)}
def project():
    """Return the documentation project name read from conf.py."""
    return _read_project_properties()["project"]
def version():
    """Return the documentation version from conf.py, or None if unset."""
    return _read_project_properties()["version"]
def _node_path():
    """Return the directory that contains the node executable."""
    from mozbuild.nodeutil import find_node_executable

    node_exe, _version = find_node_executable()
    return os.path.dirname(node_exe)
397 def _find_doc_dir(path):
398 if os.path.isfile(path):
399 return
401 valid_doc_dirs = ("doc", "docs")
402 for d in valid_doc_dirs:
403 p = os.path.join(path, d)
404 if os.path.isdir(p):
405 path = p
407 for index_file in ["index.rst", "index.md"]:
408 if os.path.exists(os.path.join(path, index_file)):
409 return path
def _s3_upload(root, project, unique_id, version=None):
    """Upload the generated docs under *root* to S3 and stage redirects.

    Args:
        root: directory of generated docs to distribute.
        project: project name used as the S3 key prefix.
        unique_id: per-build key prefix ("<project>/<uuid>") for a
            shareable link.
        version: optional version string; adds a "<project>/<version>"
            upload location.
    """
    # Workaround the issue
    # BlockingIOError: [Errno 11] write could not complete without blocking
    # https://github.com/travis-ci/travis-ci/issues/8920
    import fcntl

    from moztreedocs.package import distribution_files
    from moztreedocs.upload import s3_set_redirects, s3_upload

    # Force stdout (fd 1) back to blocking mode (clears O_NONBLOCK).
    fcntl.fcntl(1, fcntl.F_SETFL, 0)

    # Files are uploaded to multiple locations:
    #
    # <project>/latest
    # <project>/<version>
    #
    # This allows multiple projects and versions to be stored in the
    # S3 bucket.

    files = list(distribution_files(root))
    key_prefixes = []
    if version:
        key_prefixes.append("%s/%s" % (project, version))

    # Until we redirect / to main/latest, upload the main docs
    # to the root.
    if project == "main":
        key_prefixes.append("")

    key_prefixes.append(unique_id)

    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        redirects = yaml.safe_load(fh)["redirects"]

    # Normalize redirect keys/values to no leading/trailing slashes.
    redirects = {k.strip("/"): v.strip("/") for k, v in redirects.items()}

    all_redirects = {}

    for prefix in key_prefixes:
        s3_upload(files, prefix)

        # Don't setup redirects for the "version" or "uuid" prefixes since
        # we are exceeding a 50 redirect limit and external things are
        # unlikely to link there anyway (see bug 1614908).
        if (version and prefix.endswith(version)) or prefix == unique_id:
            continue

        if prefix:
            prefix += "/"
        all_redirects.update({prefix + k: prefix + v for k, v in redirects.items()})

    print("Redirects currently staged")
    pprint(all_redirects, indent=1)

    s3_set_redirects(all_redirects)

    unique_link = BASE_LINK + unique_id + "/index.html"
    print("Uploaded documentation can be accessed here " + unique_link)
@SubCommand(
    "doc",
    "mach-telemetry",
    description="Generate documentation from Glean metrics.yaml files",
)
def generate_telemetry_docs(command_context):
    """Render mach telemetry docs to markdown via glean_parser.

    Translates mach's pings.yaml/metrics.yaml plus every registered
    command handler's metrics file into python/mach/docs/.
    """
    args = [
        sys.executable,
        # Originally written as the implicit concatenation "-m" "glean_parser"
        # (a missing comma); pass them as two argv entries for clarity —
        # `python -m glean_parser` is equivalent.
        "-m",
        "glean_parser",
        "translate",
        "-f",
        "markdown",
        "-o",
        os.path.join(topsrcdir, "python/mach/docs/"),
        os.path.join(topsrcdir, "python/mach/pings.yaml"),
        os.path.join(topsrcdir, "python/mach/metrics.yaml"),
    ]
    metrics_paths = [
        handler.metrics_path
        for handler in Registrar.command_handlers.values()
        if handler.metrics_path is not None
    ]
    # De-duplicate metrics files shared by multiple command handlers.
    args.extend(
        [os.path.join(command_context.topsrcdir, path) for path in set(metrics_paths)]
    )
    subprocess.check_call(args)
@SubCommand(
    "doc",
    "show-targets",
    description="List all reference targets. Requires the docs to have been built.",
)
@CommandArgument(
    "--format", default="html", dest="fmt", help="Documentation format used."
)
@CommandArgument(
    "--outdir", default=None, metavar="DESTINATION", help="Where output was written."
)
def show_reference_targets(command_context, fmt="html", outdir=None):
    """Print all Sphinx reference targets from a previously built objects.inv."""
    command_context.activate_virtualenv()
    command_context.virtualenv_manager.install_pip_requirements(
        os.path.join(here, "requirements.txt")
    )

    import sphinx.ext.intersphinx

    outdir = outdir or os.path.join(command_context.topobjdir, "docs")
    inv_path = os.path.join(outdir, fmt, "objects.inv")

    if not os.path.exists(inv_path):
        # Bug fix: this message was a plain string, so "{inv_path}" was
        # printed literally instead of the actual path; make it an f-string.
        return die(
            f"object inventory not found: {inv_path}.\n"
            "Rebuild the docs and rerun this command"
        )

    sphinx.ext.intersphinx.inspect_main([inv_path])
def die(msg, exit_code=1):
    """Report *msg* on stderr, prefixed with the program name.

    Returns *exit_code* so callers can ``return die(...)`` from a command.
    """
    print(f"{sys.argv[0]}: {msg}", file=sys.stderr)
    return exit_code