Bug 1858509 add thread-safety annotations around MediaSourceDemuxer::mMonitor r=alwu
[gecko.git] / tools / moztreedocs / mach_commands.py
blobad63ae0e44986b2e4ac187182b287d5e444e0496
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
5 import fnmatch
6 import json
7 import multiprocessing
8 import os
9 import re
10 import subprocess
11 import sys
12 import tempfile
13 import time
14 import uuid
15 from functools import partial
16 from pprint import pprint
18 import mozpack.path as mozpath
19 import sentry_sdk
20 import yaml
21 from mach.decorators import Command, CommandArgument, SubCommand
22 from mach.registrar import Registrar
23 from mozbuild.util import memoize
24 from mozfile import load_source
26 here = os.path.abspath(os.path.dirname(__file__))
27 topsrcdir = os.path.abspath(os.path.dirname(os.path.dirname(here)))
28 DOC_ROOT = os.path.join(topsrcdir, "docs")
29 BASE_LINK = "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
# Helps manage in-tree documentation.


@Command(
    "doc",
    category="devenv",
    virtualenv_name="docs",
    description="Generate and serve documentation from the tree.",
)
@CommandArgument(
    "path",
    default=None,
    metavar="DIRECTORY",
    nargs="?",
    help="Path to documentation to build and display.",
)
@CommandArgument(
    "--format", default="html", dest="fmt", help="Documentation format to write."
)
@CommandArgument(
    "--outdir", default=None, metavar="DESTINATION", help="Where to write output."
)
@CommandArgument(
    "--archive",
    action="store_true",
    help="Write a gzipped tarball of generated docs.",
)
@CommandArgument(
    "--no-open",
    dest="auto_open",
    default=True,
    action="store_false",
    help="Don't automatically open HTML docs in a browser.",
)
@CommandArgument(
    "--no-serve",
    dest="serve",
    default=True,
    action="store_false",
    help="Don't serve the generated docs after building.",
)
@CommandArgument(
    "--http",
    default="localhost:5500",
    metavar="ADDRESS",
    help="Serve documentation on the specified host and port, "
    'default "localhost:5500".',
)
@CommandArgument("--upload", action="store_true", help="Upload generated files to S3.")
@CommandArgument(
    "-j",
    "--jobs",
    default=str(multiprocessing.cpu_count()),
    dest="jobs",
    help="Distribute the build over N processes in parallel.",
)
@CommandArgument("--write-url", default=None, help="Write S3 Upload URL to text file")
@CommandArgument(
    "--linkcheck", action="store_true", help="Check if the links are still valid"
)
@CommandArgument(
    "--dump-trees", default=None, help="Dump the Sphinx trees to specified file."
)
@CommandArgument(
    "--fatal-warnings",
    dest="enable_fatal_warnings",
    action="store_true",
    help="Enable fatal warnings.",
)
@CommandArgument(
    "--check-num-warnings",
    action="store_true",
    help="Check that the upper bound on the number of warnings is respected.",
)
@CommandArgument("--verbose", action="store_true", help="Run Sphinx in verbose mode")
@CommandArgument(
    "--no-autodoc",
    action="store_true",
    help="Disable generating Python/JS API documentation",
)
def build_docs(
    command_context,
    path=None,
    fmt="html",
    outdir=None,
    auto_open=True,
    serve=True,
    http=None,
    archive=False,
    upload=False,
    jobs=None,
    write_url=None,
    linkcheck=None,
    dump_trees=None,
    enable_fatal_warnings=False,
    check_num_warnings=False,
    verbose=None,
    no_autodoc=False,
):
    """Build (and by default serve) the in-tree Sphinx documentation.

    Builds the docs rooted at *path* (the whole tree when omitted) into
    ``<outdir>/<fmt>``, optionally enforcing warning policies, archiving,
    uploading to S3, and finally serving the result via a livereload
    server unless ``--no-serve`` was given.

    Returns a non-zero exit code on failure (via ``die``/
    ``dieWithTestFailure``), ``None``/0 otherwise.
    """
    # TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
    import setup_helper

    setup_helper.set_project_root(command_context.topsrcdir)

    if not setup_helper.check_node_executables_valid():
        return 1

    setup_helper.eslint_maybe_setup()

    # Set the path so that Sphinx can find jsdoc, unfortunately there isn't
    # a way to pass this to Sphinx itself at the moment.
    os.environ["PATH"] = (
        mozpath.join(command_context.topsrcdir, "node_modules", ".bin")
        + os.pathsep
        + _node_path()
        + os.pathsep
        + os.environ["PATH"]
    )

    # Deferred imports: these are only needed once a build actually runs.
    import webbrowser

    from livereload import Server

    from moztreedocs.package import create_tarball

    # Unique S3 key prefix for this build, used for upload + link reporting.
    unique_id = "%s/%s" % (project(), str(uuid.uuid1()))

    outdir = outdir or os.path.join(command_context.topobjdir, "docs")
    savedir = os.path.join(outdir, fmt)

    if path is None:
        path = command_context.topsrcdir
        if os.environ.get("MOZ_AUTOMATION") != "1":
            print(
                "\nBuilding the full documentation tree.\n"
                "Did you mean to only build part of the documentation?\n"
                "For a faster command, consider running:\n"
                " ./mach doc path/to/docs\n"
            )
    path = os.path.normpath(os.path.abspath(path))

    docdir = _find_doc_dir(path)
    if not docdir:
        print(_dump_sphinx_backtrace())
        return die(
            "failed to generate documentation:\n"
            "%s: could not find docs at this location" % path
        )

    if linkcheck:
        # We want to verify if the links are valid or not
        fmt = "linkcheck"
    if no_autodoc:
        if check_num_warnings:
            return die(
                "'--no-autodoc' flag may not be used with '--check-num-warnings'"
            )
        toggle_no_autodoc()

    status, warnings = _run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs, verbose=verbose)
    if status != 0:
        print(_dump_sphinx_backtrace())
        return die(
            "failed to generate documentation:\n"
            "%s: sphinx return code %d" % (path, status)
        )
    else:
        print("\nGenerated documentation:\n%s" % savedir)
    msg = ""

    # Warning policy checks: failures are accumulated into msg so both
    # checks run before the command fails.
    if enable_fatal_warnings:
        fatal_warnings = _check_sphinx_fatal_warnings(warnings)
        if fatal_warnings:
            msg += f"Error: Got fatal warnings:\n{''.join(fatal_warnings)}"
    if check_num_warnings:
        [num_new, num_actual] = _check_sphinx_num_warnings(warnings)
        print("Logged %s warnings\n" % num_actual)
        if num_new:
            msg += f"Error: {num_new} new warnings have been introduced compared to the limit in docs/config.yml"
    if msg:
        return dieWithTestFailure(msg)

    # Upload the artifact containing the link to S3
    # This would be used by code-review to post the link to Phabricator
    if write_url is not None:
        unique_link = BASE_LINK + unique_id + "/index.html"
        with open(write_url, "w") as fp:
            fp.write(unique_link)
            fp.flush()
        print("Generated " + write_url)

    if dump_trees is not None:
        parent = os.path.dirname(dump_trees)
        if parent and not os.path.isdir(parent):
            os.makedirs(parent)
        with open(dump_trees, "w") as fh:
            json.dump(manager().trees, fh)

    if archive:
        archive_path = os.path.join(outdir, "%s.tar.gz" % project())
        create_tarball(archive_path, savedir)
        print("Archived to %s" % archive_path)

    if upload:
        _s3_upload(savedir, project(), unique_id, version())

    if not serve:
        index_path = os.path.join(savedir, "index.html")
        if auto_open and os.path.isfile(index_path):
            webbrowser.open(index_path)
        return

    # Create livereload server. Any files modified in the specified docdir
    # will cause a re-build and refresh of the browser (if open).
    try:
        host, port = http.split(":", 1)
        port = int(port)
    except ValueError:
        return die("invalid address: %s" % http)

    server = Server()

    sphinx_trees = manager().trees or {savedir: docdir}
    for _, src in sphinx_trees.items():
        run_sphinx = partial(
            _run_sphinx, src, savedir, fmt=fmt, jobs=jobs, verbose=verbose
        )
        server.watch(src, run_sphinx)
    server.serve(
        host=host,
        port=port,
        root=savedir,
        open_url_delay=0.1 if auto_open else None,
    )
def _dump_sphinx_backtrace():
    """Collect and return the contents of any Sphinx crash-dump files.

    Sphinx writes a traceback to ``/tmp/sphinx-err-*`` when it crashes; this
    gathers each dump (with its name and creation date) into one string so it
    can be shown to the user. Returns ``None`` when /tmp does not exist.
    """
    tmpdir = "/tmp"

    if not os.path.isdir(tmpdir):
        # Only run it on Linux
        return

    collected = ""
    for entry in os.listdir(tmpdir):
        if not fnmatch.fnmatch(entry, "sphinx-err-*"):
            continue
        dump_path = os.path.join(tmpdir, entry)
        dump_stat = os.stat(dump_path)
        collected += "Name: {0} / Creation date: {1}\n".format(
            dump_path, time.ctime(dump_stat.st_mtime)
        )
        with open(dump_path) as dump_file:
            collected += dump_file.read()
    return collected
def _run_sphinx(docdir, savedir, config=None, fmt="html", jobs=None, verbose=None):
    """Run sphinx-build over *docdir*, writing output to *savedir*.

    Returns a ``(status, warnings)`` tuple: sphinx's exit status and the list
    of lines it wrote to its warning log. The warning log is a temp file that
    is always removed afterwards.
    """
    import sphinx.cmd.build

    config = config or manager().conf_py_path
    # When running sphinx with sentry, it adds significant overhead
    # and makes the build generation very very very slow
    # So, disable it to generate the doc faster
    sentry_sdk.init(None)

    # -w needs a real path; create the temp file up front and close the fd
    # so sphinx can open it itself.
    warn_fd, warn_path = tempfile.mkstemp()
    os.close(warn_fd)
    try:
        args = ["-T", "-b", fmt, "-c", os.path.dirname(config), "-w", warn_path]
        args += [docdir, savedir]
        if jobs:
            args += ["-j", jobs]
        if verbose:
            args += ["-v", "-v"]
        print("Run sphinx with:")
        print(args)
        status = sphinx.cmd.build.build_main(args)
        with open(warn_path) as warn_file:
            warnings = warn_file.readlines()
        return status, warnings
    finally:
        # Best-effort cleanup; a failure to unlink shouldn't mask the result.
        try:
            os.unlink(warn_path)
        except Exception as ex:
            print(ex)
def _check_sphinx_fatal_warnings(warnings):
    """Return the subset of *warnings* matching any "fatal warnings" regex
    declared in docs/config.yml."""
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        patterns = yaml.safe_load(fh)["fatal warnings"]
    regexes = [re.compile(pattern) for pattern in patterns]
    return [line for line in warnings if any(rx.search(line) for rx in regexes)]
def _check_sphinx_num_warnings(warnings):
    """Compare the warning count against the limit in docs/config.yml.

    Returns ``[num_over_limit, num_total]`` where the first entry is 0 when
    the configured ``max_num_warnings`` bound is respected.
    """
    # warnings file contains other strings as well
    num_warnings = sum(1 for line in warnings if "WARNING" in line)
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        max_num = yaml.safe_load(fh)["max_num_warnings"]
    return [max(0, num_warnings - max_num), num_warnings]
def manager():
    """Return the shared moztreedocs Sphinx manager instance."""
    import moztreedocs

    return moztreedocs.manager
def toggle_no_autodoc():
    """Disable Python/JS API doc generation for this build by flipping the
    class-wide flag on the Sphinx manager."""
    import moztreedocs

    setattr(moztreedocs._SphinxManager, "NO_AUTODOC", True)
@memoize
def _read_project_properties():
    """Load the docs' conf.py and return its project name and version.

    Memoized: conf.py does not change within a single mach invocation.
    """
    conf_path = os.path.normpath(manager().conf_py_path)
    conf = load_source("doc_conf", conf_path)

    # Prefer the Mozilla project name, falling back to Sphinx's
    # default variable if it isn't defined.
    name = getattr(conf, "moz_project_name", None)
    if not name:
        name = conf.project.replace(" ", "_")

    return {"project": name, "version": getattr(conf, "version", None)}
def project():
    """Return the documentation project's name."""
    properties = _read_project_properties()
    return properties["project"]
def version():
    """Return the documentation project's version (may be None)."""
    properties = _read_project_properties()
    return properties["version"]
def _node_path():
    """Return the directory containing the node executable, so it can be
    prepended to PATH for Sphinx's jsdoc integration."""
    from mozbuild.nodeutil import find_node_executable

    node_exe, _version = find_node_executable()
    return os.path.dirname(node_exe)
396 def _find_doc_dir(path):
397 if os.path.isfile(path):
398 return
400 valid_doc_dirs = ("doc", "docs")
401 for d in valid_doc_dirs:
402 p = os.path.join(path, d)
403 if os.path.isdir(p):
404 path = p
406 for index_file in ["index.rst", "index.md"]:
407 if os.path.exists(os.path.join(path, index_file)):
408 return path
def _s3_upload(root, project, unique_id, version=None):
    """Upload the generated docs under *root* to S3 and stage redirects.

    Files are uploaded once per key prefix: ``<project>/<version>`` (when a
    version is known), the bucket root (for the "main" project only), and the
    per-build *unique_id* prefix. Redirects from docs/config.yml are applied
    to all prefixes except the version- and uuid-scoped ones.
    """
    # Workaround the issue
    # BlockingIOError: [Errno 11] write could not complete without blocking
    # https://github.com/travis-ci/travis-ci/issues/8920
    import fcntl

    from moztreedocs.package import distribution_files
    from moztreedocs.upload import s3_set_redirects, s3_upload

    # Force stdout back to blocking mode (clears O_NONBLOCK on fd 1).
    fcntl.fcntl(1, fcntl.F_SETFL, 0)

    # Files are uploaded to multiple locations:
    #
    # <project>/latest
    # <project>/<version>
    #
    # This allows multiple projects and versions to be stored in the
    # S3 bucket.

    files = list(distribution_files(root))
    key_prefixes = []
    if version:
        key_prefixes.append("%s/%s" % (project, version))

    # Until we redirect / to main/latest, upload the main docs
    # to the root.
    if project == "main":
        key_prefixes.append("")

    key_prefixes.append(unique_id)

    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        redirects = yaml.safe_load(fh)["redirects"]

    # Normalize config entries: redirect keys/values are stored without
    # surrounding slashes so prefixes can be joined uniformly below.
    redirects = {k.strip("/"): v.strip("/") for k, v in redirects.items()}

    all_redirects = {}

    for prefix in key_prefixes:
        s3_upload(files, prefix)

        # Don't setup redirects for the "version" or "uuid" prefixes since
        # we are exceeding a 50 redirect limit and external things are
        # unlikely to link there anyway (see bug 1614908).
        if (version and prefix.endswith(version)) or prefix == unique_id:
            continue

        if prefix:
            prefix += "/"
        all_redirects.update({prefix + k: prefix + v for k, v in redirects.items()})

    print("Redirects currently staged")
    pprint(all_redirects, indent=1)

    s3_set_redirects(all_redirects)

    unique_link = BASE_LINK + unique_id + "/index.html"
    print("Uploaded documentation can be accessed here " + unique_link)
@SubCommand(
    "doc",
    "mach-telemetry",
    description="Generate documentation from Glean metrics.yaml files",
)
def generate_telemetry_docs(command_context):
    """Render mach's Glean telemetry definitions to markdown documentation.

    Runs ``glean_parser translate`` over mach's own pings.yaml/metrics.yaml
    plus the metrics.yaml of every registered mach command handler, writing
    markdown into python/mach/docs/. Raises CalledProcessError when
    glean_parser exits non-zero.
    """
    args = [
        sys.executable,
        # Fix: this was previously the implicit string concatenation
        # '"-m" "glean_parser"' (i.e. "-mglean_parser"), an accidental
        # missing comma that only worked because the CPython CLI accepts
        # the fused form. Pass the option and module separately.
        "-m",
        "glean_parser",
        "translate",
        "-f",
        "markdown",
        "-o",
        os.path.join(topsrcdir, "python/mach/docs/"),
        os.path.join(topsrcdir, "python/mach/pings.yaml"),
        os.path.join(topsrcdir, "python/mach/metrics.yaml"),
    ]
    # Each mach command may register its own metrics.yaml; include every
    # distinct one in the translation.
    metrics_paths = [
        handler.metrics_path
        for handler in Registrar.command_handlers.values()
        if handler.metrics_path is not None
    ]
    args.extend(
        [os.path.join(command_context.topsrcdir, path) for path in set(metrics_paths)]
    )
    subprocess.check_call(args)
@SubCommand(
    "doc",
    "show-targets",
    description="List all reference targets. Requires the docs to have been built.",
)
@CommandArgument(
    "--format", default="html", dest="fmt", help="Documentation format used."
)
@CommandArgument(
    "--outdir", default=None, metavar="DESTINATION", help="Where output was written."
)
def show_reference_targets(command_context, fmt="html", outdir=None):
    """Dump every cross-reference target from the built docs' objects.inv.

    Requires a prior documentation build; dies with an error when the
    intersphinx inventory file is missing.
    """
    command_context.activate_virtualenv()
    command_context.virtualenv_manager.install_pip_requirements(
        os.path.join(here, "requirements.txt")
    )

    import sphinx.ext.intersphinx

    outdir = outdir or os.path.join(command_context.topobjdir, "docs")
    inv_path = os.path.join(outdir, fmt, "objects.inv")

    if not os.path.exists(inv_path):
        # Bug fix: the message was a plain string containing a literal
        # "{inv_path}" placeholder (missing f-string prefix), so the actual
        # path was never shown to the user.
        return die(
            f"object inventory not found: {inv_path}.\n"
            "Rebuild the docs and rerun this command"
        )

    sphinx.ext.intersphinx.inspect_main([inv_path])
def die(msg, exit_code=1):
    """Print *msg* to stderr, prefixed with the mach invocation, and return
    *exit_code* so callers can ``return die(...)`` to fail the command."""
    invocation = "%s %s" % (sys.argv[0], sys.argv[1])
    print("%s: %s" % (invocation, msg), file=sys.stderr)
    return exit_code
def dieWithTestFailure(msg, exit_code=1):
    """Print each line of *msg* to stderr as a TEST-UNEXPECTED-FAILURE record
    (so CI treats it as a test failure) and return *exit_code*."""
    invocation = "%s %s" % (sys.argv[0], sys.argv[1])
    for line in msg.split("\n"):
        print(
            "TEST-UNEXPECTED-FAILURE | %s | %s" % (invocation, line),
            file=sys.stderr,
        )
    return exit_code