Bug 1900094 - Add telemetry for impressions missing due to domain-to-categories map...
[gecko.git] / tools / moztreedocs / mach_commands.py
blobab7eb6888356e91a4f07bcb90a5b285092e9f216
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
5 import fnmatch
6 import json
7 import os
8 import re
9 import subprocess
10 import sys
11 import tempfile
12 import time
13 import uuid
14 from functools import partial
15 from pprint import pprint
17 import mozpack.path as mozpath
18 import sentry_sdk
19 import yaml
20 from mach.decorators import Command, CommandArgument, SubCommand
21 from mach.registrar import Registrar
22 from mozbuild.util import cpu_count, memoize
23 from mozfile import load_source
25 here = os.path.abspath(os.path.dirname(__file__))
26 topsrcdir = os.path.abspath(os.path.dirname(os.path.dirname(here)))
27 DOC_ROOT = os.path.join(topsrcdir, "docs")
28 BASE_LINK = "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
31 # Helps manage in-tree documentation.
34 @Command(
35 "doc",
36 category="devenv",
37 virtualenv_name="docs",
38 description="Generate and serve documentation from the tree.",
40 @CommandArgument(
41 "path",
42 default=None,
43 metavar="DIRECTORY",
44 nargs="?",
45 help="Path to documentation to build and display.",
47 @CommandArgument(
48 "--format", default="html", dest="fmt", help="Documentation format to write."
50 @CommandArgument(
51 "--outdir", default=None, metavar="DESTINATION", help="Where to write output."
53 @CommandArgument(
54 "--archive",
55 action="store_true",
56 help="Write a gzipped tarball of generated docs.",
58 @CommandArgument(
59 "--no-open",
60 dest="auto_open",
61 default=True,
62 action="store_false",
63 help="Don't automatically open HTML docs in a browser.",
65 @CommandArgument(
66 "--no-serve",
67 dest="serve",
68 default=True,
69 action="store_false",
70 help="Don't serve the generated docs after building.",
72 @CommandArgument(
73 "--http",
74 default="localhost:5500",
75 metavar="ADDRESS",
76 help="Serve documentation on the specified host and port, "
77 'default "localhost:5500".',
79 @CommandArgument("--upload", action="store_true", help="Upload generated files to S3.")
80 @CommandArgument(
81 "-j",
82 "--jobs",
83 default=str(cpu_count()),
84 dest="jobs",
85 help="Distribute the build over N processes in parallel.",
87 @CommandArgument("--write-url", default=None, help="Write S3 Upload URL to text file")
88 @CommandArgument(
89 "--linkcheck", action="store_true", help="Check if the links are still valid"
91 @CommandArgument(
92 "--dump-trees", default=None, help="Dump the Sphinx trees to specified file."
94 @CommandArgument(
95 "--fatal-warnings",
96 dest="enable_fatal_warnings",
97 action="store_true",
98 help="Enable fatal warnings.",
100 @CommandArgument(
101 "--check-num-warnings",
102 action="store_true",
103 help="Check that the upper bound on the number of warnings is respected.",
105 @CommandArgument("--verbose", action="store_true", help="Run Sphinx in verbose mode")
106 @CommandArgument(
107 "--no-autodoc",
108 action="store_true",
109 help="Disable generating Python/JS API documentation",
111 def build_docs(
112 command_context,
113 path=None,
114 fmt="html",
115 outdir=None,
116 auto_open=True,
117 serve=True,
118 http=None,
119 archive=False,
120 upload=False,
121 jobs=None,
122 write_url=None,
123 linkcheck=None,
124 dump_trees=None,
125 enable_fatal_warnings=False,
126 check_num_warnings=False,
127 verbose=None,
128 no_autodoc=False,
130 # TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
131 import setup_helper
133 setup_helper.set_project_root(command_context.topsrcdir)
135 if not setup_helper.check_node_executables_valid():
136 return 1
138 setup_helper.eslint_maybe_setup()
140 # Set the path so that Sphinx can find jsdoc, unfortunately there isn't
141 # a way to pass this to Sphinx itself at the moment.
142 os.environ["PATH"] = (
143 mozpath.join(command_context.topsrcdir, "node_modules", ".bin")
144 + os.pathsep
145 + _node_path()
146 + os.pathsep
147 + os.environ["PATH"]
150 import webbrowser
152 from livereload import Server
154 from moztreedocs.package import create_tarball
156 unique_id = "%s/%s" % (project(), str(uuid.uuid1()))
158 outdir = outdir or os.path.join(command_context.topobjdir, "docs")
159 savedir = os.path.join(outdir, fmt)
161 if path is None:
162 path = command_context.topsrcdir
163 if os.environ.get("MOZ_AUTOMATION") != "1":
164 print(
165 "\nBuilding the full documentation tree.\n"
166 "Did you mean to only build part of the documentation?\n"
167 "For a faster command, consider running:\n"
168 " ./mach doc path/to/docs\n"
170 path = os.path.normpath(os.path.abspath(path))
172 docdir = _find_doc_dir(path)
173 if not docdir:
174 print(_dump_sphinx_backtrace())
175 return die(
176 "failed to generate documentation:\n"
177 "%s: could not find docs at this location" % path
180 if linkcheck:
181 # We want to verify if the links are valid or not
182 fmt = "linkcheck"
183 if no_autodoc:
184 if check_num_warnings:
185 return die(
186 "'--no-autodoc' flag may not be used with '--check-num-warnings'"
188 toggle_no_autodoc()
190 status, warnings = _run_sphinx(docdir, savedir, fmt=fmt, jobs=jobs, verbose=verbose)
191 if status != 0:
192 print(_dump_sphinx_backtrace())
193 return die(
194 "failed to generate documentation:\n"
195 "%s: sphinx return code %d" % (path, status)
197 else:
198 print("\nGenerated documentation:\n%s" % savedir)
199 msg = ""
201 if enable_fatal_warnings:
202 fatal_warnings = _check_sphinx_fatal_warnings(warnings)
203 if fatal_warnings:
204 msg += f"Error: Got fatal warnings:\n{''.join(fatal_warnings)}"
205 if check_num_warnings:
206 [num_new, num_actual] = _check_sphinx_num_warnings(warnings)
207 print("Logged %s warnings\n" % num_actual)
208 if num_new:
209 msg += f"Error: {num_new} new warnings have been introduced compared to the limit in docs/config.yml"
210 if msg:
211 return dieWithTestFailure(msg)
213 # Upload the artifact containing the link to S3
214 # This would be used by code-review to post the link to Phabricator
215 if write_url is not None:
216 unique_link = BASE_LINK + unique_id + "/index.html"
217 with open(write_url, "w") as fp:
218 fp.write(unique_link)
219 fp.flush()
220 print("Generated " + write_url)
222 if dump_trees is not None:
223 parent = os.path.dirname(dump_trees)
224 if parent and not os.path.isdir(parent):
225 os.makedirs(parent)
226 with open(dump_trees, "w") as fh:
227 json.dump(manager().trees, fh)
229 if archive:
230 archive_path = os.path.join(outdir, "%s.tar.gz" % project())
231 create_tarball(archive_path, savedir)
232 print("Archived to %s" % archive_path)
234 if upload:
235 _s3_upload(savedir, project(), unique_id, version())
237 if not serve:
238 index_path = os.path.join(savedir, "index.html")
239 if auto_open and os.path.isfile(index_path):
240 webbrowser.open(index_path)
241 return
243 # Create livereload server. Any files modified in the specified docdir
244 # will cause a re-build and refresh of the browser (if open).
245 try:
246 host, port = http.split(":", 1)
247 port = int(port)
248 except ValueError:
249 return die("invalid address: %s" % http)
251 server = Server()
253 sphinx_trees = manager().trees or {savedir: docdir}
254 for _, src in sphinx_trees.items():
255 run_sphinx = partial(
256 _run_sphinx, src, savedir, fmt=fmt, jobs=jobs, verbose=verbose
258 server.watch(src, run_sphinx)
259 server.serve(
260 host=host,
261 port=port,
262 root=savedir,
263 open_url_delay=0.1 if auto_open else None,
267 def _dump_sphinx_backtrace():
269 If there is a sphinx dump file, read and return
270 its content.
271 By default, it isn't displayed.
273 pattern = "sphinx-err-*"
274 output = ""
275 tmpdir = "/tmp"
277 if not os.path.isdir(tmpdir):
278 # Only run it on Linux
279 return
280 files = os.listdir(tmpdir)
281 for name in files:
282 if fnmatch.fnmatch(name, pattern):
283 pathFile = os.path.join(tmpdir, name)
284 stat = os.stat(pathFile)
285 output += "Name: {0} / Creation date: {1}\n".format(
286 pathFile, time.ctime(stat.st_mtime)
288 with open(pathFile) as f:
289 output += f.read()
290 return output
293 def _run_sphinx(docdir, savedir, config=None, fmt="html", jobs=None, verbose=None):
294 import sphinx.cmd.build
296 config = config or manager().conf_py_path
297 # When running sphinx with sentry, it adds significant overhead
298 # and makes the build generation very very very slow
299 # So, disable it to generate the doc faster
300 sentry_sdk.init(None)
301 warn_fd, warn_path = tempfile.mkstemp()
302 os.close(warn_fd)
303 try:
304 args = [
305 "-T",
306 "-b",
307 fmt,
308 "-c",
309 os.path.dirname(config),
310 "-w",
311 warn_path,
312 docdir,
313 savedir,
315 if jobs:
316 args.extend(["-j", jobs])
317 if verbose:
318 args.extend(["-v", "-v"])
319 print("Run sphinx with:")
320 print(args)
321 status = sphinx.cmd.build.build_main(args)
322 with open(warn_path) as warn_file:
323 warnings = warn_file.readlines()
324 return status, warnings
325 finally:
326 try:
327 os.unlink(warn_path)
328 except Exception as ex:
329 print(ex)
332 def _check_sphinx_fatal_warnings(warnings):
333 with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
334 fatal_warnings_src = yaml.safe_load(fh)["fatal warnings"]
335 fatal_warnings_regex = [re.compile(item) for item in fatal_warnings_src]
336 fatal_warnings = []
337 for warning in warnings:
338 if any(item.search(warning) for item in fatal_warnings_regex):
339 fatal_warnings.append(warning)
340 return fatal_warnings
343 def _check_sphinx_num_warnings(warnings):
344 # warnings file contains other strings as well
345 num_warnings = len([w for w in warnings if "WARNING" in w])
346 with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
347 max_num = yaml.safe_load(fh)["max_num_warnings"]
348 if num_warnings > max_num:
349 return [num_warnings - max_num, num_warnings]
350 return [0, num_warnings]
353 def manager():
354 from moztreedocs import manager
356 return manager
359 def toggle_no_autodoc():
360 import moztreedocs
362 moztreedocs._SphinxManager.NO_AUTODOC = True
365 @memoize
366 def _read_project_properties():
367 path = os.path.normpath(manager().conf_py_path)
368 conf = load_source("doc_conf", path)
370 # Prefer the Mozilla project name, falling back to Sphinx's
371 # default variable if it isn't defined.
372 project = getattr(conf, "moz_project_name", None)
373 if not project:
374 project = conf.project.replace(" ", "_")
376 return {"project": project, "version": getattr(conf, "version", None)}
379 def project():
380 return _read_project_properties()["project"]
383 def version():
384 return _read_project_properties()["version"]
387 def _node_path():
388 from mozbuild.nodeutil import find_node_executable
390 node, _ = find_node_executable()
392 return os.path.dirname(node)
395 def _find_doc_dir(path):
396 if os.path.isfile(path):
397 return
399 valid_doc_dirs = ("doc", "docs")
400 for d in valid_doc_dirs:
401 p = os.path.join(path, d)
402 if os.path.isdir(p):
403 path = p
405 for index_file in ["index.rst", "index.md"]:
406 if os.path.exists(os.path.join(path, index_file)):
407 return path
410 def _s3_upload(root, project, unique_id, version=None):
411 # Workaround the issue
412 # BlockingIOError: [Errno 11] write could not complete without blocking
413 # https://github.com/travis-ci/travis-ci/issues/8920
414 import fcntl
416 from moztreedocs.package import distribution_files
417 from moztreedocs.upload import s3_set_redirects, s3_upload
419 fcntl.fcntl(1, fcntl.F_SETFL, 0)
421 # Files are uploaded to multiple locations:
423 # <project>/latest
424 # <project>/<version>
426 # This allows multiple projects and versions to be stored in the
427 # S3 bucket.
429 files = list(distribution_files(root))
430 key_prefixes = []
431 if version:
432 key_prefixes.append("%s/%s" % (project, version))
434 # Until we redirect / to main/latest, upload the main docs
435 # to the root.
436 if project == "main":
437 key_prefixes.append("")
439 key_prefixes.append(unique_id)
441 with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
442 redirects = yaml.safe_load(fh)["redirects"]
444 redirects = {k.strip("/"): v.strip("/") for k, v in redirects.items()}
446 all_redirects = {}
448 for prefix in key_prefixes:
449 s3_upload(files, prefix)
451 # Don't setup redirects for the "version" or "uuid" prefixes since
452 # we are exceeding a 50 redirect limit and external things are
453 # unlikely to link there anyway (see bug 1614908).
454 if (version and prefix.endswith(version)) or prefix == unique_id:
455 continue
457 if prefix:
458 prefix += "/"
459 all_redirects.update({prefix + k: prefix + v for k, v in redirects.items()})
461 print("Redirects currently staged")
462 pprint(all_redirects, indent=1)
464 s3_set_redirects(all_redirects)
466 unique_link = BASE_LINK + unique_id + "/index.html"
467 print("Uploaded documentation can be accessed here " + unique_link)
470 @SubCommand(
471 "doc",
472 "mach-telemetry",
473 description="Generate documentation from Glean metrics.yaml files",
475 def generate_telemetry_docs(command_context):
476 args = [
477 sys.executable,
478 "-m" "glean_parser",
479 "translate",
480 "-f",
481 "markdown",
482 "-o",
483 os.path.join(topsrcdir, "python/mach/docs/"),
484 os.path.join(topsrcdir, "python/mach/pings.yaml"),
485 os.path.join(topsrcdir, "python/mach/metrics.yaml"),
486 os.path.join(topsrcdir, "python/mozbuild/metrics.yaml"),
488 metrics_paths = [
489 handler.metrics_path
490 for handler in Registrar.command_handlers.values()
491 if handler.metrics_path is not None
493 args.extend(
494 [os.path.join(command_context.topsrcdir, path) for path in set(metrics_paths)]
496 subprocess.check_call(args)
499 @SubCommand(
500 "doc",
501 "show-targets",
502 description="List all reference targets. Requires the docs to have been built.",
504 @CommandArgument(
505 "--format", default="html", dest="fmt", help="Documentation format used."
507 @CommandArgument(
508 "--outdir", default=None, metavar="DESTINATION", help="Where output was written."
510 def show_reference_targets(command_context, fmt="html", outdir=None):
511 command_context.activate_virtualenv()
512 command_context.virtualenv_manager.install_pip_requirements(
513 os.path.join(here, "requirements.txt")
516 import sphinx.ext.intersphinx
518 outdir = outdir or os.path.join(command_context.topobjdir, "docs")
519 inv_path = os.path.join(outdir, fmt, "objects.inv")
521 if not os.path.exists(inv_path):
522 return die(
523 "object inventory not found: {inv_path}.\n"
524 "Rebuild the docs and rerun this command"
526 sphinx.ext.intersphinx.inspect_main([inv_path])
529 def die(msg, exit_code=1):
530 msg = "%s %s: %s" % (sys.argv[0], sys.argv[1], msg)
531 print(msg, file=sys.stderr)
532 return exit_code
535 def dieWithTestFailure(msg, exit_code=1):
536 for m in msg.split("\n"):
537 msg = "TEST-UNEXPECTED-FAILURE | %s %s | %s" % (sys.argv[0], sys.argv[1], m)
538 print(msg, file=sys.stderr)
539 return exit_code