# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
15 from functools
import partial
16 from pprint
import pprint
18 import mozpack
.path
as mozpath
21 from mach
.decorators
import Command
, CommandArgument
, SubCommand
22 from mach
.registrar
import Registrar
23 from mozbuild
.util
import memoize
24 from mozfile
import load_source
# Absolute path of the directory containing this file.
here = os.path.abspath(os.path.dirname(__file__))

# Repository root: two directory levels up from this file.
topsrcdir = os.path.abspath(os.path.dirname(os.path.dirname(here)))

# In-tree documentation root (<topsrcdir>/docs); config.yml is read from here.
DOC_ROOT = os.path.join(topsrcdir, "docs")

# Base URL of the S3 website bucket; combined with a unique id to build
# links to uploaded documentation.
BASE_LINK = "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
32 # Helps manage in-tree documentation.
38 virtualenv_name
="docs",
39 description
="Generate and serve documentation from the tree.",
46 help="Path to documentation to build and display.",
49 "--format", default
="html", dest
="fmt", help="Documentation format to write."
52 "--outdir", default
=None, metavar
="DESTINATION", help="Where to write output."
57 help="Write a gzipped tarball of generated docs.",
64 help="Don't automatically open HTML docs in a browser.",
71 help="Don't serve the generated docs after building.",
75 default
="localhost:5500",
77 help="Serve documentation on the specified host and port, "
78 'default "localhost:5500".',
80 @CommandArgument("--upload", action
="store_true", help="Upload generated files to S3.")
84 default
=str(multiprocessing
.cpu_count()),
86 help="Distribute the build over N processes in parallel.",
88 @CommandArgument("--write-url", default
=None, help="Write S3 Upload URL to text file")
90 "--linkcheck", action
="store_true", help="Check if the links are still valid"
93 "--dump-trees", default
=None, help="Dump the Sphinx trees to specified file."
97 dest
="enable_fatal_warnings",
99 help="Enable fatal warnings.",
102 "--check-num-warnings",
104 help="Check that the upper bound on the number of warnings is respected.",
106 @CommandArgument("--verbose", action
="store_true", help="Run Sphinx in verbose mode")
110 help="Disable generating Python/JS API documentation",
126 enable_fatal_warnings
=False,
127 check_num_warnings
=False,
131 # TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
134 setup_helper
.set_project_root(command_context
.topsrcdir
)
136 if not setup_helper
.check_node_executables_valid():
139 setup_helper
.eslint_maybe_setup()
141 # Set the path so that Sphinx can find jsdoc, unfortunately there isn't
142 # a way to pass this to Sphinx itself at the moment.
143 os
.environ
["PATH"] = (
144 mozpath
.join(command_context
.topsrcdir
, "node_modules", ".bin")
153 from livereload
import Server
155 from moztreedocs
.package
import create_tarball
157 unique_id
= "%s/%s" % (project(), str(uuid
.uuid1()))
159 outdir
= outdir
or os
.path
.join(command_context
.topobjdir
, "docs")
160 savedir
= os
.path
.join(outdir
, fmt
)
163 path
= command_context
.topsrcdir
164 if os
.environ
.get("MOZ_AUTOMATION") != "1":
166 "\nBuilding the full documentation tree.\n"
167 "Did you mean to only build part of the documentation?\n"
168 "For a faster command, consider running:\n"
169 " ./mach doc path/to/docs\n"
171 path
= os
.path
.normpath(os
.path
.abspath(path
))
173 docdir
= _find_doc_dir(path
)
175 print(_dump_sphinx_backtrace())
177 "failed to generate documentation:\n"
178 "%s: could not find docs at this location" % path
182 # We want to verify if the links are valid or not
185 if check_num_warnings
:
187 "'--no-autodoc' flag may not be used with '--check-num-warnings'"
191 status
, warnings
= _run_sphinx(docdir
, savedir
, fmt
=fmt
, jobs
=jobs
, verbose
=verbose
)
193 print(_dump_sphinx_backtrace())
195 "failed to generate documentation:\n"
196 "%s: sphinx return code %d" % (path
, status
)
199 print("\nGenerated documentation:\n%s" % savedir
)
202 if enable_fatal_warnings
:
203 fatal_warnings
= _check_sphinx_fatal_warnings(warnings
)
205 msg
+= f
"Error: Got fatal warnings:\n{''.join(fatal_warnings)}"
206 if check_num_warnings
:
207 [num_new
, num_actual
] = _check_sphinx_num_warnings(warnings
)
208 print("Logged %s warnings\n" % num_actual
)
210 msg
+= f
"Error: {num_new} new warnings have been introduced compared to the limit in docs/config.yml"
212 return dieWithTestFailure(msg
)
214 # Upload the artifact containing the link to S3
215 # This would be used by code-review to post the link to Phabricator
216 if write_url
is not None:
217 unique_link
= BASE_LINK
+ unique_id
+ "/index.html"
218 with
open(write_url
, "w") as fp
:
219 fp
.write(unique_link
)
221 print("Generated " + write_url
)
223 if dump_trees
is not None:
224 parent
= os
.path
.dirname(dump_trees
)
225 if parent
and not os
.path
.isdir(parent
):
227 with
open(dump_trees
, "w") as fh
:
228 json
.dump(manager().trees
, fh
)
231 archive_path
= os
.path
.join(outdir
, "%s.tar.gz" % project())
232 create_tarball(archive_path
, savedir
)
233 print("Archived to %s" % archive_path
)
236 _s3_upload(savedir
, project(), unique_id
, version())
239 index_path
= os
.path
.join(savedir
, "index.html")
240 if auto_open
and os
.path
.isfile(index_path
):
241 webbrowser
.open(index_path
)
244 # Create livereload server. Any files modified in the specified docdir
245 # will cause a re-build and refresh of the browser (if open).
247 host
, port
= http
.split(":", 1)
250 return die("invalid address: %s" % http
)
254 sphinx_trees
= manager().trees
or {savedir
: docdir
}
255 for _
, src
in sphinx_trees
.items():
256 run_sphinx
= partial(
257 _run_sphinx
, src
, savedir
, fmt
=fmt
, jobs
=jobs
, verbose
=verbose
259 server
.watch(src
, run_sphinx
)
264 open_url_delay
=0.1 if auto_open
else None,
268 def _dump_sphinx_backtrace():
270 If there is a sphinx dump file, read and return
272 By default, it isn't displayed.
274 pattern
= "sphinx-err-*"
278 if not os
.path
.isdir(tmpdir
):
279 # Only run it on Linux
281 files
= os
.listdir(tmpdir
)
283 if fnmatch
.fnmatch(name
, pattern
):
284 pathFile
= os
.path
.join(tmpdir
, name
)
285 stat
= os
.stat(pathFile
)
286 output
+= "Name: {0} / Creation date: {1}\n".format(
287 pathFile
, time
.ctime(stat
.st_mtime
)
289 with
open(pathFile
) as f
:
294 def _run_sphinx(docdir
, savedir
, config
=None, fmt
="html", jobs
=None, verbose
=None):
295 import sphinx
.cmd
.build
297 config
= config
or manager().conf_py_path
298 # When running sphinx with sentry, it adds significant overhead
299 # and makes the build generation very very very slow
300 # So, disable it to generate the doc faster
301 sentry_sdk
.init(None)
302 warn_fd
, warn_path
= tempfile
.mkstemp()
310 os
.path
.dirname(config
),
317 args
.extend(["-j", jobs
])
319 args
.extend(["-v", "-v"])
320 print("Run sphinx with:")
322 status
= sphinx
.cmd
.build
.build_main(args
)
323 with
open(warn_path
) as warn_file
:
324 warnings
= warn_file
.readlines()
325 return status
, warnings
329 except Exception as ex
:
def _check_sphinx_fatal_warnings(warnings):
    """Return the subset of *warnings* that match a configured fatal pattern.

    The regex patterns are loaded from the "fatal warnings" list in
    docs/config.yml; any warning line matched by one of them is returned.
    """
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        fatal_warnings_src = yaml.safe_load(fh)["fatal warnings"]
    fatal_warnings_regex = [re.compile(item) for item in fatal_warnings_src]
    # Accumulator must be bound before the loop; the visible code appended
    # to it without ever initializing it, raising NameError on first match.
    fatal_warnings = []
    for warning in warnings:
        if any(item.search(warning) for item in fatal_warnings_regex):
            fatal_warnings.append(warning)
    return fatal_warnings
def _check_sphinx_num_warnings(warnings):
    """Check the warning count against the cap configured in docs/config.yml.

    Returns a two-element list ``[num_over_limit, num_actual]`` where the
    first entry is 0 when the count is within the "max_num_warnings" bound.
    """
    # The warnings file contains other output as well; count only the lines
    # that are real sphinx WARNINGs.
    num_warnings = sum(1 for line in warnings if "WARNING" in line)
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as config_file:
        max_num = yaml.safe_load(config_file)["max_num_warnings"]
    if num_warnings <= max_num:
        return [0, num_warnings]
    return [num_warnings - max_num, num_warnings]
355 from moztreedocs
import manager
360 def toggle_no_autodoc():
363 moztreedocs
._SphinxManager
.NO_AUTODOC
= True
367 def _read_project_properties():
368 path
= os
.path
.normpath(manager().conf_py_path
)
369 conf
= load_source("doc_conf", path
)
371 # Prefer the Mozilla project name, falling back to Sphinx's
372 # default variable if it isn't defined.
373 project
= getattr(conf
, "moz_project_name", None)
375 project
= conf
.project
.replace(" ", "_")
377 return {"project": project
, "version": getattr(conf
, "version", None)}
381 return _read_project_properties()["project"]
385 return _read_project_properties()["version"]
389 from mozbuild
.nodeutil
import find_node_executable
391 node
, _
= find_node_executable()
393 return os
.path
.dirname(node
)
396 def _find_doc_dir(path
):
397 if os
.path
.isfile(path
):
400 valid_doc_dirs
= ("doc", "docs")
401 for d
in valid_doc_dirs
:
402 p
= os
.path
.join(path
, d
)
406 for index_file
in ["index.rst", "index.md"]:
407 if os
.path
.exists(os
.path
.join(path
, index_file
)):
411 def _s3_upload(root
, project
, unique_id
, version
=None):
412 # Workaround the issue
413 # BlockingIOError: [Errno 11] write could not complete without blocking
414 # https://github.com/travis-ci/travis-ci/issues/8920
417 from moztreedocs
.package
import distribution_files
418 from moztreedocs
.upload
import s3_set_redirects
, s3_upload
420 fcntl
.fcntl(1, fcntl
.F_SETFL
, 0)
422 # Files are uploaded to multiple locations:
425 # <project>/<version>
427 # This allows multiple projects and versions to be stored in the
430 files
= list(distribution_files(root
))
433 key_prefixes
.append("%s/%s" % (project
, version
))
435 # Until we redirect / to main/latest, upload the main docs
437 if project
== "main":
438 key_prefixes
.append("")
440 key_prefixes
.append(unique_id
)
442 with
open(os
.path
.join(DOC_ROOT
, "config.yml"), "r") as fh
:
443 redirects
= yaml
.safe_load(fh
)["redirects"]
445 redirects
= {k
.strip("/"): v
.strip("/") for k
, v
in redirects
.items()}
449 for prefix
in key_prefixes
:
450 s3_upload(files
, prefix
)
452 # Don't setup redirects for the "version" or "uuid" prefixes since
453 # we are exceeding a 50 redirect limit and external things are
454 # unlikely to link there anyway (see bug 1614908).
455 if (version
and prefix
.endswith(version
)) or prefix
== unique_id
:
460 all_redirects
.update({prefix
+ k
: prefix
+ v
for k
, v
in redirects
.items()})
462 print("Redirects currently staged")
463 pprint(all_redirects
, indent
=1)
465 s3_set_redirects(all_redirects
)
467 unique_link
= BASE_LINK
+ unique_id
+ "/index.html"
468 print("Uploaded documentation can be accessed here " + unique_link
)
474 description
="Generate documentation from Glean metrics.yaml files",
476 def generate_telemetry_docs(command_context
):
484 os
.path
.join(topsrcdir
, "python/mach/docs/"),
485 os
.path
.join(topsrcdir
, "python/mach/pings.yaml"),
486 os
.path
.join(topsrcdir
, "python/mach/metrics.yaml"),
490 for handler
in Registrar
.command_handlers
.values()
491 if handler
.metrics_path
is not None
494 [os
.path
.join(command_context
.topsrcdir
, path
) for path
in set(metrics_paths
)]
496 subprocess
.check_call(args
)
502 description
="List all reference targets. Requires the docs to have been built.",
505 "--format", default
="html", dest
="fmt", help="Documentation format used."
508 "--outdir", default
=None, metavar
="DESTINATION", help="Where output was written."
510 def show_reference_targets(command_context
, fmt
="html", outdir
=None):
511 command_context
.activate_virtualenv()
512 command_context
.virtualenv_manager
.install_pip_requirements(
513 os
.path
.join(here
, "requirements.txt")
516 import sphinx
.ext
.intersphinx
518 outdir
= outdir
or os
.path
.join(command_context
.topobjdir
, "docs")
519 inv_path
= os
.path
.join(outdir
, fmt
, "objects.inv")
521 if not os
.path
.exists(inv_path
):
523 "object inventory not found: {inv_path}.\n"
524 "Rebuild the docs and rerun this command"
526 sphinx
.ext
.intersphinx
.inspect_main([inv_path
])
529 def die(msg
, exit_code
=1):
530 msg
= "%s %s: %s" % (sys
.argv
[0], sys
.argv
[1], msg
)
531 print(msg
, file=sys
.stderr
)
535 def dieWithTestFailure(msg
, exit_code
=1):
536 for m
in msg
.split("\n"):
537 msg
= "TEST-UNEXPECTED-FAILURE | %s %s | %s" % (sys
.argv
[0], sys
.argv
[1], m
)
538 print(msg
, file=sys
.stderr
)