1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
14 from functools
import partial
15 from pprint
import pprint
17 import mozpack
.path
as mozpath
20 from mach
.decorators
import Command
, CommandArgument
, SubCommand
21 from mach
.registrar
import Registrar
22 from mozbuild
.util
import cpu_count
, memoize
23 from mozfile
import load_source
# Absolute path of the directory containing this file.
here = os.path.abspath(os.path.dirname(__file__))

# Two directory levels above `here`; used below as the root of the source
# tree (docs/, python/mach/, python/mozbuild/ are joined onto it).
topsrcdir = os.path.abspath(os.path.dirname(os.path.dirname(here)))

# Root of the in-tree documentation; `config.yml` (fatal warnings, warning
# limits, redirects) is read from this directory.
DOC_ROOT = os.path.join(topsrcdir, "docs")

# Base URL of the S3 website bucket that generated docs are uploaded to;
# combined with a per-build unique id to form the final link.
BASE_LINK = "http://gecko-docs.mozilla.org-l1.s3-website.us-west-2.amazonaws.com/"
31 # Helps manage in-tree documentation.
37 virtualenv_name
="docs",
38 description
="Generate and serve documentation from the tree.",
45 help="Path to documentation to build and display.",
48 "--format", default
="html", dest
="fmt", help="Documentation format to write."
51 "--outdir", default
=None, metavar
="DESTINATION", help="Where to write output."
56 help="Write a gzipped tarball of generated docs.",
63 help="Don't automatically open HTML docs in a browser.",
70 help="Don't serve the generated docs after building.",
74 default
="localhost:5500",
76 help="Serve documentation on the specified host and port, "
77 'default "localhost:5500".',
79 @CommandArgument("--upload", action
="store_true", help="Upload generated files to S3.")
83 default
=str(cpu_count()),
85 help="Distribute the build over N processes in parallel.",
87 @CommandArgument("--write-url", default
=None, help="Write S3 Upload URL to text file")
89 "--linkcheck", action
="store_true", help="Check if the links are still valid"
92 "--dump-trees", default
=None, help="Dump the Sphinx trees to specified file."
96 dest
="enable_fatal_warnings",
98 help="Enable fatal warnings.",
101 "--check-num-warnings",
103 help="Check that the upper bound on the number of warnings is respected.",
105 @CommandArgument("--verbose", action
="store_true", help="Run Sphinx in verbose mode")
109 help="Disable generating Python/JS API documentation",
125 enable_fatal_warnings
=False,
126 check_num_warnings
=False,
130 # TODO: Bug 1704891 - move the ESLint setup tools to a shared place.
133 setup_helper
.set_project_root(command_context
.topsrcdir
)
135 if not setup_helper
.check_node_executables_valid():
138 setup_helper
.eslint_maybe_setup()
140 # Set the path so that Sphinx can find jsdoc, unfortunately there isn't
141 # a way to pass this to Sphinx itself at the moment.
142 os
.environ
["PATH"] = (
143 mozpath
.join(command_context
.topsrcdir
, "node_modules", ".bin")
152 from livereload
import Server
154 from moztreedocs
.package
import create_tarball
156 unique_id
= "%s/%s" % (project(), str(uuid
.uuid1()))
158 outdir
= outdir
or os
.path
.join(command_context
.topobjdir
, "docs")
159 savedir
= os
.path
.join(outdir
, fmt
)
162 path
= command_context
.topsrcdir
163 if os
.environ
.get("MOZ_AUTOMATION") != "1":
165 "\nBuilding the full documentation tree.\n"
166 "Did you mean to only build part of the documentation?\n"
167 "For a faster command, consider running:\n"
168 " ./mach doc path/to/docs\n"
170 path
= os
.path
.normpath(os
.path
.abspath(path
))
172 docdir
= _find_doc_dir(path
)
174 print(_dump_sphinx_backtrace())
176 "failed to generate documentation:\n"
177 "%s: could not find docs at this location" % path
181 # We want to verify if the links are valid or not
184 if check_num_warnings
:
186 "'--no-autodoc' flag may not be used with '--check-num-warnings'"
190 status
, warnings
= _run_sphinx(docdir
, savedir
, fmt
=fmt
, jobs
=jobs
, verbose
=verbose
)
192 print(_dump_sphinx_backtrace())
194 "failed to generate documentation:\n"
195 "%s: sphinx return code %d" % (path
, status
)
198 print("\nGenerated documentation:\n%s" % savedir
)
201 if enable_fatal_warnings
:
202 fatal_warnings
= _check_sphinx_fatal_warnings(warnings
)
204 msg
+= f
"Error: Got fatal warnings:\n{''.join(fatal_warnings)}"
205 if check_num_warnings
:
206 [num_new
, num_actual
] = _check_sphinx_num_warnings(warnings
)
207 print("Logged %s warnings\n" % num_actual
)
209 msg
+= f
"Error: {num_new} new warnings have been introduced compared to the limit in docs/config.yml"
211 return dieWithTestFailure(msg
)
213 # Upload the artifact containing the link to S3
214 # This would be used by code-review to post the link to Phabricator
215 if write_url
is not None:
216 unique_link
= BASE_LINK
+ unique_id
+ "/index.html"
217 with
open(write_url
, "w") as fp
:
218 fp
.write(unique_link
)
220 print("Generated " + write_url
)
222 if dump_trees
is not None:
223 parent
= os
.path
.dirname(dump_trees
)
224 if parent
and not os
.path
.isdir(parent
):
226 with
open(dump_trees
, "w") as fh
:
227 json
.dump(manager().trees
, fh
)
230 archive_path
= os
.path
.join(outdir
, "%s.tar.gz" % project())
231 create_tarball(archive_path
, savedir
)
232 print("Archived to %s" % archive_path
)
235 _s3_upload(savedir
, project(), unique_id
, version())
238 index_path
= os
.path
.join(savedir
, "index.html")
239 if auto_open
and os
.path
.isfile(index_path
):
240 webbrowser
.open(index_path
)
243 # Create livereload server. Any files modified in the specified docdir
244 # will cause a re-build and refresh of the browser (if open).
246 host
, port
= http
.split(":", 1)
249 return die("invalid address: %s" % http
)
253 sphinx_trees
= manager().trees
or {savedir
: docdir
}
254 for _
, src
in sphinx_trees
.items():
255 run_sphinx
= partial(
256 _run_sphinx
, src
, savedir
, fmt
=fmt
, jobs
=jobs
, verbose
=verbose
258 server
.watch(src
, run_sphinx
)
263 open_url_delay
=0.1 if auto_open
else None,
267 def _dump_sphinx_backtrace():
269 If there is a sphinx dump file, read and return
271 By default, it isn't displayed.
273 pattern
= "sphinx-err-*"
277 if not os
.path
.isdir(tmpdir
):
278 # Only run it on Linux
280 files
= os
.listdir(tmpdir
)
282 if fnmatch
.fnmatch(name
, pattern
):
283 pathFile
= os
.path
.join(tmpdir
, name
)
284 stat
= os
.stat(pathFile
)
285 output
+= "Name: {0} / Creation date: {1}\n".format(
286 pathFile
, time
.ctime(stat
.st_mtime
)
288 with
open(pathFile
) as f
:
293 def _run_sphinx(docdir
, savedir
, config
=None, fmt
="html", jobs
=None, verbose
=None):
294 import sphinx
.cmd
.build
296 config
= config
or manager().conf_py_path
297 # When running sphinx with sentry, it adds significant overhead
298 # and makes the build generation very very very slow
299 # So, disable it to generate the doc faster
300 sentry_sdk
.init(None)
301 warn_fd
, warn_path
= tempfile
.mkstemp()
309 os
.path
.dirname(config
),
316 args
.extend(["-j", jobs
])
318 args
.extend(["-v", "-v"])
319 print("Run sphinx with:")
321 status
= sphinx
.cmd
.build
.build_main(args
)
322 with
open(warn_path
) as warn_file
:
323 warnings
= warn_file
.readlines()
324 return status
, warnings
328 except Exception as ex
:
def _check_sphinx_fatal_warnings(warnings):
    """Return the subset of *warnings* that match a "fatal warnings" pattern.

    The regex patterns are loaded from the "fatal warnings" key of
    docs/config.yml; every line of *warnings* that matches any of them is
    collected and returned.
    """
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        fatal_warnings_src = yaml.safe_load(fh)["fatal warnings"]
    fatal_warnings_regex = [re.compile(item) for item in fatal_warnings_src]
    # NOTE(review): `fatal_warnings` is never initialized in this view of the
    # file — presumably a `fatal_warnings = []` line precedes this loop in the
    # full source; confirm before relying on this block in isolation.
    for warning in warnings:
        if any(item.search(warning) for item in fatal_warnings_regex):
            fatal_warnings.append(warning)
    return fatal_warnings
def _check_sphinx_num_warnings(warnings):
    """Check the Sphinx warning count against the limit in docs/config.yml.

    Returns a two-element list ``[num_new, num_actual]`` where ``num_actual``
    is the number of lines in *warnings* containing "WARNING" and ``num_new``
    is how far that count exceeds the configured ``max_num_warnings`` limit
    (0 when the limit is respected).
    """
    # The warnings file contains other diagnostics too; count only the
    # lines that are actual warnings.
    actual = sum(1 for line in warnings if "WARNING" in line)
    with open(os.path.join(DOC_ROOT, "config.yml"), "r") as fh:
        limit = yaml.safe_load(fh)["max_num_warnings"]
    excess = actual - limit
    if excess > 0:
        return [excess, actual]
    return [0, actual]
354 from moztreedocs
import manager
359 def toggle_no_autodoc():
362 moztreedocs
._SphinxManager
.NO_AUTODOC
= True
366 def _read_project_properties():
367 path
= os
.path
.normpath(manager().conf_py_path
)
368 conf
= load_source("doc_conf", path
)
370 # Prefer the Mozilla project name, falling back to Sphinx's
371 # default variable if it isn't defined.
372 project
= getattr(conf
, "moz_project_name", None)
374 project
= conf
.project
.replace(" ", "_")
376 return {"project": project
, "version": getattr(conf
, "version", None)}
380 return _read_project_properties()["project"]
384 return _read_project_properties()["version"]
388 from mozbuild
.nodeutil
import find_node_executable
390 node
, _
= find_node_executable()
392 return os
.path
.dirname(node
)
395 def _find_doc_dir(path
):
396 if os
.path
.isfile(path
):
399 valid_doc_dirs
= ("doc", "docs")
400 for d
in valid_doc_dirs
:
401 p
= os
.path
.join(path
, d
)
405 for index_file
in ["index.rst", "index.md"]:
406 if os
.path
.exists(os
.path
.join(path
, index_file
)):
410 def _s3_upload(root
, project
, unique_id
, version
=None):
411 # Workaround the issue
412 # BlockingIOError: [Errno 11] write could not complete without blocking
413 # https://github.com/travis-ci/travis-ci/issues/8920
416 from moztreedocs
.package
import distribution_files
417 from moztreedocs
.upload
import s3_set_redirects
, s3_upload
419 fcntl
.fcntl(1, fcntl
.F_SETFL
, 0)
421 # Files are uploaded to multiple locations:
424 # <project>/<version>
426 # This allows multiple projects and versions to be stored in the
429 files
= list(distribution_files(root
))
432 key_prefixes
.append("%s/%s" % (project
, version
))
434 # Until we redirect / to main/latest, upload the main docs
436 if project
== "main":
437 key_prefixes
.append("")
439 key_prefixes
.append(unique_id
)
441 with
open(os
.path
.join(DOC_ROOT
, "config.yml"), "r") as fh
:
442 redirects
= yaml
.safe_load(fh
)["redirects"]
444 redirects
= {k
.strip("/"): v
.strip("/") for k
, v
in redirects
.items()}
448 for prefix
in key_prefixes
:
449 s3_upload(files
, prefix
)
451 # Don't setup redirects for the "version" or "uuid" prefixes since
452 # we are exceeding a 50 redirect limit and external things are
453 # unlikely to link there anyway (see bug 1614908).
454 if (version
and prefix
.endswith(version
)) or prefix
== unique_id
:
459 all_redirects
.update({prefix
+ k
: prefix
+ v
for k
, v
in redirects
.items()})
461 print("Redirects currently staged")
462 pprint(all_redirects
, indent
=1)
464 s3_set_redirects(all_redirects
)
466 unique_link
= BASE_LINK
+ unique_id
+ "/index.html"
467 print("Uploaded documentation can be accessed here " + unique_link
)
473 description
="Generate documentation from Glean metrics.yaml files",
475 def generate_telemetry_docs(command_context
):
483 os
.path
.join(topsrcdir
, "python/mach/docs/"),
484 os
.path
.join(topsrcdir
, "python/mach/pings.yaml"),
485 os
.path
.join(topsrcdir
, "python/mach/metrics.yaml"),
486 os
.path
.join(topsrcdir
, "python/mozbuild/metrics.yaml"),
490 for handler
in Registrar
.command_handlers
.values()
491 if handler
.metrics_path
is not None
494 [os
.path
.join(command_context
.topsrcdir
, path
) for path
in set(metrics_paths
)]
496 subprocess
.check_call(args
)
502 description
="List all reference targets. Requires the docs to have been built.",
505 "--format", default
="html", dest
="fmt", help="Documentation format used."
508 "--outdir", default
=None, metavar
="DESTINATION", help="Where output was written."
510 def show_reference_targets(command_context
, fmt
="html", outdir
=None):
511 command_context
.activate_virtualenv()
512 command_context
.virtualenv_manager
.install_pip_requirements(
513 os
.path
.join(here
, "requirements.txt")
516 import sphinx
.ext
.intersphinx
518 outdir
= outdir
or os
.path
.join(command_context
.topobjdir
, "docs")
519 inv_path
= os
.path
.join(outdir
, fmt
, "objects.inv")
521 if not os
.path
.exists(inv_path
):
523 "object inventory not found: {inv_path}.\n"
524 "Rebuild the docs and rerun this command"
526 sphinx
.ext
.intersphinx
.inspect_main([inv_path
])
def die(msg, exit_code=1):
    """Print an error message to stderr, prefixed with the program name and
    its first command-line argument (i.e. the mach command being run).

    NOTE(review): only the formatting/printing is visible in this view of the
    file; the use of *exit_code* (presumably ``sys.exit(exit_code)``) is in
    lines not shown here — confirm against the full source.
    """
    msg = "%s %s: %s" % (sys.argv[0], sys.argv[1], msg)
    print(msg, file=sys.stderr)
def dieWithTestFailure(msg, exit_code=1):
    """Print each line of *msg* to stderr in Treeherder's
    ``TEST-UNEXPECTED-FAILURE | <prog> <command> | <line>`` format.

    NOTE(review): only the formatting/printing loop is visible in this view
    of the file; the use of *exit_code* (presumably ``sys.exit(exit_code)``)
    is in lines not shown here — confirm against the full source.
    """
    for m in msg.split("\n"):
        # Rebinds the `msg` parameter each iteration; only the formatted
        # string is printed, so the original value is not needed afterwards.
        msg = "TEST-UNEXPECTED-FAILURE | %s %s | %s" % (sys.argv[0], sys.argv[1], m)
        print(msg, file=sys.stderr)