Bug 1858509 add thread-safety annotations around MediaSourceDemuxer::mMonitor r=alwu
[gecko.git] / tools / moztreedocs / __init__.py
blob3c54d4c45dcb931c9df1f2397de5f51f0e1d45b1
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
5 import os
6 import tempfile
7 from pathlib import PurePath
9 import frontmatter
10 import sphinx
11 import sphinx.ext.apidoc
12 import yaml
13 from mozbuild.base import MozbuildObject
14 from mozbuild.frontend.reader import BuildReader
15 from mozbuild.util import memoize
16 from mozpack.copier import FileCopier
17 from mozpack.files import FileFinder
18 from mozpack.manifests import InstallManifest
# Absolute path of the directory containing this file.
here = os.path.abspath(os.path.dirname(__file__))
# Build-system context derived from the current tree; provides topsrcdir
# and access to the active build configuration.
build = MozbuildObject.from_environment(cwd=here)

# Root of the main documentation tree: <topsrcdir>/docs.
MAIN_DOC_PATH = os.path.normpath(os.path.join(build.topsrcdir, "docs"))

# Sphinx-aware logger so messages integrate with Sphinx's output handling.
logger = sphinx.util.logging.getLogger(__name__)
@memoize
def read_build_config(docdir):
    """Read the active build config and return the relevant doc paths.

    The return value is cached so re-generating with the same docdir won't
    invoke the build system a second time.

    Returns a 2-tuple ``(trees, python_package_dirs)`` where ``trees`` maps
    a destination doc path to its source directory (relative to topsrcdir)
    and ``python_package_dirs`` is the set of SPHINX_PYTHON_PACKAGE_DIRS
    entries found.
    """
    trees = {}
    python_package_dirs = set()

    # Building the main docs tree means we scan every moz.build file;
    # otherwise restrict the reader to the requested subtree.
    is_main = docdir == MAIN_DOC_PATH
    relevant_mozbuild_path = None if is_main else docdir

    # Reading the Sphinx variables doesn't require a full build context.
    # Only define the parts we need.
    class fakeconfig(object):
        topsrcdir = build.topsrcdir

    variables = ("SPHINX_TREES", "SPHINX_PYTHON_PACKAGE_DIRS")
    reader = BuildReader(fakeconfig())
    result = reader.find_variables_from_ast(variables, path=relevant_mozbuild_path)
    for path, name, key, value in result:
        # Directory of the moz.build that declared the variable.
        reldir = os.path.dirname(path)

        if name == "SPHINX_TREES":
            # If we're building a subtree, only process that specific subtree.
            # topsrcdir always uses POSIX-style path, normalize it for proper comparison.
            absdir = os.path.normpath(os.path.join(build.topsrcdir, reldir, value))
            if not is_main and absdir not in (docdir, MAIN_DOC_PATH):
                # allow subpaths of absdir (i.e. docdir = <absdir>/sub/path/)
                if docdir.startswith(absdir):
                    # Extend the destination key with the sub-path portion
                    # of docdir so the staged tree mirrors the request.
                    key = os.path.join(key, docdir.split(f"{key}/")[-1])
                else:
                    continue

            assert key
            if key.startswith("/"):
                # Leading "/" means the key is already rooted at the doc
                # tree top; drop the slash.
                key = key[1:]
            else:
                # Relative keys are anchored at the declaring moz.build dir.
                key = os.path.normpath(os.path.join(reldir, key))

            if key in trees:
                raise Exception(
                    "%s has already been registered as a destination." % key
                )
            trees[key] = os.path.join(reldir, value)

        if name == "SPHINX_PYTHON_PACKAGE_DIRS":
            python_package_dirs.add(os.path.join(reldir, value))

    return trees, python_package_dirs
80 class _SphinxManager(object):
81 """Manages the generation of Sphinx documentation for the tree."""
83 NO_AUTODOC = False
85 def __init__(self, topsrcdir, main_path):
86 self.topsrcdir = topsrcdir
87 self.conf_py_path = os.path.join(main_path, "conf.py")
88 self.index_path = os.path.join(main_path, "index.rst")
90 # Instance variables that get set in self.generate_docs()
91 self.staging_dir = None
92 self.trees = None
93 self.python_package_dirs = None
95 def generate_docs(self, app):
96 """Generate/stage documentation."""
97 if self.NO_AUTODOC:
98 logger.info("Python/JS API documentation generation will be skipped")
99 app.config["extensions"].remove("sphinx.ext.autodoc")
100 app.config["extensions"].remove("sphinx_js")
101 self.staging_dir = os.path.join(app.outdir, "_staging")
103 logger.info("Reading Sphinx metadata from build configuration")
104 self.trees, self.python_package_dirs = read_build_config(app.srcdir)
106 logger.info("Staging static documentation")
107 self._synchronize_docs(app)
109 if not self.NO_AUTODOC:
110 self._generate_python_api_docs()
112 def _generate_python_api_docs(self):
113 """Generate Python API doc files."""
114 out_dir = os.path.join(self.staging_dir, "python")
115 base_args = ["--no-toc", "-o", out_dir]
117 for p in sorted(self.python_package_dirs):
118 full = os.path.join(self.topsrcdir, p)
120 finder = FileFinder(full)
121 dirs = {os.path.dirname(f[0]) for f in finder.find("**")}
123 test_dirs = {"test", "tests"}
124 excludes = {d for d in dirs if set(PurePath(d).parts) & test_dirs}
126 args = list(base_args)
127 args.append(full)
128 args.extend(excludes)
130 sphinx.ext.apidoc.main(argv=args)
132 def _process_markdown(self, m, markdown_file, dest):
134 When dealing with a markdown file, we check if we have a front matter.
135 If this is the case, we read the information, create a temporary file,
136 reuse the front matter info into the md file
138 with open(markdown_file, "r", encoding="utf_8") as f:
139 # Load the front matter header
140 post = frontmatter.load(f)
141 if len(post.keys()) > 0:
142 # Has a front matter, use it
143 with tempfile.NamedTemporaryFile("w", delete=False) as fh:
144 # Use the frontmatter title
145 fh.write(post["title"] + "\n")
146 # Add the md syntax for the title
147 fh.write("=" * len(post["title"]) + "\n")
148 # If there is a summary, add it
149 if "summary" in post:
150 fh.write(post["summary"] + "\n")
151 # Write the content
152 fh.write(post.__str__())
153 fh.close()
154 # Instead of a symlink, we copy the file
155 m.add_copy(fh.name, dest)
156 else:
157 # No front matter, create the symlink like for rst
158 # as it will be the the same file
159 m.add_link(markdown_file, dest)
161 def _synchronize_docs(self, app):
162 m = InstallManifest()
164 with open(os.path.join(MAIN_DOC_PATH, "config.yml"), "r") as fh:
165 tree_config = yaml.safe_load(fh)["categories"]
167 m.add_link(self.conf_py_path, "conf.py")
169 for dest, source in sorted(self.trees.items()):
170 source_dir = os.path.join(self.topsrcdir, source)
171 for root, _, files in os.walk(source_dir):
172 for f in files:
173 source_path = os.path.normpath(os.path.join(root, f))
174 rel_source = source_path[len(source_dir) + 1 :]
175 target = os.path.normpath(os.path.join(dest, rel_source))
176 if source_path.endswith(".md"):
177 self._process_markdown(
178 m, source_path, os.path.join(".", target)
180 else:
181 m.add_link(source_path, target)
183 copier = FileCopier()
184 m.populate_registry(copier)
186 # In the case of livereload, we don't want to delete unmodified (unaccounted) files.
187 copier.copy(
188 self.staging_dir, remove_empty_directories=False, remove_unaccounted=False
191 with open(self.index_path, "r") as fh:
192 data = fh.read()
194 def is_toplevel(key):
195 """Whether the tree is nested under the toplevel index, or is
196 nested under another tree's index.
198 for k in self.trees:
199 if k == key:
200 continue
201 if key.startswith(k):
202 return False
203 return True
205 def format_paths(paths):
206 source_doc = ["%s/index" % p for p in paths]
207 return "\n ".join(source_doc)
209 toplevel_trees = {k: v for k, v in self.trees.items() if is_toplevel(k)}
211 CATEGORIES = {}
212 # generate the datastructure to deal with the tree
213 for t in tree_config:
214 CATEGORIES[t] = format_paths(tree_config[t])
216 # During livereload, we don't correctly rebuild the full document
217 # tree (Bug 1557020). The page is no longer referenced within the index
218 # tree, thus we shall check categorisation only if complete tree is being rebuilt.
219 if app.srcdir == self.topsrcdir:
220 indexes = set(
222 os.path.normpath(os.path.join(p, "index"))
223 for p in toplevel_trees.keys()
226 # Format categories like indexes
227 cats = "\n".join(CATEGORIES.values()).split("\n")
228 # Remove heading spaces
229 cats = [os.path.normpath(x.strip()) for x in cats]
230 indexes = tuple(set(indexes) - set(cats))
231 if indexes:
232 # In case a new doc isn't categorized
233 print(indexes)
234 raise Exception(
235 "Uncategorized documentation. Please add it in docs/config.yml"
238 data = data.format(**CATEGORIES)
240 with open(os.path.join(self.staging_dir, "index.rst"), "w") as fh:
241 fh.write(data)
244 manager = _SphinxManager(build.topsrcdir, MAIN_DOC_PATH)