BuilderDoxygen.py
# coding: utf-8


#
# Astxx, the Asterisk C++ API and Utility Library.
# Copyright (C)
#   2005, 2006, Matthew A. Nicholson
#   2006, Tim Blechmann
#   2007, Christoph Boehme
#   2012, Dirk Baechle
#   2013, Russel Winder
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation.


# history: 8/2008 adapted for Lumiera build system
#                 added patch for Doxyfile in subdirectory
#                 see http://www.scons.org/wiki/DoxygenBuilder
#          4/2025 Upgrade to latest upstream version for Python-3


import os
import os.path
import glob
from fnmatch import fnmatch
from functools import reduce

# Currently supported output formats and their default
# values and output locations.
# From left to right:
# 1. default setting YES|NO
# 2. default output folder for this format
# 3. name of the (main) output file
# 4. default extension
# 5. field for overriding the output file extension
output_formats = {
    "HTML": ("YES", "html", "index", ".html", "HTML_FILE_EXTENSION"),
    "LATEX": ("YES", "latex", "refman", ".tex", ""),
    "RTF": ("NO", "rtf", "refman", ".rtf", ""),
    "MAN": ("NO", "man", "", ".3", "MAN_EXTENSION"),
    "XML": ("NO", "xml", "index", ".xml", ""),
}
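
# For example, the "HTML" entry unpacks as follows (the variable names here
# are purely illustrative):
#
#     enabled, out_dir, main_file, ext, override_key = output_formats["HTML"]
#     # -> ("YES", "html", "index", ".html", "HTML_FILE_EXTENSION")
#
# i.e. HTML output is generated by default, is written below
# <OUTPUT_DIRECTORY>/html with "index.html" as its main file, and the file
# extension can be overridden through the HTML_FILE_EXTENSION Doxyfile setting.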


def DoxyfileParse(file_contents, conf_dir, data=None):
    """
    Parse a Doxygen source file and return a dictionary of all the values.
    Values will be strings and lists of strings.
    """
    if data is None:
        data = {}

    import shlex

    lex = shlex.shlex(instream=file_contents, posix=True)
    lex.wordchars += "*+./-:@"
    lex.whitespace = lex.whitespace.replace("\n", "")
    lex.escape = ""

    lineno = lex.lineno
    token = lex.get_token()
    key = None
    last_token = ""
    key_token = True  # The first token should be a key.
    next_key = False
    new_data = True

    def append_data(data, key, new_data, token):
        if new_data or len(data[key]) == 0:
            data[key].append(token)
        else:
            data[key][-1] += token

    while token:
        if token in ["\n"]:
            if last_token not in ["\\"]:
                key_token = True
        elif token in ["\\"]:
            pass
        elif key_token:
            key = token
            key_token = False
        else:
            if token == "+=":
                if key not in data:
                    data[key] = []
            elif token == "=":
                if key == "TAGFILES" and key in data:
                    append_data(data, key, False, "=")
                    new_data = False
                elif key == "@INCLUDE" and key in data:
                    # don't reset the @INCLUDE list when we see a new @INCLUDE line.
                    pass
                else:
                    data[key] = []
            elif key == "@INCLUDE":
                # special case for @INCLUDE key: read the referenced
                # file as a doxyfile too.
                nextfile = token
                if not os.path.isabs(nextfile):
                    nextfile = os.path.join(conf_dir, nextfile)
                if nextfile in data[key]:
                    raise Exception("recursive @INCLUDE in Doxygen config: " + nextfile)
                data[key].append(nextfile)
                with open(nextfile, "r") as fh:
                    DoxyfileParse(fh.read(), conf_dir, data)
            else:
                append_data(data, key, new_data, token)
                new_data = True

        last_token = token
        token = lex.get_token()

        if last_token == "\\" and token != "\n":
            new_data = False
            append_data(data, key, new_data, "\\")

    # compress lists of len 1 into single strings
    for (k, v) in list(data.items()):
        if len(v) == 0:
            data.pop(k)

        # items in the following list will be kept as lists and not converted to strings
        if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS", "TAGFILES", "@INCLUDE"]:
            continue

        if len(v) == 1:
            data[k] = v[0]

    return data
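
# A minimal sketch of what the parser yields for a small, hypothetical config
# fragment (the values below are illustrative, not taken from a Lumiera Doxyfile):
#
#     >>> cfg = "INPUT = src lib\nRECURSIVE = YES\nGENERATE_HTML = YES\n"
#     >>> conf = DoxyfileParse(cfg, ".")
#     >>> conf["INPUT"]
#     ['src', 'lib']
#     >>> conf["RECURSIVE"]
#     'YES'
#
# Keys named in the keep-as-list check above (INPUT, FILE_PATTERNS, ...)
# stay lists; all other single-valued keys are compressed to plain strings.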


def DoxySourceFiles(node, env):
    """
    Scan the given node's contents (a Doxygen file) and add
    any files used to generate docs to the list of source files.
    """
    default_file_patterns = [
        "*.c",
        "*.cc",
        "*.cxx",
        "*.cpp",
        "*.c++",
        "*.java",
        "*.ii",
        "*.ixx",
        "*.ipp",
        "*.i++",
        "*.inl",
        "*.h",
155 "*.hh ",
156 "*.hxx",
157 "*.hpp",
158 "*.h++",
159 "*.idl",
160 "*.odl",
161 "*.cs",
162 "*.php",
163 "*.php3",
164 "*.inc",
165 "*.m",
166 "*.mm",
167 "*.py",
168 ]
169
170 default_exclude_patterns = [
171 "*~",
172 ]
173
174 sources = []
175
176 # We're running in the top-level directory, but the doxygen
177 # configuration file is in the same directory as node; this means
178 # that relative pathnames in node must be adjusted before they can
179 # go onto the sources list
180 conf_dir = os.path.dirname(str(node))
181
182 data = DoxyfileParse(node.get_text_contents(), conf_dir)
183
184 if data.get("RECURSIVE", "NO") == "YES":
185 recursive = True
186 else:
187 recursive = False
188
189 file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
190 exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)
191
192 input = data.get("INPUT")
193 if input:
194 for node in data.get("INPUT", []):
195 if not os.path.isabs(node):
196 node = os.path.join(conf_dir, node)
197 if os.path.isfile(node):
198 sources.append(node)
199 elif os.path.isdir(node):
200 if recursive:
201 for root, dirs, files in os.walk(node):
202 for f in files:
203 filename = os.path.join(root, f)
204
205 pattern_check = reduce(
206 lambda x, y: x or bool(fnmatch(filename, y)),
207 file_patterns,
208 False,
209 )
210 exclude_check = reduce(
211 lambda x, y: x and fnmatch(filename, y),
212 exclude_patterns,
213 True,
214 )
215
216 if pattern_check and not exclude_check:
217 sources.append(filename)
218 else:
219 for pattern in file_patterns:
220 sources.extend(glob.glob("/".join([node, pattern])))
221 else:
222 # No INPUT specified, so apply plain patterns only
223 if recursive:
224 for root, dirs, files in os.walk("."):
225 for f in files:
226 filename = os.path.join(root, f)
227
228 pattern_check = reduce(
229 lambda x, y: x or bool(fnmatch(filename, y)),
230 file_patterns,
231 False,
232 )
233 exclude_check = reduce(
234 lambda x, y: x and fnmatch(filename, y), exclude_patterns, True
235 )
236
237 if pattern_check and not exclude_check:
238 sources.append(filename)
239 else:
240 for pattern in file_patterns:
241 sources.extend(glob.glob(pattern))
242
243 # Add @INCLUDEd files to the list of source files:
244 for node in data.get("@INCLUDE", []):
245 sources.append(node)
246
247 # Add tagfiles to the list of source files:
248 for node in data.get("TAGFILES", []):
249 file = node.split("=")[0]
250 if not os.path.isabs(file):
251 file = os.path.join(conf_dir, file)
252 sources.append(file)
253
254 # Add additional files to the list of source files:
255 def append_additional_source(option, formats):
256 for f in formats:
257 if data.get("GENERATE_" + f, output_formats[f][0]) == "YES":
258 file = data.get(option, "")
259 if file != "":
260 if not os.path.isabs(file):
261 file = os.path.join(conf_dir, file)
262 if os.path.isfile(file):
263 sources.append(file)
264 break
265
266 append_additional_source("HTML_STYLESHEET", ["HTML"])
267 append_additional_source("HTML_HEADER", ["HTML"])
268 append_additional_source("HTML_FOOTER", ["HTML"])
269
270 return sources
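
# DoxySourceFiles only needs str(node) and node.get_text_contents() (the env
# argument is not used here), so it can be exercised outside of SCons with a
# small stand-in object. A sketch; the class name and the "doc/Doxyfile" path
# are illustrative assumptions, not part of the Lumiera tree:
#
#     class _PlainFileNode:
#         def __init__(self, path):
#             self.path = path
#         def __str__(self):
#             return self.path
#         def get_text_contents(self):
#             with open(self.path) as fh:
#                 return fh.read()
#
#     sources = DoxySourceFiles(_PlainFileNode("doc/Doxyfile"), env=None)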


def DoxySourceScan(node, env, path):
    """
    Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
    any files used to generate docs to the list of source files.
    """
    filepaths = DoxySourceFiles(node, env)
    sources = [env.File(path) for path in filepaths]
    return sources


def DoxySourceScanCheck(node, env):
    """Check if we should scan this file"""
    return os.path.isfile(node.path)


def DoxyEmitter(target, source, env):
    """Doxygen Doxyfile emitter"""
    doxy_fpath = str(source[0])
    conf_dir = os.path.dirname(doxy_fpath)

    data = DoxyfileParse(source[0].get_text_contents(), conf_dir)

    targets = []
    out_dir = data.get("OUTPUT_DIRECTORY", ".")
    if not os.path.isabs(out_dir):
        out_dir = os.path.join(conf_dir, out_dir)

    # add our output locations
    for (k, v) in list(output_formats.items()):
        if data.get("GENERATE_" + k, v[0]) == "YES":
            # Initialize output file extension for MAN pages
            if k == "MAN":
                # Is the given extension valid?
                manext = v[3]
                if v[4] and v[4] in data:
                    manext = data.get(v[4])
                # Try to strip off dots
                manext = manext.replace(".", "")
                # Can we convert it to an int?
                try:
                    int(manext)
                except ValueError:
                    # No, so set back to default
                    manext = "3"

                od = env.Dir(
                    os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]), "man" + manext)
                )
            else:
                od = env.Dir(os.path.join(out_dir, data.get(k + "_OUTPUT", v[1])))
            # don't clobber target folders
            env.Precious(od)
            # set up cleaning stuff
            env.Clean(od, od)

            # Add target files
            if k != "MAN":
                # Is an extension override var given?
                if v[4] and v[4] in data:
                    fname = v[2] + data.get(v[4])
                else:
                    fname = v[2] + v[3]
                of = env.File(
                    os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]), fname)
                )
                targets.append(of)
                # don't clean single files, we remove the complete output folders (see above)
                env.NoClean(of)
            else:
                # Special case: MAN pages
                # We have to add a target file docs/man/man3/foo.h.3
                # for each input file foo.h, so we scan the config file
                # a second time... :(
                filepaths = DoxySourceFiles(source[0], env)
                for f in filepaths:
                    if os.path.isfile(f) and f != doxy_fpath:
                        of = env.File(
                            os.path.join(
                                out_dir,
                                data.get(k + "_OUTPUT", v[1]),
                                "man" + manext,
                                f + "." + manext,
                            )
                        )
                        targets.append(of)
                        # don't clean single files, we remove the complete output folders (see above)
                        env.NoClean(of)

    # add the tag file if necessary:
    tagfile = data.get("GENERATE_TAGFILE", "")
    if tagfile != "":
        if not os.path.isabs(tagfile):
            tagfile = os.path.join(conf_dir, tagfile)
        targets.append(env.File(tagfile))

    return (targets, source)
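
# A sketch of what the emitter produces, with hypothetical values: for a
# Doxyfile living in doc/ that contains
#
#     OUTPUT_DIRECTORY = ../docs
#     GENERATE_HTML    = YES
#     GENERATE_TAGFILE = lumiera.tag
#
# the emitted targets would be roughly doc/../docs/html/index.html plus
# doc/lumiera.tag, and doc/../docs/html is marked Precious so a rebuild does
# not clobber the generated output folder.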


def generate(env):
    """
    Add builders and construction variables for the
    Doxygen tool. This is currently for Doxygen 1.4.6.
    """
    doxyfile_scanner = env.Scanner(
        DoxySourceScan,
        "DoxySourceScan",
        scan_check=DoxySourceScanCheck,
    )

    import SCons.Builder

    doxyfile_builder = SCons.Builder.Builder(
        action="cd ${SOURCE.dir} && (${DOXYGEN} ${SOURCE.file} 2>&1 |tee ,doxylog)",
        emitter=DoxyEmitter,
        target_factory=env.fs.Entry,
        single_source=True,
        source_scanner=doxyfile_scanner,
    )

    env.Append(
        BUILDERS={
            "Doxygen": doxyfile_builder,
        }
    )

    env.AppendUnique(
        DOXYGEN="doxygen",
    )


def exists(env):
    """
    Make sure doxygen exists.
    """
    return env.Detect("doxygen")
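

# Typical use from a SConstruct, as a sketch: it assumes this file is made
# available as a site tool; the tool name "BuilderDoxygen", the toolpath and
# the 'doc/Doxyfile' location are illustrative, not taken from the Lumiera tree.
#
#     env = Environment(tools=["default", "BuilderDoxygen"],
#                       toolpath=["admin/scons"])
#     docs = env.Doxygen("doc/Doxyfile")
#     env.Alias("doc", docs)
#
# SCons then calls generate(env) above, which registers the 'Doxygen' builder,
# attaches the Doxyfile scanner to it, and sets the DOXYGEN construction
# variable to the "doxygen" executable.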