Lumiera  0.pre.03
»edit your freedom«
BuilderDoxygen.py
Go to the documentation of this file.
1 # -*- python -*-
2 
5 
6 #
7 # Astxx, the Asterisk C++ API and Utility Library.
8 # Copyright (C) 2005, 2006 Matthew A. Nicholson
9 # Copyright (C) 2006 Tim Blechmann
10 #
11 # This library is free software; you can redistribute it and/or
12 # modify it under the terms of the GNU Lesser General Public
13 # License version 2.1 as published by the Free Software Foundation.
14 #
15 # This library is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 # Lesser General Public License for more details.
19 #
20 # You should have received a copy of the GNU Lesser General Public License
21 # along with this library; if not, see http://www.gnu.org/licenses/
22 
23 
24 # history: 8/2008 adapted for Lumiera build system
25 # added patch for Doxyfile in subdirectory
26 # see http://www.scons.org/wiki/DoxygenBuilder
27 
28 
29 import os
30 import os.path
31 import glob
32 from fnmatch import fnmatch
33 
34 
def DoxyfileParse(file_contents):
    """ Parse a Doxygen source file and return a dictionary of all the values.
    Values will be strings and lists of strings.
    """
    # SCons node.get_contents() returns bytes under Python 3, but shlex
    # needs text -- decode defensively so both call styles keep working
    if isinstance(file_contents, bytes):
        file_contents = file_contents.decode("utf-8", "replace")

    data = {}

    import shlex
    lex = shlex.shlex(instream=file_contents, posix=True)
    lex.wordchars += "*+./-:"
    # keep "\n" out of the whitespace set so newlines come back as tokens:
    # a newline not preceded by '\' terminates a KEY = VALUE entry
    lex.whitespace = lex.whitespace.replace("\n", "")
    lex.escape = ""

    token = lex.get_token()
    key = token  # the first token should be a key
    last_token = ""
    # start in "expecting a key" state; the original started with False,
    # which raised KeyError when the file began directly with a directive
    # (it only worked if a blank line preceded the first KEY = VALUE)
    key_token = True
    new_data = True

    def append_data(data, key, new_data, token):
        # start a new value, or -- after a line continuation -- glue the
        # token onto the end of the last value
        if new_data or len(data[key]) == 0:
            data[key].append(token)
        else:
            data[key][-1] += token

    while token:
        if token in ['\n']:
            if last_token not in ['\\']:
                key_token = True
        elif token in ['\\']:
            pass
        elif key_token:
            key = token
            key_token = False
        else:
            if token == "+=":
                if key not in data:  # Py3: dict.has_key() is gone
                    data[key] = list()
            elif token == "=":
                data[key] = list()
            else:
                append_data(data, key, new_data, token)
                new_data = True

        last_token = token
        token = lex.get_token()

        # a backslash not followed by a newline is literal data, not a
        # line continuation; re-attach it to the current value
        if last_token == '\\' and token != '\n':
            new_data = False
            append_data(data, key, new_data, '\\')

    # compress lists of len 1 into single strings
    # (iterate over a snapshot: we pop from the dict while walking it,
    # which raises RuntimeError on a live view under Python 3)
    for (k, v) in list(data.items()):
        if len(v) == 0:
            data.pop(k)

        # items in the following list will be kept as lists and not converted to strings
        if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]:
            continue

        if len(v) == 1:
            data[k] = v[0]

    return data
100 
101 
def DoxySourceScan(node, env, path):
    """ Doxygen Doxyfile source scanner.
    This should scan the Doxygen file and add any files
    used to generate docs to the list of source files.
    """
    default_file_patterns = [
        '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
        '*.ipp', '*.i++', '*.inl', '*.h', '*.hh', '*.hxx', '*.hpp', '*.h++',
        '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
        '*.py',
    ]   # NOTE: original had '*.hh ' with a trailing space, which can never match

    default_exclude_patterns = [
        '*~',
    ]

    sources = []

    data = DoxyfileParse(node.get_contents())

    recursive = data.get("RECURSIVE", "NO") == "YES"

    file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
    exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)

    #
    # We're running in the top-level directory, but the doxygen configuration file
    # is in the same directory as node; this means that relative pathnames in node
    # must be adjusted before they can go onto the sources list
    conf_dir = os.path.dirname(str(node))

    # renamed loop variable: the original reused 'node', shadowing the parameter
    for source in data.get("INPUT", []):
        if not os.path.isabs(source):
            source = os.path.join(conf_dir, source)
        if os.path.isfile(source):
            sources.append(source)
        elif os.path.isdir(source):
            if recursive:
                for root, dirs, files in os.walk(source):
                    for f in files:
                        filename = os.path.join(root, f)

                        # include when ANY file pattern matches ...
                        pattern_check = any(fnmatch(filename, pat) for pat in file_patterns)
                        # ... unless ANY exclude pattern matches.  The original
                        # reduce() required a match on ALL exclude patterns (and
                        # excluded everything for an empty list); Doxygen's
                        # EXCLUDE_PATTERNS semantics are match-any.  Using any()
                        # also drops reduce(), which is not a builtin on Python 3.
                        exclude_check = any(fnmatch(filename, pat) for pat in exclude_patterns)

                        if pattern_check and not exclude_check:
                            sources.append(filename)
            else:
                for pattern in file_patterns:
                    sources.extend(glob.glob("/".join([source, pattern])))

    # a real list is required (Python 3 map() is lazy)
    sources = [env.File(p) for p in sources]
    return sources
158 
159 
def DoxySourceScanCheck(node, env):
    """ Decide whether the given Doxyfile node should be scanned:
    only nodes that exist as regular files on disk qualify.
    """
    candidate = node.path
    return os.path.isfile(candidate)
163 
164 
def DoxyEmitter(source, target, env):
    """ Doxygen Doxyfile emitter """
    # for every supported output format: (enabled-by-default?, default subdirectory)
    output_formats = {
        "HTML": ("YES", "html"),
        "LATEX": ("YES", "latex"),
        "RTF": ("NO", "rtf"),
        "MAN": ("NO", "man"),
        "XML": ("NO", "xml"),
    }

    data = DoxyfileParse(source[0].get_contents())

    out_dir = data.get("OUTPUT_DIRECTORY", ".")

    # collect an output-directory node for each format that is switched on
    targets = []
    for fmt, (default_enabled, default_subdir) in output_formats.items():
        if data.get("GENERATE_" + fmt, default_enabled) == "YES":
            subdir = data.get(fmt + "_OUTPUT", default_subdir)
            targets.append(env.Dir(os.path.join(out_dir, subdir)))

    # don't clobber targets
    for doc_dir in targets:
        env.Precious(doc_dir)

    # set up cleaning stuff
    for doc_dir in targets:
        env.Clean(doc_dir, doc_dir)

    return (targets, source)
195 
196 
def generate(env):
    """ Add builders and construction variables for the
    Doxygen tool. This is currently for Doxygen 1.4.6.
    """
    import SCons.Builder

    # scanner that pulls the documented source files out of a Doxyfile
    scanner = env.Scanner(
        DoxySourceScan,
        "DoxySourceScan",
        scan_check=DoxySourceScanCheck,
    )

    # run doxygen inside the directory holding the Doxyfile, logging to ,doxylog
    builder = SCons.Builder.Builder(
        action="cd ${SOURCE.dir} && (${DOXYGEN} ${SOURCE.file} 2>&1 |tee ,doxylog)",
        emitter=DoxyEmitter,
        target_factory=env.fs.Entry,
        single_source=True,
        source_scanner=scanner,
    )

    env.Append(BUILDERS={'Doxygen': builder})

    env.Replace(DOXYGEN='doxygen')
223 
224 
def exists(env):
    """ Make sure doxygen exists.
    Returns whatever env.Detect reports for the 'doxygen' executable.
    """
    doxygen_found = env.Detect("doxygen")
    return doxygen_found
229 
def DoxySourceScan(node, env, path)
def DoxyfileParse(file_contents)
string join(CON &&coll, string const &delim=", ")
join a collection's contents into a single string, with the elements separated by the given delimiter.
def DoxySourceScanCheck(node, env)
def DoxyEmitter(source, target, env)