# Lumiera 0.pre.03 — »edit your freedom«
# BuilderDoxygen.py
1 # coding: utf-8
2 
5 
6 #
7 # Astxx, the Asterisk C++ API and Utility Library.
8 # Copyright (C)
9 # 2005, 2006, Matthew A. Nicholson
10 # 2006, Tim Blechmann
11 #
12 # This library is free software; you can redistribute it and/or
13 # modify it under the terms of the GNU Lesser General Public
14 # License version 2.1 as published by the Free Software Foundation.
15 
16 
17 # history: 8/2008 adapted for Lumiera build system
18 # added patch for Doxyfile in subdirectory
19 # see http://www.scons.org/wiki/DoxygenBuilder
20 
21 
22 import os
23 import os.path
24 import glob
25 from fnmatch import fnmatch
26 
27 
def DoxyfileParse(file_contents):
    """ Parse a Doxygen source file and return a dictionary of all the values.
    Values will be strings and lists of strings.

    Accepts either text or (on Python 3 SCons) bytes content.
    """
    data = {}

    import shlex

    # SCons node.get_contents() may hand us bytes on Python 3; shlex needs text
    if isinstance(file_contents, bytes):
        file_contents = file_contents.decode('utf-8')

    lex = shlex.shlex(instream = file_contents, posix = True)
    lex.wordchars += "*+./-:"
    # keep '\n' out of the whitespace set: a newline token terminates an
    # assignment (unless the line ends with a '\' continuation)
    lex.whitespace = lex.whitespace.replace("\n", "")
    lex.escape = ""

    token = lex.get_token()
    key = token
    last_token = ""
    # treat the very first token as a key; the previous code re-processed the
    # first token as a *value* and raised KeyError unless the file happened to
    # start with a comment or blank line
    key_token = True
    new_data = True

    def append_data(data, key, new_data, token):
        # start a new value word, or glue onto the previous one (escape case);
        # setdefault guards against a value appearing before any '='/'+='
        if new_data or len(data.setdefault(key, [])) == 0:
            data[key].append(token)
        else:
            data[key][-1] += token

    while token:
        if token in ['\n']:
            if last_token not in ['\\']:
                key_token = True          # next token names the next key
        elif token in ['\\']:
            pass                          # swallow line continuations
        elif key_token:
            key = token
            key_token = False
        else:
            if token == "+=":
                if key not in data:       # '+=' appends to existing values
                    data[key] = list()
            elif token == "=":
                data[key] = list()        # '=' starts the value list afresh
            else:
                append_data(data, key, new_data, token)
                new_data = True

        last_token = token
        token = lex.get_token()

        # shlex emits "+=" as two tokens '+' and '=' (only '+' is a wordchar);
        # merge them back, otherwise the append operator degrades into a plain
        # assignment and silently discards previously collected values
        if token == "+":
            follower = lex.get_token()
            if follower == "=":
                token = "+="
            elif follower is not None:
                lex.push_token(follower)

        if last_token == '\\' and token != '\n':
            new_data = False
            append_data(data, key, new_data, '\\')

    # compress lists of len 1 into single strings; iterate over a snapshot
    # because we delete from `data` while walking it (required on Python 3)
    for (k, v) in list(data.items()):
        if len(v) == 0:
            del data[k]
            continue

        # items in the following list will be kept as lists and not converted to strings
        if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]:
            continue

        if len(v) == 1:
            data[k] = v[0]

    return data
93 
94 
def DoxySourceScan(node, env, path):
    """ Doxygen Doxyfile source scanner.
    This should scan the Doxygen file and add any files
    used to generate docs to the list of source files.

    Returns a list of env.File nodes for every input file the
    Doxyfile's INPUT / FILE_PATTERNS / EXCLUDE_PATTERNS select.
    """
    default_file_patterns = [
        '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
        '*.ipp', '*.i++', '*.inl', '*.h', '*.hh', '*.hxx', '*.hpp', '*.h++',
        '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
        '*.py',
    ]
    # NOTE: the original list contained '*.hh ' with a trailing blank,
    # which can never match a filename — fixed to '*.hh'

    default_exclude_patterns = [
        '*~',
    ]

    sources = []

    data = DoxyfileParse(node.get_contents())

    recursive = data.get("RECURSIVE", "NO") == "YES"

    file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
    exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)

    #
    # We're running in the top-level directory, but the doxygen configuration file
    # is in the same directory as node; this means that relative pathnames in node
    # must be adjusted before they can go onto the sources list
    conf_dir = os.path.dirname(str(node))

    # renamed loop variable: the original shadowed the `node` parameter
    for input_entry in data.get("INPUT", []):
        if not os.path.isabs(input_entry):
            input_entry = os.path.join(conf_dir, input_entry)
        if os.path.isfile(input_entry):
            sources.append(input_entry)
        elif os.path.isdir(input_entry):
            if recursive:
                for root, dirs, files in os.walk(input_entry):
                    for f in files:
                        filename = os.path.join(root, f)

                        # a file is taken when it matches at least one include
                        # pattern and no exclude pattern.  (The old reduce()
                        # folded the exclude check with 'and', excluding a file
                        # only when *every* pattern matched — wrong as soon as
                        # EXCLUDE_PATTERNS holds more than one entry.)
                        included = any(fnmatch(filename, p) for p in file_patterns)
                        excluded = any(fnmatch(filename, p) for p in exclude_patterns)

                        if included and not excluded:
                            sources.append(filename)
            else:
                for pattern in file_patterns:
                    sources.extend(glob.glob("/".join([input_entry, pattern])))

    # materialize as a list of SCons File nodes (map() is lazy on Python 3)
    return [env.File(p) for p in sources]
151 
152 
def DoxySourceScanCheck(node, env):
    """ Decide whether the given Doxyfile node should be scanned:
    only nodes backed by an existing regular file qualify.
    """
    candidate = node.path
    return os.path.isfile(candidate)
156 
157 
def DoxyEmitter(source, target, env):
    """ Doxygen Doxyfile emitter: derive the output directories doxygen
    will produce from the Doxyfile settings and register them as targets.
    """
    # format name -> (default for GENERATE_<fmt>, default output subdirectory)
    output_formats = {
        "HTML": ("YES", "html"),
        "LATEX": ("YES", "latex"),
        "RTF": ("NO", "rtf"),
        "MAN": ("NO", "man"),
        "XML": ("NO", "xml"),
    }

    data = DoxyfileParse(source[0].get_contents())
    out_dir = data.get("OUTPUT_DIRECTORY", ".")

    # collect an output directory node for every enabled format
    targets = []
    for fmt, (enabled_default, subdir_default) in output_formats.items():
        if data.get("GENERATE_" + fmt, enabled_default) == "YES":
            subdir = data.get(fmt + "_OUTPUT", subdir_default)
            targets.append(env.Dir(os.path.join(out_dir, subdir)))

    for out_node in targets:
        env.Precious(out_node)      # don't clobber targets
        env.Clean(out_node, out_node)   # set up cleaning stuff

    return (targets, source)
188 
189 
def generate(env):
    """ Add builders and construction variables for the
    Doxygen tool. This is currently for Doxygen 1.4.6.
    """
    import SCons.Builder

    # scanner that pulls the documented sources out of a Doxyfile
    scanner = env.Scanner(
        DoxySourceScan,
        "DoxySourceScan",
        scan_check = DoxySourceScanCheck,
    )

    # run doxygen inside the Doxyfile's directory, teeing the log to ,doxylog
    builder = SCons.Builder.Builder(
        action = "cd ${SOURCE.dir} && (${DOXYGEN} ${SOURCE.file} 2>&1 |tee ,doxylog)",
        emitter = DoxyEmitter,
        target_factory = env.fs.Entry,
        single_source = True,
        source_scanner = scanner,
    )

    env.Append(BUILDERS = {'Doxygen': builder})
    env.Replace(DOXYGEN = 'doxygen')
216 
217 
def exists(env):
    """ Make sure doxygen exists.
    Returns whatever the environment's tool detection reports.
    """
    doxygen_found = env.Detect("doxygen")
    return doxygen_found
222 