# vim: set et sw=3 tw=0 fo=awqorc ft=python:
#
# Astxx, the Asterisk C++ API and Utility Library.
# Copyright (C) 2005, 2006 Matthew A. Nicholson
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import glob
def DoxyfileParse(file_contents):
"""
Parse a Doxygen source file and return a dictionary of all the values.
Values will be strings and lists of strings.
"""
data = {}
import shlex
lex = shlex.shlex(instream = file_contents, posix = True)
   # let glob metacharacters, "+", and path characters be part of words so
   # values such as "*.c++" or "../include" survive as single tokens
   lex.wordchars += "*+./-:"
lex.whitespace = lex.whitespace.replace("\n", "")
lex.escape = ""
lineno = lex.lineno
token = lex.get_token()
key = token # the first token should be a key
last_token = ""
key_token = False
next_key = False
while token:
if token in ['\n']:
if last_token not in ['\\']:
key_token = True
elif token in ['\\']:
pass
elif key_token:
key = token
key_token = False
else:
if token == "+=":
            if key not in data:
               data[key] = list()
elif token == "=":
data[key] = list()
else:
data[key].append(token)
last_token = token
token = lex.get_token()
   # remove empty values and compress single-item lists into plain strings;
   # iterate over a copy so entries can be popped safely while looping
   for (k, v) in list(data.items()):
      if len(v) == 0:
         data.pop(k)
         continue

      # these keys stay lists (even with a single entry) for the scanner below
      if k in ["INPUT", "FILE_PATTERNS"]:
         continue

      if len(v) == 1:
         data[k] = v[0]
return data
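
# A minimal sketch (assumed example values, not taken from any real project)
# of what DoxyfileParse returns.  Given a Doxyfile containing
#
#    PROJECT_NAME   = astxx
#    INPUT          = src include
#    FILE_PATTERNS  = *.h *.cpp
#    RECURSIVE      = YES
#
# the resulting dictionary is
#
#    {'PROJECT_NAME': 'astxx',
#     'INPUT': ['src', 'include'],
#     'FILE_PATTERNS': ['*.h', '*.cpp'],
#     'RECURSIVE': 'YES'}
#
# INPUT and FILE_PATTERNS stay lists even when they hold a single entry so
# the scanner below can always iterate over them.
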
def DoxySourceScan(node, env, path):
"""
Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
any files used to generate docs to the list of source files.
"""
default_file_patterns = [
'*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
      '*.ipp', '*.i++', '*.inl', '*.h', '*.hh', '*.hxx', '*.hpp', '*.h++',
'*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
'*.py',
]
sources = []
data = DoxyfileParse(node.get_contents())
   recursive = data.get("RECURSIVE", "NO") == "YES"
file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
def recursively_scan_dir(dir, file_patterns):
matches = []
if not os.path.isdir(dir):
return matches
for pattern in file_patterns:
matches.extend(glob.glob("/".join([dir, pattern])))
for node in os.listdir(dir):
path = "/".join([dir, node])
if os.path.isdir(path):
matches.extend(recursively_scan_dir(path, file_patterns))
return matches
for node in data.get("INPUT", []):
if os.path.isfile(node):
         sources.append(node)
elif os.path.isdir(node):
if recursive:
sources.extend(recursively_scan_dir(node, file_patterns))
else:
for pattern in file_patterns:
sources.extend(glob.glob("/".join([node, pattern])))
   # hand back SCons File nodes rather than plain path strings
   sources = [env.File(src) for src in sources]
   return sources
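
# Sketch of the scanner's effect (the paths are assumed examples): with a
# Doxyfile containing
#
#    INPUT          = src include/astxx.h
#    RECURSIVE      = YES
#    FILE_PATTERNS  = *.h *.cpp
#
# DoxySourceScan returns include/astxx.h plus every *.h and *.cpp file found
# under src/ and its subdirectories, so editing any of those files makes the
# generated documentation out of date.
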
def DoxySourceScanCheck(node, env):
"""Check if we should scan this file"""
return os.path.isfile(node.path)
def DoxyEmitter(source, target, env):
"""Doxygen Doxyfile emitter"""
# possible output formats and their default values and output locations
output_formats = {
"HTML": ("YES", "html"),
"LATEX": ("NO", "latex"),
"RTF": ("NO", "rtf"),
"MAN": ("NO", "man"),
"XML": ("NO", "xml"),
}
data = DoxyfileParse(source[0].get_contents())
targets = []
out_dir = data.get("OUTPUT_DIRECTORY", ".")
# add our output locations
for (k, v) in output_formats.items():
if data.get("GENERATE_" + k, v[0]) == "YES":
targets.append("/".join([out_dir, data.get(k + "_OUTPUT", v[1])]))
# don't clobber targets
for node in targets:
env.Precious(node)
# set up cleaning stuff
for node in targets:
env.Clean(node, node)
return (targets, source)
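
# Emitter sketch (assumed Doxyfile values): with
#
#    OUTPUT_DIRECTORY = doc
#    GENERATE_HTML    = YES
#    GENERATE_MAN     = YES
#
# the emitter declares "doc/html" and "doc/man" as targets, marks them
# Precious so they are not removed before being rebuilt, and registers them
# with Clean so "scons -c" removes the generated directories.
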
def generate(env):
"""
Add builders and construction variables for the
Doxygen tool. This is currently for Doxygen 1.4.6.
"""
doxygen_scanner = env.Scanner(
DoxySourceScan,
"DoxySourceScan",
scan_check = DoxySourceScanCheck,
)
doxygen_builder = env.Builder(
action = env.Action("${DOXYGEN} ${SOURCE}"),
emitter = DoxyEmitter,
target_factory = env.fs.Entry,
single_source = True,
source_scanner = doxygen_scanner,
)
env.Append(BUILDERS = {
'Doxygen': doxygen_builder,
})
env.AppendUnique(
DOXYGEN = 'doxygen',
)
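
# Typical SConstruct usage of the builder registered above (a hedged sketch;
# it assumes this module is reachable under the tool name 'doxygen', e.g. via
# a 'toolpath' entry or site_scons/site_tools):
#
#    env = Environment(tools = ['default', 'doxygen'], toolpath = ['tools'])
#    env.Doxygen('Doxyfile')
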
def exists(env):
"""
Make sure doxygen exists.
"""
return env.Detect("doxygen")
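
# Small self-contained demonstration of DoxyfileParse (a sketch; the fragment
# below is an assumed example, not a complete Doxyfile).  It only runs when
# this module is executed directly, never when SCons loads it as a tool.
if __name__ == "__main__":
   sample = (
      "# Doxyfile fragment used only for this demonstration\n"
      "\n"
      "PROJECT_NAME   = astxx\n"
      "INPUT          = src include\n"
      "FILE_PATTERNS  = *.h *.cpp\n"
      "RECURSIVE      = YES\n"
   )
   print(DoxyfileParse(sample))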