import os
import os.path
import glob
from fnmatch import fnmatch
import targz
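# targz is a sibling SCons tool module; its makeBuilder() is used in generate()
# below to wrap srcDistEmitter into the builder registered as SrcDist
# (presumably producing a tar.gz source archive).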

##def DoxyfileParse(file_contents):
##    """
##    Parse a Doxygen source file and return a dictionary of all the values.
##    Values will be strings and lists of strings.
##    """
##    data = {}
##
##    import shlex
##    lex = shlex.shlex(instream = file_contents, posix = True)
##    lex.wordchars += "*+./-:"
##    lex.whitespace = lex.whitespace.replace("\n", "")
##    lex.escape = ""
##
##    lineno = lex.lineno
##    last_backslash_lineno = lineno
##    token = lex.get_token()
##    key = token  # the first token should be a key
##    last_token = ""
##    key_token = False
##    next_key = False
##    new_data = True
##
##    def append_data(data, key, new_data, token):
##        if new_data or len(data[key]) == 0:
##            data[key].append(token)
##        else:
##            data[key][-1] += token
##
##    while token:
##        if token in ['\n']:
##            if last_token not in ['\\']:
##                key_token = True
##        elif token in ['\\']:
##            pass
##        elif key_token:
##            key = token
##            key_token = False
##        else:
##            if token == "+=":
##                if not data.has_key(key):
##                    data[key] = list()
##            elif token == "=":
##                data[key] = list()
##            else:
##                append_data( data, key, new_data, token )
##                new_data = True
##
##        last_token = token
##        token = lex.get_token()
##
##        if last_token == '\\' and token != '\n':
##            new_data = False
##            append_data( data, key, new_data, '\\' )
##
##    # compress lists of len 1 into single strings
##    for (k, v) in data.items():
##        if len(v) == 0:
##            data.pop(k)
##
##        # items in the following list will be kept as lists and not converted to strings
##        if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]:
##            continue
##
##        if len(v) == 1:
##            data[k] = v[0]
##
##    return data
##
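# Illustrative example (assuming the disabled parser above were re-enabled):
# a Doxyfile fragment such as
#
#     RECURSIVE = YES
#     INPUT     = src include
#
# would parse to {'RECURSIVE': 'YES', 'INPUT': ['src', 'include']} -- INPUT,
# FILE_PATTERNS and EXCLUDE_PATTERNS stay lists, while other single-valued
# keys collapse to plain strings.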
##def DoxySourceScan(node, env, path):
##    """
##    Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
##    any files used to generate docs to the list of source files.
##    """
##    default_file_patterns = [
##        '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
##        '*.ipp', '*.i++', '*.inl', '*.h', '*.hh', '*.hxx', '*.hpp', '*.h++',
##        '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
##        '*.py',
##    ]
##
##    default_exclude_patterns = [
##        '*~',
##    ]
##
##    sources = []
##
##    data = DoxyfileParse(node.get_contents())
##
##    if data.get("RECURSIVE", "NO") == "YES":
##        recursive = True
##    else:
##        recursive = False
##
##    file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
##    exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)
##
##    for node in data.get("INPUT", []):
##        if os.path.isfile(node):
##            sources.append(node)
##        elif os.path.isdir(node):
##            if recursive:
##                for root, dirs, files in os.walk(node):
##                    for f in files:
##                        filename = os.path.join(root, f)
##
##                        pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False)
##                        exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True)
##
##                        if pattern_check and not exclude_check:
##                            sources.append(filename)
##            else:
##                for pattern in file_patterns:
##                    sources.extend(glob.glob("/".join([node, pattern])))
##    sources = map( lambda path: env.File(path), sources )
##    return sources
##
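# Note on the disabled scanner above: with the default patterns, a file such
# as src/json_reader.cpp (hypothetical path) matches '*.cpp' and would be
# added as a source, while an editor backup like json_reader.cpp~ would be
# rejected by the default exclude pattern '*~'.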
##
##def DoxySourceScanCheck(node, env):
##    """Check if we should scan this file"""
##    return os.path.isfile(node.path)

def srcDistEmitter(source, target, env):
##    """Doxygen Doxyfile emitter"""
##    # possible output formats and their default values and output locations
##    output_formats = {
##        "HTML": ("YES", "html"),
##        "LATEX": ("YES", "latex"),
##        "RTF": ("NO", "rtf"),
##        "MAN": ("YES", "man"),
##        "XML": ("NO", "xml"),
##    }
##
##    data = DoxyfileParse(source[0].get_contents())
##
##    targets = []
##    out_dir = data.get("OUTPUT_DIRECTORY", ".")
##
##    # add our output locations
##    for (k, v) in output_formats.items():
##        if data.get("GENERATE_" + k, v[0]) == "YES":
##            targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) )
##
##    # don't clobber targets
##    for node in targets:
##        env.Precious(node)
##
##    # set up cleaning stuff
##    for node in targets:
##        env.Clean(node, node)
##
##    return (targets, source)
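    # The Doxygen-specific emitter logic above is disabled; the active emitter
    # simply passes the requested targets and sources through unchanged.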
    return (target, source)

def generate(env):
    """
    Add builders and construction variables for the
    SrcDist tool.
    """
##    doxyfile_scanner = env.Scanner(
##        DoxySourceScan,
##        "DoxySourceScan",
##        scan_check = DoxySourceScanCheck,
##    )

    srcdist_builder = targz.makeBuilder( srcDistEmitter )

    env['BUILDERS']['SrcDist'] = srcdist_builder

def exists(env):
    """
    Make sure srcdist exists.
    """
    return True
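
# Usage sketch (illustrative; assumes this module is installed on the SCons
# toolpath as a tool named 'srcdist' -- the target and source names below are
# hypothetical, not defined by this tool):
#
#     env = Environment(tools = ['default', 'srcdist'], toolpath = ['scons-tools'])
#     env.SrcDist('dist/project-src.tar.gz', ['SConstruct', 'src/main.cpp'])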