diff options
Diffstat (limited to 'lib/jsoncpp/scons-tools')
-rw-r--r-- | lib/jsoncpp/scons-tools/doxygen.py | 116 | ||||
-rw-r--r-- | lib/jsoncpp/scons-tools/globtool.py | 53 | ||||
-rw-r--r-- | lib/jsoncpp/scons-tools/srcdist.py | 179 | ||||
-rw-r--r-- | lib/jsoncpp/scons-tools/substinfile.py | 79 | ||||
-rw-r--r-- | lib/jsoncpp/scons-tools/targz.py | 82 |
5 files changed, 509 insertions, 0 deletions
# ===========================================================================
# lib/jsoncpp/scons-tools/doxygen.py
# ===========================================================================
# Known issue: the emitter depends on the doxyfile, which is generated from
# doxyfile.in, so the build fails after cleaning and relaunching the build.
#
# Todo:
#   - Add a helper function to the environment, as done for Glob.
#   - Easier passing of header/footer.
#   - Automatic deduction of the index.html path based on custom parameters
#     passed to the doxyfile.

import os
import os.path
from fnmatch import fnmatch

import SCons


def Doxyfile_emitter(target, source, env):
    """Insert the doxyfile template (env['DOXYFILE_FILE']) as the first
    source, so the generated doxyfile is rebuilt when the template changes.

    Dependencies on external HTML documentation references are also
    appended to the source list.
    """
    doxyfile_template = env.File(env['DOXYFILE_FILE'])
    source.insert(0, doxyfile_template)
    return target, source


def Doxyfile_Builder(target, source, env):
    """Generate target[0] (the doxyfile) from the template source[0].

    Construction variables used:
      DOXYFILE_FILE: path of the template file for the output doxyfile.
      DOXYFILE_DICT: a dictionary of parameters appended to the generated
                     doxyfile; these override any previously set value.
    """
    doc_top_dir = os.path.split(target[0].abspath)[0]
    doxyfile_path = source[0].abspath
    doxy_file = open(target[0].abspath, 'wt')
    try:
        # First, copy the template file verbatim.
        try:
            f = open(doxyfile_path, 'rt')
            try:
                doxy_file.write(f.read())
            finally:
                f.close()
            doxy_file.write('\n')
            doxy_file.write('# Generated content:\n')
        except EnvironmentError:
            # Narrowed from a bare `except:` so genuine programming errors
            # are not masked as a template-read failure.
            raise SCons.Errors.UserError(
                "Can't read doxygen template file '%s'" % doxyfile_path)
        # Then, the INPUT list: every source except the template itself
        # (the template is always the first source, see Doxyfile_emitter).
        doxy_file.write('INPUT = \\\n')
        for input_node in source:
            if input_node.abspath != doxyfile_path:
                doxy_file.write('"%s" \\\n' % input_node.abspath)
        doxy_file.write('\n')
        # Dot (graphviz) detection and fixed output parameters.
        values_dict = {
            'HAVE_DOT': env.get('DOT') and 'YES' or 'NO',
            'DOT_PATH': env.get('DOT') and os.path.split(env['DOT'])[0] or '',
            'OUTPUT_DIRECTORY': doc_top_dir,
            'WARN_LOGFILE': target[0].abspath + '-warning.log',
        }
        values_dict.update(env['DOXYFILE_DICT'])
        # Finally, output the user dictionary values, which override any of
        # the previously set parameters.
        for key, value in values_dict.items():
            doxy_file.write('%s = "%s"\n' % (key, str(value)))
    finally:
        doxy_file.close()


def generate(env):
    """
    Add builders and construction variables for the Doxygen tool.
    """
    ## Doxyfile builder
    def doxyfile_message(target, source, env):
        return "creating Doxygen config file '%s'" % target[0]

    doxyfile_action = SCons.Action.Action(Doxyfile_Builder, doxyfile_message)

    doxyfile_builder = SCons.Builder.Builder(action=doxyfile_action,
                                             emitter=Doxyfile_emitter)

    env['BUILDERS']['Doxyfile'] = doxyfile_builder
    env['DOXYFILE_DICT'] = {}
    env['DOXYFILE_FILE'] = 'doxyfile.in'

    ## Doxygen builder
    def Doxygen_emitter(target, source, env):
        # When the caller did not give a distinct target, redirect the
        # target to the html index generated next to the doxyfile.
        output_dir = str(source[0].dir)
        if str(target[0]) == str(source[0]):
            target = env.File(os.path.join(output_dir, 'html', 'index.html'))
        return target, source

    doxygen_action = SCons.Action.Action(['$DOXYGEN_COM'])
    doxygen_builder = SCons.Builder.Builder(action=doxygen_action,
                                            emitter=Doxygen_emitter)
    env['BUILDERS']['Doxygen'] = doxygen_builder
    env['DOXYGEN_COM'] = '$DOXYGEN $DOXYGEN_FLAGS $SOURCE'
    env['DOXYGEN_FLAGS'] = ''
    env['DOXYGEN'] = 'doxygen'

    # Record the path to dot (graphviz) if available; Doxyfile_Builder uses
    # it for HAVE_DOT / DOT_PATH.
    dot_path = env.WhereIs("dot")
    if dot_path:
        env['DOT'] = dot_path


def exists(env):
    """
    Make sure doxygen exists.
    """
    return env.Detect("doxygen")


# ===========================================================================
# lib/jsoncpp/scons-tools/globtool.py
# ===========================================================================

import fnmatch
import os

try:
    _STRING_TYPES = (str, unicode)  # Python 2: accept both str and unicode
except NameError:
    _STRING_TYPES = (str,)          # Python 3


def generate(env):
    def Glob(env, includes=None, excludes=None, dir='.'):
        """Adds a Glob(includes=Split('*'), excludes=None, dir='.')
        helper function to the environment.

        Globs the file-system files only (directories are skipped).

        includes: list of file name patterns included in the returned list
                  when matched.
        excludes: list of file name patterns excluded from the returned list.

        Example:
            sources = env.Glob(("*.cpp", '*.h'), "~*.cpp", "#src")
        """
        def is_selected(path):
            # Keep only plain files whose basename matches at least one
            # include pattern and no exclude pattern.
            if not os.path.isfile(os.path.join(dir, path)):
                return False
            fn = os.path.basename(path)
            selected = any(fnmatch.fnmatchcase(fn, pattern)
                           for pattern in includes)
            if selected and excludes is not None:
                if any(fnmatch.fnmatchcase(fn, pattern)
                       for pattern in excludes):
                    selected = False
            return selected

        # Normalize the pattern arguments: a single string becomes a tuple.
        if includes is None:
            includes = ('*',)
        elif isinstance(includes, _STRING_TYPES):
            includes = (includes,)
        if isinstance(excludes, _STRING_TYPES):
            excludes = (excludes,)
        dir = env.Dir(dir).abspath
        return [env.File(os.path.join(dir, path))
                for path in os.listdir(dir) if is_selected(path)]

    from SCons.Script import Environment
    Environment.Glob = Glob


def exists(env):
    """
    Tool always exists.
    """
    return True


# ===========================================================================
# lib/jsoncpp/scons-tools/srcdist.py
# ===========================================================================
# NOTE(review): the original file carried ~170 lines of commented-out
# Doxygen scanner/emitter code (DoxyfileParse, DoxySourceScan,
# DoxySourceScanCheck and the commented body of srcDistEmitter).  It was
# dead code and has been removed; retrieve it from version control history
# if it is ever needed again.

import os
import os.path
from fnmatch import fnmatch

import targz


def srcDistEmitter(source, target, env):
    """Pass-through emitter for the SrcDist builder.

    NOTE(review): the (source, target) parameter order mirrors the original
    file; SCons calls emitters as (target, source, env), so the names are
    effectively swapped — harmless here because the emitter only returns
    its arguments unchanged, but confirm before adding logic.
    """
    return (target, source)


def generate(env):
    """
    Add builders and construction variables for the SrcDist tool.
    """
    if targz.exists(env):
        srcdist_builder = targz.makeBuilder(srcDistEmitter)

        env['BUILDERS']['SrcDist'] = srcdist_builder


def exists(env):
    """
    Make sure srcdist exists.
    """
    return targz.exists(env)


# ===========================================================================
# lib/jsoncpp/scons-tools/substinfile.py
# ===========================================================================

import re

from SCons.Script import *  # the usual scons stuff you get in a SConscript


def generate(env):
    """
    Add builders and construction variables for the SubstInFile tool.

    Adds the SubstInFile builder, which substitutes the keys->values of
    SUBST_DICT from the source to the target.
    The values of SUBST_DICT first have any construction variables expanded
    (its keys are not expanded).
    If a value of SUBST_DICT is a python callable function, it is called and
    the result is expanded as the value.
    If there's more than one source and more than one target, each target
    gets substituted from the corresponding source.
    """
    def do_subst_in_file(targetfile, sourcefile, subst_dict):
        """Replace all instances of the keys of subst_dict with their values.

        For example, if subst_dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
        then all instances of %VERSION% in the file will be replaced with
        1.2345 etc.  Keys are treated as regular expressions (re.sub).
        Returns 0 on success; raises SCons.Errors.UserError on I/O failure.
        """
        try:
            f = open(sourcefile, 'rb')
            try:
                contents = f.read()
            finally:
                f.close()
        except EnvironmentError:
            raise SCons.Errors.UserError("Can't read source file %s" % sourcefile)
        for (k, v) in subst_dict.items():
            contents = re.sub(k, v, contents)
        try:
            f = open(targetfile, 'wb')
            try:
                f.write(contents)
            finally:
                f.close()
        except EnvironmentError:
            raise SCons.Errors.UserError("Can't write target file %s" % targetfile)
        return 0  # success

    def subst_in_file(target, source, env):
        if 'SUBST_DICT' not in env:
            raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.")
        d = dict(env['SUBST_DICT'])  # copy it
        for (k, v) in d.items():
            if callable(v):
                d[k] = env.subst(v()).replace('\\', '\\\\')
            elif SCons.Util.is_String(v):
                d[k] = env.subst(v).replace('\\', '\\\\')
            else:
                raise SCons.Errors.UserError(
                    "SubstInFile: key %s: %s must be a string or callable" % (k, repr(v)))
        # BUG FIX: the original `return`ed inside this loop, so only the
        # FIRST (target, source) pair was ever substituted.  Process every
        # pair and propagate the first failing status.
        for (t, s) in zip(target, source):
            result = do_subst_in_file(str(t), str(s), d)
            if result:
                return result
        return 0

    def subst_in_file_string(target, source, env):
        """This is what gets printed on the console."""
        return '\n'.join(['Substituting vars from %s into %s' % (str(s), str(t))
                          for (t, s) in zip(target, source)])

    def subst_emitter(target, source, env):
        """Add dependency from substituted SUBST_DICT to target.
        Returns original target, source tuple unchanged.
        """
        d = env['SUBST_DICT'].copy()  # copy it
        for (k, v) in d.items():
            if callable(v):
                d[k] = env.subst(v())
            elif SCons.Util.is_String(v):
                d[k] = env.subst(v)
        # Depend on the expanded dictionary value so targets rebuild when
        # any substitution changes, not only when the source file changes.
        Depends(target, SCons.Node.Python.Value(d))
        return target, source

    subst_action = SCons.Action.Action(subst_in_file, subst_in_file_string)
    env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter)


def exists(env):
    """
    Make sure tool exists.
    """
    return True


# ===========================================================================
# lib/jsoncpp/scons-tools/targz.py
# ===========================================================================
"""tarball

Tool-specific initialization for tarball.
"""

## Commands to tackle a command-based implementation:
## to unpack on the fly...
##   gunzip < FILE.tar.gz | tar xvf -
## to pack on the fly...
##   tar cvf - FILE-LIST | gzip -c > FILE.tar.gz

import os.path

import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Node.FS
import SCons.Util

try:
    import gzip
    import tarfile
    internal_targz = 1
except ImportError:
    internal_targz = 0

TARGZ_DEFAULT_COMPRESSION_LEVEL = 9

if internal_targz:
    def targz(target, source, env):
        """Build function: pack all sources into target[0] as a .tar.gz.

        Archive member names are made relative to TARGZ_BASEDIR; the gzip
        compression level comes from TARGZ_COMPRESSION_LEVEL.
        """
        def archive_name(path):
            # Strip the common prefix with base_dir to get the member name.
            path = os.path.normpath(os.path.abspath(path))
            common_path = os.path.commonprefix((base_dir, path))
            return path[len(common_path):]

        compression = env.get('TARGZ_COMPRESSION_LEVEL', TARGZ_DEFAULT_COMPRESSION_LEVEL)
        base_dir = os.path.normpath(env.get('TARGZ_BASEDIR', env.Dir('.')).abspath)
        target_path = str(target[0])
        fileobj = gzip.GzipFile(target_path, 'wb', compression)
        try:
            tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
            try:
                for source_node in source:
                    source_path = str(source_node)
                    if source_node.isdir():
                        # Recursively add every plain file below the
                        # directory.  (os.walk replaces the Python-2-only
                        # os.path.walk callback style; same files added.)
                        for dirpath, _dirnames, filenames in os.walk(source_path):
                            for name in filenames:
                                path = os.path.join(dirpath, name)
                                if os.path.isfile(path):
                                    tar.add(path, archive_name(path))
                    else:
                        tar.add(source_path, archive_name(source_path))  # filename, arcname
            finally:
                tar.close()
        finally:
            # BUG FIX: the original never closed the gzip wrapper, which
            # could leave the archive truncated (unflushed gzip trailer).
            fileobj.close()

    targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL', 'TARGZ_BASEDIR'])

    def makeBuilder(emitter=None):
        """Return a tar.gz Builder, optionally wired with *emitter*.

        BUG FIX: the original accepted `emitter` but never passed it to the
        Builder, silently dropping e.g. srcdist's srcDistEmitter.
        """
        return SCons.Builder.Builder(action=SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
                                     source_factory=SCons.Node.FS.Entry,
                                     source_scanner=SCons.Defaults.DirScanner,
                                     suffix='$TARGZ_SUFFIX',
                                     emitter=emitter,
                                     multi=1)

    TarGzBuilder = makeBuilder()

    def generate(env):
        """Add Builders and construction variables for tar-gzip archives.

        The following environment variables may be set:
          TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression,
              9: best compression (same as gzip compression level).
          TARGZ_BASEDIR: base directory used to determine archive member
              names (this allows archive names to be relative to something
              other than the top dir).
        """
        env['BUILDERS']['TarGz'] = TarGzBuilder
        # '$TARGZ_COM' in the builder expands to this python Action.
        env['TARGZ_COM'] = targzAction
        env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL  # range 0-9
        env['TARGZ_SUFFIX'] = '.tar.gz'
        env['TARGZ_BASEDIR'] = env.Dir('.')  # archive member names are made relative to this dir
else:
    def generate(env):
        # tarfile/gzip unavailable: install no builders.
        pass


def exists(env):
    return internal_targz