Drop SCons support
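
Remove the SCons build system: delete SConstruct, the scons-tools helper
modules (globtool.py, srcdist.py, substinfile.py, targz.py), the
per-directory sconscript files for src/jsontestrunner, src/lib_json and
src/test_lib_json, and the now-unused /buildscons/ entry in .gitignore.

The removed SConstruct required a mandatory platform option and exposed
'check' and 'src-dist' aliases, so builds were previously driven along the
lines of (illustrative example only, one of the platforms it listed):

    scons platform=linux-gcc check
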
diff --git a/.gitignore b/.gitignore
index c869151..dc4d290 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,7 +6,6 @@
*.process-output
*.rewrite
/bin/
-/buildscons/
/libs/
/doc/doxyfile
/dist/
diff --git a/SConstruct b/SConstruct
deleted file mode 100644
index f3a73f7..0000000
--- a/SConstruct
+++ /dev/null
@@ -1,248 +0,0 @@
-"""
-Notes:
-- shared library support is buggy: it assumes that a static and dynamic library can be build from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time.
-
-To add a platform:
-- add its name in options allowed_values below
-- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example.
-"""
-
-import os
-import os.path
-import sys
-
-JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip()
-DIST_DIR = '#dist'
-
-options = Variables()
-options.Add( EnumVariable('platform',
- 'Platform (compiler/stl) used to build the project',
- 'msvc71',
- allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(),
- ignorecase=2) )
-
-try:
- platform = ARGUMENTS['platform']
- if platform == 'linux-gcc':
- CXX = 'g++' # not quite right, but env is not yet available.
- import commands
- version = commands.getoutput('%s -dumpversion' %CXX)
- platform = 'linux-gcc-%s' %version
- print "Using platform '%s'" %platform
- LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
- LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
- os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
- print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
-except KeyError:
- print 'You must specify a "platform"'
- sys.exit(2)
-
-print "Building using PLATFORM =", platform
-
-rootbuild_dir = Dir('#buildscons')
-build_dir = os.path.join( '#buildscons', platform )
-bin_dir = os.path.join( '#bin', platform )
-lib_dir = os.path.join( '#libs', platform )
-sconsign_dir_path = Dir(build_dir).abspath
-sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' )
-
-# Ensure build directory exist (SConsignFile fail otherwise!)
-if not os.path.exists( sconsign_dir_path ):
- os.makedirs( sconsign_dir_path )
-
-# Store all dependencies signature in a database
-SConsignFile( sconsign_path )
-
-def make_environ_vars():
- """Returns a dictionnary with environment variable to use when compiling."""
- # PATH is required to find the compiler
- # TEMP is required for at least mingw
- # LD_LIBRARY_PATH & co is required on some system for the compiler
- vars = {}
- for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'):
- if name in os.environ:
- vars[name] = os.environ[name]
- return vars
-
-
-env = Environment( ENV = make_environ_vars(),
- toolpath = ['scons-tools'],
- tools=[] ) #, tools=['default'] )
-
-if platform == 'suncc':
- env.Tool( 'sunc++' )
- env.Tool( 'sunlink' )
- env.Tool( 'sunar' )
- env.Append( CCFLAGS = ['-mt'] )
-elif platform == 'vacpp':
- env.Tool( 'default' )
- env.Tool( 'aixcc' )
- env['CXX'] = 'xlC_r' #scons does not pick-up the correct one !
- # using xlC_r ensure multi-threading is enabled:
- # http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm
- env.Append( CCFLAGS = '-qrtti=all',
- LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning
-elif platform == 'msvc6':
- env['MSVS_VERSION']='6.0'
- for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
- env.Tool( tool )
- env['CXXFLAGS']='-GR -GX /nologo /MT'
-elif platform == 'msvc70':
- env['MSVS_VERSION']='7.0'
- for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
- env.Tool( tool )
- env['CXXFLAGS']='-GR -GX /nologo /MT'
-elif platform == 'msvc71':
- env['MSVS_VERSION']='7.1'
- for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
- env.Tool( tool )
- env['CXXFLAGS']='-GR -GX /nologo /MT'
-elif platform == 'msvc80':
- env['MSVS_VERSION']='8.0'
- for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
- env.Tool( tool )
- env['CXXFLAGS']='-GR -EHsc /nologo /MT'
-elif platform == 'msvc90':
- env['MSVS_VERSION']='9.0'
- # Scons 1.2 fails to detect the correct location of the platform SDK.
- # So we propagate those from the environment. This requires that the
- # user run vcvars32.bat before compiling.
- if 'INCLUDE' in os.environ:
- env['ENV']['INCLUDE'] = os.environ['INCLUDE']
- if 'LIB' in os.environ:
- env['ENV']['LIB'] = os.environ['LIB']
- for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
- env.Tool( tool )
- env['CXXFLAGS']='-GR -EHsc /nologo /MT'
-elif platform == 'mingw':
- env.Tool( 'mingw' )
- env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] )
-elif platform.startswith('linux-gcc'):
- env.Tool( 'default' )
- env.Append( LIBS = ['pthread'], CCFLAGS = os.environ.get("CXXFLAGS", "-Wall"), LINKFLAGS=os.environ.get("LDFLAGS", "") )
- env['SHARED_LIB_ENABLED'] = True
-else:
- print "UNSUPPORTED PLATFORM."
- env.Exit(1)
-
-env.Tool('targz')
-env.Tool('srcdist')
-env.Tool('globtool')
-
-env.Append( CPPPATH = ['#include'],
- LIBPATH = lib_dir )
-short_platform = platform
-if short_platform.startswith('msvc'):
- short_platform = short_platform[2:]
-# Notes: on Windows you need to rebuild the source for each variant
-# Build script does not support that yet so we only build static libraries.
-# This also fails on AIX because both dynamic and static library ends with
-# extension .a.
-env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False)
-env['LIB_PLATFORM'] = short_platform
-env['LIB_LINK_TYPE'] = 'lib' # static
-env['LIB_CRUNTIME'] = 'mt'
-env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention
-env['JSONCPP_VERSION'] = JSONCPP_VERSION
-env['BUILD_DIR'] = env.Dir(build_dir)
-env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir)
-env['DIST_DIR'] = DIST_DIR
-if 'TarGz' in env['BUILDERS']:
- class SrcDistAdder:
- def __init__( self, env ):
- self.env = env
- def __call__( self, *args, **kw ):
- apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw )
- env['SRCDIST_BUILDER'] = env.TarGz
-else: # If tarfile module is missing
- class SrcDistAdder:
- def __init__( self, env ):
- pass
- def __call__( self, *args, **kw ):
- pass
-env['SRCDIST_ADD'] = SrcDistAdder( env )
-env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] )
-
-env_testing = env.Clone( )
-env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] )
-
-def buildJSONExample( env, target_sources, target_name ):
- env = env.Clone()
- env.Append( CPPPATH = ['#'] )
- exe = env.Program( target=target_name,
- source=target_sources )
- env['SRCDIST_ADD']( source=[target_sources] )
- global bin_dir
- return env.Install( bin_dir, exe )
-
-def buildJSONTests( env, target_sources, target_name ):
- jsontests_node = buildJSONExample( env, target_sources, target_name )
- check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) )
- env.AlwaysBuild( check_alias_target )
-
-def buildUnitTests( env, target_sources, target_name ):
- jsontests_node = buildJSONExample( env, target_sources, target_name )
- check_alias_target = env.Alias( 'check', jsontests_node,
- RunUnitTests( jsontests_node, jsontests_node ) )
- env.AlwaysBuild( check_alias_target )
-
-def buildLibrary( env, target_sources, target_name ):
- static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}',
- source=target_sources )
- global lib_dir
- env.Install( lib_dir, static_lib )
- if env['SHARED_LIB_ENABLED']:
- shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}',
- source=target_sources )
- env.Install( lib_dir, shared_lib )
- env['SRCDIST_ADD']( source=[target_sources] )
-
-Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' )
-
-def buildProjectInDirectory( target_directory ):
- global build_dir
- target_build_dir = os.path.join( build_dir, target_directory )
- target = os.path.join( target_directory, 'sconscript' )
- SConscript( target, build_dir=target_build_dir, duplicate=0 )
- env['SRCDIST_ADD']( source=[target] )
-
-
-def runJSONTests_action( target, source = None, env = None ):
- # Add test scripts to python path
- jsontest_path = Dir( '#test' ).abspath
- sys.path.insert( 0, jsontest_path )
- data_path = os.path.join( jsontest_path, 'data' )
- import runjsontests
- return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path )
-
-def runJSONTests_string( target, source = None, env = None ):
- return 'RunJSONTests("%s")' % source[0]
-
-import SCons.Action
-ActionFactory = SCons.Action.ActionFactory
-RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string )
-
-def runUnitTests_action( target, source = None, env = None ):
- # Add test scripts to python path
- jsontest_path = Dir( '#test' ).abspath
- sys.path.insert( 0, jsontest_path )
- import rununittests
- return rununittests.runAllTests( os.path.abspath(source[0].path) )
-
-def runUnitTests_string( target, source = None, env = None ):
- return 'RunUnitTests("%s")' % source[0]
-
-RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string )
-
-env.Alias( 'check' )
-
-srcdist_cmd = env['SRCDIST_ADD']( source = """
- AUTHORS README.md SConstruct
- """.split() )
-env.Alias( 'src-dist', srcdist_cmd )
-
-buildProjectInDirectory( 'src/jsontestrunner' )
-buildProjectInDirectory( 'src/lib_json' )
-buildProjectInDirectory( 'src/test_lib_json' )
-#print env.Dump()
-
diff --git a/scons-tools/globtool.py b/scons-tools/globtool.py
deleted file mode 100644
index e51af0f..0000000
--- a/scons-tools/globtool.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2009 Baptiste Lepilleur and The JsonCpp Authors
-# Distributed under MIT license, or public domain if desired and
-# recognized in your jurisdiction.
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-import fnmatch
-import os
-
-def generate(env):
- def Glob(env, includes = None, excludes = None, dir = '.'):
- """Adds Glob(includes = Split('*'), excludes = None, dir = '.')
- helper function to environment.
-
- Glob both the file-system files.
-
- includes: list of file name pattern included in the return list when matched.
- excludes: list of file name pattern exluced from the return list.
-
- Example:
- sources = env.Glob(("*.cpp", '*.h'), "~*.cpp", "#src")
- """
- def filterFilename(path):
- abs_path = os.path.join(dir, path)
- if not os.path.isfile(abs_path):
- return 0
- fn = os.path.basename(path)
- match = 0
- for include in includes:
- if fnmatch.fnmatchcase(fn, include):
- match = 1
- break
- if match == 1 and not excludes is None:
- for exclude in excludes:
- if fnmatch.fnmatchcase(fn, exclude):
- match = 0
- break
- return match
- if includes is None:
- includes = ('*',)
- elif type(includes) in (type(''), type(u'')):
- includes = (includes,)
- if type(excludes) in (type(''), type(u'')):
- excludes = (excludes,)
- dir = env.Dir(dir).abspath
- paths = os.listdir(dir)
- def makeAbsFileNode(path):
- return env.File(os.path.join(dir, path))
- nodes = filter(filterFilename, paths)
- return map(makeAbsFileNode, nodes)
-
- from SCons.Script import Environment
- Environment.Glob = Glob
-
-def exists(env):
- """
- Tool always exists.
- """
- return True
diff --git a/scons-tools/srcdist.py b/scons-tools/srcdist.py
deleted file mode 100644
index afbd2fa..0000000
--- a/scons-tools/srcdist.py
+++ /dev/null
@@ -1,183 +0,0 @@
-# Copyright 2007 Baptiste Lepilleur and The JsonCpp Authors
-# Distributed under MIT license, or public domain if desired and
-# recognized in your jurisdiction.
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-import os
-import os.path
-from fnmatch import fnmatch
-import targz
-
-##def DoxyfileParse(file_contents):
-## """
-## Parse a Doxygen source file and return a dictionary of all the values.
-## Values will be strings and lists of strings.
-## """
-## data = {}
-##
-## import shlex
-## lex = shlex.shlex(instream = file_contents, posix = True)
-## lex.wordchars += "*+./-:"
-## lex.whitespace = lex.whitespace.replace("\n", "")
-## lex.escape = ""
-##
-## lineno = lex.lineno
-## last_backslash_lineno = lineno
-## token = lex.get_token()
-## key = token # the first token should be a key
-## last_token = ""
-## key_token = False
-## next_key = False
-## new_data = True
-##
-## def append_data(data, key, new_data, token):
-## if new_data or len(data[key]) == 0:
-## data[key].append(token)
-## else:
-## data[key][-1] += token
-##
-## while token:
-## if token in ['\n']:
-## if last_token not in ['\\']:
-## key_token = True
-## elif token in ['\\']:
-## pass
-## elif key_token:
-## key = token
-## key_token = False
-## else:
-## if token == "+=":
-## if not data.has_key(key):
-## data[key] = list()
-## elif token == "=":
-## data[key] = list()
-## else:
-## append_data(data, key, new_data, token)
-## new_data = True
-##
-## last_token = token
-## token = lex.get_token()
-##
-## if last_token == '\\' and token != '\n':
-## new_data = False
-## append_data(data, key, new_data, '\\')
-##
-## # compress lists of len 1 into single strings
-## for (k, v) in data.items():
-## if len(v) == 0:
-## data.pop(k)
-##
-## # items in the following list will be kept as lists and not converted to strings
-## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]:
-## continue
-##
-## if len(v) == 1:
-## data[k] = v[0]
-##
-## return data
-##
-##def DoxySourceScan(node, env, path):
-## """
-## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
-## any files used to generate docs to the list of source files.
-## """
-## default_file_patterns = [
-## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
-## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++',
-## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
-## '*.py',
-## ]
-##
-## default_exclude_patterns = [
-## '*~',
-## ]
-##
-## sources = []
-##
-## data = DoxyfileParse(node.get_contents())
-##
-## if data.get("RECURSIVE", "NO") == "YES":
-## recursive = True
-## else:
-## recursive = False
-##
-## file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
-## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)
-##
-## for node in data.get("INPUT", []):
-## if os.path.isfile(node):
-## sources.add(node)
-## elif os.path.isdir(node):
-## if recursive:
-## for root, dirs, files in os.walk(node):
-## for f in files:
-## filename = os.path.join(root, f)
-##
-## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False)
-## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True)
-##
-## if pattern_check and not exclude_check:
-## sources.append(filename)
-## else:
-## for pattern in file_patterns:
-## sources.extend(glob.glob("/".join([node, pattern])))
-## sources = map(lambda path: env.File(path), sources)
-## return sources
-##
-##
-##def DoxySourceScanCheck(node, env):
-## """Check if we should scan this file"""
-## return os.path.isfile(node.path)
-
-def srcDistEmitter(source, target, env):
-## """Doxygen Doxyfile emitter"""
-## # possible output formats and their default values and output locations
-## output_formats = {
-## "HTML": ("YES", "html"),
-## "LATEX": ("YES", "latex"),
-## "RTF": ("NO", "rtf"),
-## "MAN": ("YES", "man"),
-## "XML": ("NO", "xml"),
-## }
-##
-## data = DoxyfileParse(source[0].get_contents())
-##
-## targets = []
-## out_dir = data.get("OUTPUT_DIRECTORY", ".")
-##
-## # add our output locations
-## for (k, v) in output_formats.items():
-## if data.get("GENERATE_" + k, v[0]) == "YES":
-## targets.append(env.Dir(os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))))
-##
-## # don't clobber targets
-## for node in targets:
-## env.Precious(node)
-##
-## # set up cleaning stuff
-## for node in targets:
-## env.Clean(node, node)
-##
-## return (targets, source)
- return (target,source)
-
-def generate(env):
- """
- Add builders and construction variables for the
- SrcDist tool.
- """
-## doxyfile_scanner = env.Scanner(## DoxySourceScan,
-## "DoxySourceScan",
-## scan_check = DoxySourceScanCheck,
-##)
-
- if targz.exists(env):
- srcdist_builder = targz.makeBuilder(srcDistEmitter)
-
- env['BUILDERS']['SrcDist'] = srcdist_builder
-
-def exists(env):
- """
- Make sure srcdist exists.
- """
- return targz.exists(env)
diff --git a/scons-tools/substinfile.py b/scons-tools/substinfile.py
deleted file mode 100644
index 7e9d526..0000000
--- a/scons-tools/substinfile.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright 2010 Baptiste Lepilleur and The JsonCpp Authors
-# Distributed under MIT license, or public domain if desired and
-# recognized in your jurisdiction.
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-import re
-from SCons.Script import * # the usual scons stuff you get in a SConscript
-import collections
-
-def generate(env):
- """
- Add builders and construction variables for the
- SubstInFile tool.
-
- Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT
- from the source to the target.
- The values of SUBST_DICT first have any construction variables expanded
- (its keys are not expanded).
- If a value of SUBST_DICT is a python callable function, it is called and
- the result is expanded as the value.
- If there's more than one source and more than one target, each target gets
- substituted from the corresponding source.
- """
- def do_subst_in_file(targetfile, sourcefile, dict):
- """Replace all instances of the keys of dict with their values.
- For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
- then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
- """
- try:
- f = open(sourcefile, 'rb')
- contents = f.read()
- f.close()
- except:
- raise SCons.Errors.UserError("Can't read source file %s"%sourcefile)
- for (k,v) in list(dict.items()):
- contents = re.sub(k, v, contents)
- try:
- f = open(targetfile, 'wb')
- f.write(contents)
- f.close()
- except:
- raise SCons.Errors.UserError("Can't write target file %s"%targetfile)
- return 0 # success
-
- def subst_in_file(target, source, env):
- if 'SUBST_DICT' not in env:
- raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.")
- d = dict(env['SUBST_DICT']) # copy it
- for (k,v) in list(d.items()):
- if isinstance(v, collections.Callable):
- d[k] = env.subst(v()).replace('\\','\\\\')
- elif SCons.Util.is_String(v):
- d[k] = env.subst(v).replace('\\','\\\\')
- else:
- raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)))
- for (t,s) in zip(target, source):
- return do_subst_in_file(str(t), str(s), d)
-
- def subst_in_file_string(target, source, env):
- """This is what gets printed on the console."""
- return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t))
- for (t,s) in zip(target, source)])
-
- def subst_emitter(target, source, env):
- """Add dependency from substituted SUBST_DICT to target.
- Returns original target, source tuple unchanged.
- """
- d = env['SUBST_DICT'].copy() # copy it
- for (k,v) in list(d.items()):
- if isinstance(v, collections.Callable):
- d[k] = env.subst(v())
- elif SCons.Util.is_String(v):
- d[k]=env.subst(v)
- Depends(target, SCons.Node.Python.Value(d))
- return target, source
-
-## env.Append(TOOLS = 'substinfile') # this should be automaticaly done by Scons ?!?
- subst_action = SCons.Action.Action(subst_in_file, subst_in_file_string)
- env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter)
-
-def exists(env):
- """
- Make sure tool exists.
- """
- return True
diff --git a/scons-tools/targz.py b/scons-tools/targz.py
deleted file mode 100644
index f5a98ba..0000000
--- a/scons-tools/targz.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright 2007 Baptiste Lepilleur and The JsonCpp Authors
-# Distributed under MIT license, or public domain if desired and
-# recognized in your jurisdiction.
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-"""tarball
-
-Tool-specific initialization for tarball.
-
-"""
-
-## Commands to tackle a command based implementation:
-##to unpack on the fly...
-##gunzip < FILE.tar.gz | tar xvf -
-##to pack on the fly...
-##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz
-
-import os.path
-
-import SCons.Builder
-import SCons.Node.FS
-import SCons.Util
-
-try:
- import gzip
- import tarfile
- internal_targz = 1
-except ImportError:
- internal_targz = 0
-
-TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
-
-if internal_targz:
- def targz(target, source, env):
- def archive_name(path):
- path = os.path.normpath(os.path.abspath(path))
- common_path = os.path.commonprefix((base_dir, path))
- archive_name = path[len(common_path):]
- return archive_name
-
- def visit(tar, dirname, names):
- for name in names:
- path = os.path.join(dirname, name)
- if os.path.isfile(path):
- tar.add(path, archive_name(path))
- compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
- base_dir = os.path.normpath(env.get('TARGZ_BASEDIR', env.Dir('.')).abspath)
- target_path = str(target[0])
- fileobj = gzip.GzipFile(target_path, 'wb', compression)
- tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
- for source in source:
- source_path = str(source)
- if source.isdir():
- os.path.walk(source_path, visit, tar)
- else:
- tar.add(source_path, archive_name(source_path)) # filename, arcname
- tar.close()
-
- targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])
-
- def makeBuilder(emitter = None):
- return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
- source_factory = SCons.Node.FS.Entry,
- source_scanner = SCons.Defaults.DirScanner,
- suffix = '$TARGZ_SUFFIX',
- multi = 1)
- TarGzBuilder = makeBuilder()
-
- def generate(env):
- """Add Builders and construction variables for zip to an Environment.
- The following environnement variables may be set:
- TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level).
- TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative
- to something other than top-dir).
- """
- env['BUILDERS']['TarGz'] = TarGzBuilder
- env['TARGZ_COM'] = targzAction
- env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9
- env['TARGZ_SUFFIX'] = '.tar.gz'
- env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory.
-else:
- def generate(env):
- pass
-
-
-def exists(env):
- return internal_targz
diff --git a/src/jsontestrunner/sconscript b/src/jsontestrunner/sconscript
deleted file mode 100644
index 6e68e31..0000000
--- a/src/jsontestrunner/sconscript
+++ /dev/null
@@ -1,9 +0,0 @@
-Import( 'env_testing buildJSONTests' )
-
-buildJSONTests( env_testing, Split( """
- main.cpp
- """ ),
- 'jsontestrunner' )
-
-# For 'check' to work, 'libs' must be built first.
-env_testing.Depends('jsontestrunner', '#libs')
diff --git a/src/lib_json/sconscript b/src/lib_json/sconscript
deleted file mode 100644
index 6e7c6c8..0000000
--- a/src/lib_json/sconscript
+++ /dev/null
@@ -1,8 +0,0 @@
-Import( 'env buildLibrary' )
-
-buildLibrary( env, Split( """
- json_reader.cpp
- json_value.cpp
- json_writer.cpp
- """ ),
- 'json' )
diff --git a/src/test_lib_json/sconscript b/src/test_lib_json/sconscript
deleted file mode 100644
index 915fd01..0000000
--- a/src/test_lib_json/sconscript
+++ /dev/null
@@ -1,10 +0,0 @@
-Import( 'env_testing buildUnitTests' )
-
-buildUnitTests( env_testing, Split( """
- main.cpp
- jsontest.cpp
- """ ),
- 'test_lib_json' )
-
-# For 'check' to work, 'libs' must be built first.
-env_testing.Depends('test_lib_json', '#libs')