Switched line endings from CRLF to LF across the repo and set svn:eol-style to native. A few files may have been missed; committing what I have so far.
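
For later reference, here is roughly how the cleaned-up helpers below are meant to be combined; this is an untested sketch, and the include patterns are illustrative rather than project defaults:

    # Hypothetical driver: list sources via antglob, then report which files
    # still need their line endings normalized (dry run; pass is_dry_run=False
    # to actually rewrite the files).
    from devtools import antglob
    from devtools.fixeol import fix_source_eol

    sources = antglob.glob( '.', includes = '**/*.py **/*.cpp **/*.h **/*.inl' )
    for path in sources:
        fix_source_eol( path, is_dry_run = True )

fix_source_eol() only rewrites the EOL sequence, so running it after this commit should be a no-op for files already converted to LF.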
diff --git a/devtools/antglob.py b/devtools/antglob.py
index bbb6fec..30837b5 100644
--- a/devtools/antglob.py
+++ b/devtools/antglob.py
@@ -55,20 +55,20 @@
 
 _ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )
 
-def ant_pattern_to_re( ant_pattern ):
-    """Generates a regular expression from the ant pattern.
-    Matching convention:
-    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
-    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
-    *.py: match 'script.py' but not 'a/script.py'
+def ant_pattern_to_re( ant_pattern ):
+    """Generates a regular expression from the ant pattern.
+    Matching convention:
+    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
+    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
+    *.py: match 'script.py' but not 'a/script.py'
     """
     rex = ['^']
     next_pos = 0
-    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
-##    print 'Converting', ant_pattern
-    for match in _ANT_RE.finditer( ant_pattern ):
-##        print 'Matched', match.group()
-##        print match.start(0), next_pos
+    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
+##    print 'Converting', ant_pattern
+    for match in _ANT_RE.finditer( ant_pattern ):
+##        print 'Matched', match.group()
+##        print match.start(0), next_pos
         if match.start(0) != next_pos:
             raise ValueError( "Invalid ant pattern" )
         if match.group(1): # /**/
@@ -83,14 +83,14 @@
             rex.append( sep_rex )
         else: # somepath
             rex.append( re.escape(match.group(6)) )
-        next_pos = match.end()
+        next_pos = match.end()
     rex.append('$')
     return re.compile( ''.join( rex ) )
-
-def _as_list( l ):
-    if isinstance(l, basestring):
-        return l.split()
-    return l
+
+def _as_list( l ):
+    if isinstance(l, basestring):
+        return l.split()
+    return l
 
 def glob(dir_path,
          includes = '**/*',
@@ -99,8 +99,8 @@
          prune_dirs = prune_dirs,
          max_depth = 25):
     include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
-    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
-    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
+    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
+    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
     dir_path = dir_path.replace('/',os.path.sep)
     entry_type_filter = entry_type
 
@@ -117,37 +117,37 @@
                 return True
         return False
 
-    def glob_impl( root_dir_path ):
-        child_dirs = [root_dir_path]
-        while child_dirs:
+    def glob_impl( root_dir_path ):
+        child_dirs = [root_dir_path]
+        while child_dirs:
             dir_path = child_dirs.pop()
-            for entry in listdir( dir_path ):
-                full_path = os.path.join( dir_path, entry )
-##                print 'Testing:', full_path,
-                is_dir = os.path.isdir( full_path )
-                if is_dir and not is_pruned_dir( entry ): # explore child directory ?
-##                    print '===> marked for recursion',
-                    child_dirs.append( full_path )
-                included = apply_filter( full_path, include_filter )
-                rejected = apply_filter( full_path, exclude_filter )
-                if not included or rejected: # do not include entry ?
-##                    print '=> not included or rejected'
-                    continue
-                link = os.path.islink( full_path )
-                is_file = os.path.isfile( full_path )
-                if not is_file and not is_dir:
-##                    print '=> unknown entry type'
-                    continue
-                if link:
-                    entry_type = is_file and FILE_LINK or DIR_LINK
-                else:
-                    entry_type = is_file and FILE or DIR
-##                print '=> type: %d' % entry_type, 
-                if (entry_type & entry_type_filter) != 0:
-##                    print ' => KEEP'
-                    yield os.path.join( dir_path, entry )
-##                else:
-##                    print ' => TYPE REJECTED'
+            for entry in listdir( dir_path ):
+                full_path = os.path.join( dir_path, entry )
+##                print 'Testing:', full_path,
+                is_dir = os.path.isdir( full_path )
+                if is_dir and not is_pruned_dir( entry ): # explore child directory ?
+##                    print '===> marked for recursion',
+                    child_dirs.append( full_path )
+                included = apply_filter( full_path, include_filter )
+                rejected = apply_filter( full_path, exclude_filter )
+                if not included or rejected: # do not include entry ?
+##                    print '=> not included or rejected'
+                    continue
+                link = os.path.islink( full_path )
+                is_file = os.path.isfile( full_path )
+                if not is_file and not is_dir:
+##                    print '=> unknown entry type'
+                    continue
+                if link:
+                    entry_type = is_file and FILE_LINK or DIR_LINK
+                else:
+                    entry_type = is_file and FILE or DIR
+##                print '=> type: %d' % entry_type, 
+                if (entry_type & entry_type_filter) != 0:
+##                    print ' => KEEP'
+                    yield os.path.join( dir_path, entry )
+##                else:
+##                    print ' => TYPE REJECTED'
     return list( glob_impl( dir_path ) )
 
 
@@ -155,47 +155,47 @@
     import unittest
 
     class AntPatternToRETest(unittest.TestCase):
-##        def test_conversion( self ):
-##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )
-
-        def test_matching( self ):
-            test_cases = [ ( 'path',
-                             ['path'],
-                             ['somepath', 'pathsuffix', '/path', '/path'] ),
-                           ( '*.py',
-                             ['source.py', 'source.ext.py', '.py'],
-                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
-                           ( '**/path',
-                             ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
-                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
-                           ( 'path/**',
-                             ['path/a', 'path/path/a', 'path//'],
-                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
-                           ( '/**/path',
-                             ['/path', '/a/path', '/a/b/path/path', '/path/path'],
-                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
-                           ( 'a/b',
-                             ['a/b'],
-                             ['somea/b', 'a/bsuffix', 'a/b/c'] ),
-                           ( '**/*.py',
-                             ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
-                             ['script.pyc', 'script.pyo', 'a.py/b'] ),
-                           ( 'src/**/*.py',
-                             ['src/a.py', 'src/dir/a.py'],
-                             ['a/src/a.py', '/src/a.py'] ),
-                           ]
-            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
-                def local_path( paths ):
-                    return [ p.replace('/',os.path.sep) for p in paths ]
-                test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
-            for ant_pattern, accepted_matches, rejected_matches in test_cases:
-                rex = ant_pattern_to_re( ant_pattern )
-                print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
-                for accepted_match in accepted_matches:
-                    print 'Accepted?:', accepted_match
-                    self.assert_( rex.match( accepted_match ) is not None )
-                for rejected_match in rejected_matches:
-                    print 'Rejected?:', rejected_match
-                    self.assert_( rex.match( rejected_match ) is None )
+##        def test_conversion( self ):
+##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )
+
+        def test_matching( self ):
+            test_cases = [ ( 'path',
+                             ['path'],
+                             ['somepath', 'pathsuffix', '/path', '/path'] ),
+                           ( '*.py',
+                             ['source.py', 'source.ext.py', '.py'],
+                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
+                           ( '**/path',
+                             ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
+                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
+                           ( 'path/**',
+                             ['path/a', 'path/path/a', 'path//'],
+                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
+                           ( '/**/path',
+                             ['/path', '/a/path', '/a/b/path/path', '/path/path'],
+                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
+                           ( 'a/b',
+                             ['a/b'],
+                             ['somea/b', 'a/bsuffix', 'a/b/c'] ),
+                           ( '**/*.py',
+                             ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
+                             ['script.pyc', 'script.pyo', 'a.py/b'] ),
+                           ( 'src/**/*.py',
+                             ['src/a.py', 'src/dir/a.py'],
+                             ['a/src/a.py', '/src/a.py'] ),
+                           ]
+            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
+                def local_path( paths ):
+                    return [ p.replace('/',os.path.sep) for p in paths ]
+                test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
+            for ant_pattern, accepted_matches, rejected_matches in test_cases:
+                rex = ant_pattern_to_re( ant_pattern )
+                print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
+                for accepted_match in accepted_matches:
+                    print 'Accepted?:', accepted_match
+                    self.assert_( rex.match( accepted_match ) is not None )
+                for rejected_match in rejected_matches:
+                    print 'Rejected?:', rejected_match
+                    self.assert_( rex.match( rejected_match ) is None )
 
     unittest.main()
diff --git a/devtools/fixeol.py b/devtools/fixeol.py
index 5d8372d..4fed6ce 100644
--- a/devtools/fixeol.py
+++ b/devtools/fixeol.py
@@ -1,63 +1,63 @@
-import os.path
-
-def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
-    """Makes sure that all sources have the specified eol sequence (default: unix)."""
-    if not os.path.isfile( path ):
-        raise ValueError( 'Path "%s" is not a file' % path )
-    try:
-        f = open(path, 'rb')
-    except IOError, msg:
-        print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
-        return False
-    try:
-        raw_lines = f.readlines()
-    finally:
-        f.close()
-    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
-    if raw_lines != fixed_lines:
-        print '%s =>' % path,
-        if not is_dry_run:
-            f = open(path, "wb")
-            try:
-                f.writelines(fixed_lines)
-            finally:
-                f.close()
-        if verbose:
-            print is_dry_run and ' NEED FIX' or ' FIXED'
-    return True
-##    
-##    
-##
-##def _do_fix( is_dry_run = True ):
-##    from waftools import antglob
-##    python_sources = antglob.glob( '.',
-##        includes = '**/*.py **/wscript **/wscript_build',
-##        excludes = antglob.default_excludes + './waf.py',
-##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
-##    for path in python_sources:
-##        _fix_python_source( path, is_dry_run )
-##
-##    cpp_sources = antglob.glob( '.',
-##        includes = '**/*.cpp **/*.h **/*.inl',
-##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
-##    for path in cpp_sources:
-##        _fix_source_eol( path, is_dry_run )
-##
-##
-##def dry_fix(context):
-##    _do_fix( is_dry_run = True )
-##
-##def fix(context):
-##    _do_fix( is_dry_run = False )
-##
-##def shutdown():
-##    pass
-##
-##def check(context):
-##    # Unit tests are run when "check" target is used
-##    ut = UnitTest.unit_test()
-##    ut.change_to_testfile_dir = True
-##    ut.want_to_see_test_output = True
-##    ut.want_to_see_test_error = True
-##    ut.run()
-##    ut.print_results()
+import os.path
+
+def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
+    """Makes sure that all sources have the specified eol sequence (default: unix)."""
+    if not os.path.isfile( path ):
+        raise ValueError( 'Path "%s" is not a file' % path )
+    try:
+        f = open(path, 'rb')
+    except IOError, msg:
+        print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
+        return False
+    try:
+        raw_lines = f.readlines()
+    finally:
+        f.close()
+    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
+    if raw_lines != fixed_lines:
+        print '%s =>' % path,
+        if not is_dry_run:
+            f = open(path, "wb")
+            try:
+                f.writelines(fixed_lines)
+            finally:
+                f.close()
+        if verbose:
+            print is_dry_run and ' NEED FIX' or ' FIXED'
+    return True
+##    
+##    
+##
+##def _do_fix( is_dry_run = True ):
+##    from waftools import antglob
+##    python_sources = antglob.glob( '.',
+##        includes = '**/*.py **/wscript **/wscript_build',
+##        excludes = antglob.default_excludes + './waf.py',
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
+##    for path in python_sources:
+##        _fix_python_source( path, is_dry_run )
+##
+##    cpp_sources = antglob.glob( '.',
+##        includes = '**/*.cpp **/*.h **/*.inl',
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
+##    for path in cpp_sources:
+##        _fix_source_eol( path, is_dry_run )
+##
+##
+##def dry_fix(context):
+##    _do_fix( is_dry_run = True )
+##
+##def fix(context):
+##    _do_fix( is_dry_run = False )
+##
+##def shutdown():
+##    pass
+##
+##def check(context):
+##    # Unit tests are run when "check" target is used
+##    ut = UnitTest.unit_test()
+##    ut.change_to_testfile_dir = True
+##    ut.want_to_see_test_output = True
+##    ut.want_to_see_test_error = True
+##    ut.run()
+##    ut.print_results()
diff --git a/devtools/licenseupdater.py b/devtools/licenseupdater.py
index 03e0467..866eada 100644
--- a/devtools/licenseupdater.py
+++ b/devtools/licenseupdater.py
@@ -1,93 +1,93 @@
-"""Updates the license text in source file.

-"""

-

-# An existing license is found if the file starts with the string below,

-# and ends with the first blank line.

-LICENSE_BEGIN = "// Copyright "

-

-BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur

-// Distributed under MIT license, or public domain if desired and

-// recognized in your jurisdiction.

-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

-

-""".replace('\r\n','\n')

-

-def update_license( path, dry_run, show_diff ):

-    """Update the license statement in the specified file.

-    Parameters:

-      path: path of the C++ source file to update.

-      dry_run: if True, just print the path of the file that would be updated,

-               but don't change it.

-      show_diff: if True, print the path of the file that would be modified,

-                 as well as the change made to the file. 

-    """

-    with open( path, 'rt' ) as fin:

-        original_text = fin.read().replace('\r\n','\n')

-        newline = fin.newlines and fin.newlines[0] or '\n'

-    if not original_text.startswith( LICENSE_BEGIN ):

-        # No existing license found => prepend it

-        new_text = BRIEF_LICENSE + original_text

-    else:

-        license_end_index = original_text.index( '\n\n' ) # search first blank line

-        new_text = BRIEF_LICENSE + original_text[license_end_index+2:]

-    if original_text != new_text:

-        if not dry_run:

-            with open( path, 'wb' ) as fout:

-                fout.write( new_text.replace('\n', newline ) )

-        print 'Updated', path

-        if show_diff:

-            import difflib

-            print '\n'.join( difflib.unified_diff( original_text.split('\n'),

-                                                   new_text.split('\n') ) )

-        return True

-    return False

-

-def update_license_in_source_directories( source_dirs, dry_run, show_diff ):

-    """Updates license text in C++ source files found in directory source_dirs.

-    Parameters:

-      source_dirs: list of directory to scan for C++ sources. Directories are

-                   scanned recursively.

-      dry_run: if True, just print the path of the file that would be updated,

-               but don't change it.

-      show_diff: if True, print the path of the file that would be modified,

-                 as well as the change made to the file. 

-    """

-    from devtools import antglob

-    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'

-    for source_dir in source_dirs:

-        cpp_sources = antglob.glob( source_dir,

-            includes = '''**/*.h **/*.cpp **/*.inl''',

-            prune_dirs = prune_dirs )

-        for source in cpp_sources:

-            update_license( source, dry_run, show_diff )

-

-def main():

-    usage = """%prog DIR [DIR2...]

-Updates license text in sources of the project in source files found

-in the directory specified on the command-line.

-

-Example of call:

-python devtools\licenseupdater.py include src -n --diff

-=> Show change that would be made to the sources.

-

-python devtools\licenseupdater.py include src

-=> Update license statement on all sources in directories include/ and src/.

-"""

-    from optparse import OptionParser

-    parser = OptionParser(usage=usage)

-    parser.allow_interspersed_args = False

-    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,

-        help="""Only show what files are updated, do not update the files""")

-    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,

-        help="""On update, show change made to the file.""")

-    parser.enable_interspersed_args()

-    options, args = parser.parse_args()

-    update_license_in_source_directories( args, options.dry_run, options.show_diff )

-    print 'Done'

-

-if __name__ == '__main__':

-    import sys

-    import os.path

-    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

-    main()

-

+"""Updates the license text in source file.
+"""
+
+# An existing license is found if the file starts with the string below,
+# and ends with the first blank line.
+LICENSE_BEGIN = "// Copyright "
+
+BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+""".replace('\r\n','\n')
+
+def update_license( path, dry_run, show_diff ):
+    """Update the license statement in the specified file.
+    Parameters:
+      path: path of the C++ source file to update.
+      dry_run: if True, just print the path of the file that would be updated,
+               but don't change it.
+      show_diff: if True, print the path of the file that would be modified,
+                 as well as the change made to the file. 
+    """
+    with open( path, 'rt' ) as fin:
+        original_text = fin.read().replace('\r\n','\n')
+        newline = fin.newlines and fin.newlines[0] or '\n'
+    if not original_text.startswith( LICENSE_BEGIN ):
+        # No existing license found => prepend it
+        new_text = BRIEF_LICENSE + original_text
+    else:
+        license_end_index = original_text.index( '\n\n' ) # search first blank line
+        new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
+    if original_text != new_text:
+        if not dry_run:
+            with open( path, 'wb' ) as fout:
+                fout.write( new_text.replace('\n', newline ) )
+        print 'Updated', path
+        if show_diff:
+            import difflib
+            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
+                                                   new_text.split('\n') ) )
+        return True
+    return False
+
+def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
+    """Updates license text in C++ source files found in directory source_dirs.
+    Parameters:
+      source_dirs: list of directory to scan for C++ sources. Directories are
+                   scanned recursively.
+      dry_run: if True, just print the path of the file that would be updated,
+               but don't change it.
+      show_diff: if True, print the path of the file that would be modified,
+                 as well as the change made to the file. 
+    """
+    from devtools import antglob
+    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
+    for source_dir in source_dirs:
+        cpp_sources = antglob.glob( source_dir,
+            includes = '''**/*.h **/*.cpp **/*.inl''',
+            prune_dirs = prune_dirs )
+        for source in cpp_sources:
+            update_license( source, dry_run, show_diff )
+
+def main():
+    usage = """%prog DIR [DIR2...]
+Updates license text in sources of the project in source files found
+in the directory specified on the command-line.
+
+Example of call:
+python devtools\licenseupdater.py include src -n --diff
+=> Show change that would be made to the sources.
+
+python devtools\licenseupdater.py include src
+=> Update license statement on all sources in directories include/ and src/.
+"""
+    from optparse import OptionParser
+    parser = OptionParser(usage=usage)
+    parser.allow_interspersed_args = False
+    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
+        help="""Only show what files are updated, do not update the files""")
+    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
+        help="""On update, show change made to the file.""")
+    parser.enable_interspersed_args()
+    options, args = parser.parse_args()
+    update_license_in_source_directories( args, options.dry_run, options.show_diff )
+    print 'Done'
+
+if __name__ == '__main__':
+    import sys
+    import os.path
+    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+    main()
+
diff --git a/devtools/tarball.py b/devtools/tarball.py
index 182602e..ccbda39 100644
--- a/devtools/tarball.py
+++ b/devtools/tarball.py
@@ -1,53 +1,53 @@
-import os.path
-import gzip
-import tarfile
-
-TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
-
-def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
-    """Parameters:
-    tarball_path: output path of the .tar.gz file
-    sources: list of sources to include in the tarball, relative to the current directory
-    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
-        from path in the tarball.
-    prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
-        to make them child of root.
-    """
-    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
-    def archive_name( path ):
-        """Makes path relative to base_dir."""
-        path = os.path.normpath( os.path.abspath( path ) )
-        common_path = os.path.commonprefix( (base_dir, path) )
-        archive_name = path[len(common_path):]
-        if os.path.isabs( archive_name ):
-            archive_name = archive_name[1:]
-        return os.path.join( prefix_dir, archive_name )
-    def visit(tar, dirname, names):
-        for name in names:
-            path = os.path.join(dirname, name)
-            if os.path.isfile(path):
-                path_in_tar = archive_name(path)
-                tar.add(path, path_in_tar )
-    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
-    tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
-    try:
-        for source in sources:
-            source_path = source
-            if os.path.isdir( source ):
-                os.path.walk(source_path, visit, tar)
-            else:
-                path_in_tar = archive_name(source_path)
-                tar.add(source_path, path_in_tar )      # filename, arcname
-    finally:
-        tar.close()
-
-def decompress( tarball_path, base_dir ):
-    """Decompress the gzipped tarball into directory base_dir.
-    """
-    # !!! This class method is not documented in the online doc
-    # nor is bz2open!
-    tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
-    try:
-        tar.extractall( base_dir )
-    finally:
-        tar.close()
+import os.path
+import gzip
+import tarfile
+
+TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
+
+def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
+    """Parameters:
+    tarball_path: output path of the .tar.gz file
+    sources: list of sources to include in the tarball, relative to the current directory
+    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
+        from path in the tarball.
+    prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
+        to make them child of root.
+    """
+    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
+    def archive_name( path ):
+        """Makes path relative to base_dir."""
+        path = os.path.normpath( os.path.abspath( path ) )
+        common_path = os.path.commonprefix( (base_dir, path) )
+        archive_name = path[len(common_path):]
+        if os.path.isabs( archive_name ):
+            archive_name = archive_name[1:]
+        return os.path.join( prefix_dir, archive_name )
+    def visit(tar, dirname, names):
+        for name in names:
+            path = os.path.join(dirname, name)
+            if os.path.isfile(path):
+                path_in_tar = archive_name(path)
+                tar.add(path, path_in_tar )
+    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
+    tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
+    try:
+        for source in sources:
+            source_path = source
+            if os.path.isdir( source ):
+                os.path.walk(source_path, visit, tar)
+            else:
+                path_in_tar = archive_name(source_path)
+                tar.add(source_path, path_in_tar )      # filename, arcname
+    finally:
+        tar.close()
+
+def decompress( tarball_path, base_dir ):
+    """Decompress the gzipped tarball into directory base_dir.
+    """
+    # !!! This class method is not documented in the online doc
+    # nor is bz2open!
+    tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
+    try:
+        tar.extractall( base_dir )
+    finally:
+        tar.close()
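
As a quick reminder of how tarball.py is driven (untested sketch; the archive name, source list, and prefix are made up for illustration):

    from devtools import tarball

    # Bundle a couple of directories under a 'jsoncpp/' prefix inside the
    # archive, then unpack the result into a scratch directory.
    tarball.make_tarball( 'jsoncpp-src.tar.gz', ['include', 'src'], base_dir='.', prefix_dir='jsoncpp' )
    tarball.decompress( 'jsoncpp-src.tar.gz', 'unpacked' )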