Remove extra whitespace in Python scripts, per PEP 8
diff --git a/devtools/antglob.py b/devtools/antglob.py
index 8b7b4ca..afd4c48 100644
--- a/devtools/antglob.py
+++ b/devtools/antglob.py
@@ -54,9 +54,9 @@
 ALL_NO_LINK = DIR | FILE
 ALL = DIR | FILE | LINKS
 
-_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )
+_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)')
 
-def ant_pattern_to_re( ant_pattern ):
+def ant_pattern_to_re(ant_pattern):
     """Generates a regular expression from the ant pattern.
     Matching convention:
     **/a: match 'a', 'dir/a', 'dir1/dir2/a'
@@ -65,30 +65,30 @@
     """
     rex = ['^']
     next_pos = 0
-    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
+    sep_rex = r'(?:/|%s)' % re.escape(os.path.sep)
 ##    print 'Converting', ant_pattern
-    for match in _ANT_RE.finditer( ant_pattern ):
+    for match in _ANT_RE.finditer(ant_pattern):
 ##        print 'Matched', match.group()
 ##        print match.start(0), next_pos
         if match.start(0) != next_pos:
-            raise ValueError( "Invalid ant pattern" )
+            raise ValueError("Invalid ant pattern")
         if match.group(1): # /**/
-            rex.append( sep_rex + '(?:.*%s)?' % sep_rex )
+            rex.append(sep_rex + '(?:.*%s)?' % sep_rex)
         elif match.group(2): # **/
-            rex.append( '(?:.*%s)?' % sep_rex )
+            rex.append('(?:.*%s)?' % sep_rex)
         elif match.group(3): # /**
-            rex.append( sep_rex + '.*' )
+            rex.append(sep_rex + '.*')
         elif match.group(4): # *
-            rex.append( '[^/%s]*' % re.escape(os.path.sep) )
+            rex.append('[^/%s]*' % re.escape(os.path.sep))
         elif match.group(5): # /
-            rex.append( sep_rex )
+            rex.append(sep_rex)
         else: # somepath
-            rex.append( re.escape(match.group(6)) )
+            rex.append(re.escape(match.group(6)))
         next_pos = match.end()
     rex.append('$')
-    return re.compile( ''.join( rex ) )
+    return re.compile(''.join(rex))
 
-def _as_list( l ):
+def _as_list(l):
     if isinstance(l, basestring):
         return l.split()
     return l
@@ -105,37 +105,37 @@
     dir_path = dir_path.replace('/',os.path.sep)
     entry_type_filter = entry_type
 
-    def is_pruned_dir( dir_name ):
+    def is_pruned_dir(dir_name):
         for pattern in prune_dirs:
-            if fnmatch.fnmatch( dir_name, pattern ):
+            if fnmatch.fnmatch(dir_name, pattern):
                 return True
         return False
 
-    def apply_filter( full_path, filter_rexs ):
+    def apply_filter(full_path, filter_rexs):
         """Return True if at least one of the filter regular expression match full_path."""
         for rex in filter_rexs:
-            if rex.match( full_path ):
+            if rex.match(full_path):
                 return True
         return False
 
-    def glob_impl( root_dir_path ):
+    def glob_impl(root_dir_path):
         child_dirs = [root_dir_path]
         while child_dirs:
             dir_path = child_dirs.pop()
-            for entry in listdir( dir_path ):
-                full_path = os.path.join( dir_path, entry )
+            for entry in listdir(dir_path):
+                full_path = os.path.join(dir_path, entry)
 ##                print 'Testing:', full_path,
-                is_dir = os.path.isdir( full_path )
-                if is_dir and not is_pruned_dir( entry ): # explore child directory ?
+                is_dir = os.path.isdir(full_path)
+                if is_dir and not is_pruned_dir(entry): # explore child directory ?
 ##                    print '===> marked for recursion',
-                    child_dirs.append( full_path )
-                included = apply_filter( full_path, include_filter )
-                rejected = apply_filter( full_path, exclude_filter )
+                    child_dirs.append(full_path)
+                included = apply_filter(full_path, include_filter)
+                rejected = apply_filter(full_path, exclude_filter)
                 if not included or rejected: # do not include entry ?
 ##                    print '=> not included or rejected'
                     continue
-                link = os.path.islink( full_path )
-                is_file = os.path.isfile( full_path )
+                link = os.path.islink(full_path)
+                is_file = os.path.isfile(full_path)
                 if not is_file and not is_dir:
 ##                    print '=> unknown entry type'
                     continue
@@ -146,57 +146,57 @@
 ##                print '=> type: %d' % entry_type, 
                 if (entry_type & entry_type_filter) != 0:
 ##                    print ' => KEEP'
-                    yield os.path.join( dir_path, entry )
+                    yield os.path.join(dir_path, entry)
 ##                else:
 ##                    print ' => TYPE REJECTED'
-    return list( glob_impl( dir_path ) )
+    return list(glob_impl(dir_path))
 
 
 if __name__ == "__main__":
     import unittest
 
     class AntPatternToRETest(unittest.TestCase):
-##        def test_conversion( self ):
-##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )
+##        def test_conversion(self):
+##            self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern)
 
-        def test_matching( self ):
-            test_cases = [ ( 'path',
+        def test_matching(self):
+            test_cases = [ ('path',
                              ['path'],
-                             ['somepath', 'pathsuffix', '/path', '/path'] ),
-                           ( '*.py',
+                             ['somepath', 'pathsuffix', '/path', '/path']),
+                           ('*.py',
                              ['source.py', 'source.ext.py', '.py'],
-                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
-                           ( '**/path',
+                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']),
+                           ('**/path',
                              ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
-                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
-                           ( 'path/**',
+                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']),
+                           ('path/**',
                              ['path/a', 'path/path/a', 'path//'],
-                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
-                           ( '/**/path',
+                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']),
+                           ('/**/path',
                              ['/path', '/a/path', '/a/b/path/path', '/path/path'],
-                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
-                           ( 'a/b',
+                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath']),
+                           ('a/b',
                              ['a/b'],
-                             ['somea/b', 'a/bsuffix', 'a/b/c'] ),
-                           ( '**/*.py',
+                             ['somea/b', 'a/bsuffix', 'a/b/c']),
+                           ('**/*.py',
                              ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
-                             ['script.pyc', 'script.pyo', 'a.py/b'] ),
-                           ( 'src/**/*.py',
+                             ['script.pyc', 'script.pyo', 'a.py/b']),
+                           ('src/**/*.py',
                              ['src/a.py', 'src/dir/a.py'],
-                             ['a/src/a.py', '/src/a.py'] ),
+                             ['a/src/a.py', '/src/a.py']),
                            ]
             for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
-                def local_path( paths ):
+                def local_path(paths):
                     return [ p.replace('/',os.path.sep) for p in paths ]
-                test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
+                test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches)))
             for ant_pattern, accepted_matches, rejected_matches in test_cases:
-                rex = ant_pattern_to_re( ant_pattern )
+                rex = ant_pattern_to_re(ant_pattern)
                 print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
                 for accepted_match in accepted_matches:
                     print('Accepted?:', accepted_match)
-                    self.assertTrue( rex.match( accepted_match ) is not None )
+                    self.assertTrue(rex.match(accepted_match) is not None)
                 for rejected_match in rejected_matches:
                     print('Rejected?:', rejected_match)
-                    self.assertTrue( rex.match( rejected_match ) is None )
+                    self.assertTrue(rex.match(rejected_match) is None)
 
     unittest.main()
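
As a quick illustration of what the antglob tests above exercise, here is a minimal usage sketch, assuming it is run from the repository root so that devtools/ is importable as a package (licenseupdater.py imports it the same way):

    from devtools import antglob

    # 'src/**/*.py' should accept any .py file at or below src/, per the docstring conventions.
    rex = antglob.ant_pattern_to_re('src/**/*.py')
    print(bool(rex.match('src/dir/a.py')))   # True: '**/' spans zero or more directories
    print(bool(rex.match('a/src/a.py')))     # False: the generated regex is anchored at both ends
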
diff --git a/devtools/batchbuild.py b/devtools/batchbuild.py
index 6f57945..0eb0690 100644
--- a/devtools/batchbuild.py
+++ b/devtools/batchbuild.py
@@ -18,62 +18,62 @@
         self.build_type = build_type
         self.generator = generator
 
-    def merged_with( self, build_desc ):
+    def merged_with(self, build_desc):
         """Returns a new BuildDesc by merging field content.
            Prefer build_desc fields to self fields for single valued field.
         """
-        return BuildDesc( self.prepend_envs + build_desc.prepend_envs,
+        return BuildDesc(self.prepend_envs + build_desc.prepend_envs,
                           self.variables + build_desc.variables,
                           build_desc.build_type or self.build_type,
-                          build_desc.generator or self.generator )
+                          build_desc.generator or self.generator)
 
-    def env( self ):
+    def env(self):
         environ = os.environ.copy()
         for values_by_name in self.prepend_envs:
             for var, value in list(values_by_name.items()):
                 var = var.upper()
                 if type(value) is unicode:
-                    value = value.encode( sys.getdefaultencoding() )
+                    value = value.encode(sys.getdefaultencoding())
                 if var in environ:
                     environ[var] = value + os.pathsep + environ[var]
                 else:
                     environ[var] = value
         return environ
 
-    def cmake_args( self ):
+    def cmake_args(self):
         args = ["-D%s" % var for var in self.variables]
         # skip build type for Visual Studio solution as it cause warning
         if self.build_type and 'Visual' not in self.generator:
-            args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type )
+            args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type)
         if self.generator:
-            args.extend( ['-G', self.generator] )
+            args.extend(['-G', self.generator])
         return args
 
-    def __repr__( self ):
-        return "BuildDesc( %s, build_type=%s )" %  (" ".join( self.cmake_args()), self.build_type)
+    def __repr__(self):
+        return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type)
 
 class BuildData:
-    def __init__( self, desc, work_dir, source_dir ):
+    def __init__(self, desc, work_dir, source_dir):
         self.desc = desc
         self.work_dir = work_dir
         self.source_dir = source_dir
-        self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' )
-        self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' )
+        self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log')
+        self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log')
         self.cmake_succeeded = False
         self.build_succeeded = False
 
     def execute_build(self):
         print('Build %s' % self.desc)
-        self._make_new_work_dir( )
-        self.cmake_succeeded = self._generate_makefiles( )
+        self._make_new_work_dir()
+        self.cmake_succeeded = self._generate_makefiles()
         if self.cmake_succeeded:
-            self.build_succeeded = self._build_using_makefiles( )
+            self.build_succeeded = self._build_using_makefiles()
         return self.build_succeeded
 
     def _generate_makefiles(self):
         print('  Generating makefiles: ', end=' ')
-        cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
-        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
+        cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)]
+        succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path)
         print('done' if succeeded else 'FAILED')
         return succeeded
 
@@ -82,58 +82,58 @@
         cmd = ['cmake', '--build', self.work_dir]
         if self.desc.build_type:
             cmd += ['--config', self.desc.build_type]
-        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
+        succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path)
         print('done' if succeeded else 'FAILED')
         return succeeded
 
     def _execute_build_subprocess(self, cmd, env, log_path):
-        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
-                                    env=env )
-        stdout, _ = process.communicate( )
+        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
+                                    env=env)
+        stdout, _ = process.communicate()
         succeeded = (process.returncode == 0)
-        with open( log_path, 'wb' ) as flog:
-            log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
-            flog.write( fix_eol( log ) )
+        with open(log_path, 'wb') as flog:
+            log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
+            flog.write(fix_eol(log))
         return succeeded
 
     def _make_new_work_dir(self):
-        if os.path.isdir( self.work_dir ):
+        if os.path.isdir(self.work_dir):
             print('  Removing work directory', self.work_dir)
-            shutil.rmtree( self.work_dir, ignore_errors=True )
-        if not os.path.isdir( self.work_dir ):
-            os.makedirs( self.work_dir )
+            shutil.rmtree(self.work_dir, ignore_errors=True)
+        if not os.path.isdir(self.work_dir):
+            os.makedirs(self.work_dir)
 
-def fix_eol( stdout ):
+def fix_eol(stdout):
     """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
     """
-    return re.sub( '\r*\n', os.linesep, stdout )
+    return re.sub('\r*\n', os.linesep, stdout)
 
-def load_build_variants_from_config( config_path ):
-    with open( config_path, 'rb' ) as fconfig:
-        data = json.load( fconfig )
+def load_build_variants_from_config(config_path):
+    with open(config_path, 'rb') as fconfig:
+        data = json.load(fconfig)
     variants = data[ 'cmake_variants' ]
-    build_descs_by_axis = collections.defaultdict( list )
+    build_descs_by_axis = collections.defaultdict(list)
     for axis in variants:
         axis_name = axis["name"]
         build_descs = []
         if "generators" in axis:
             for generator_data in axis["generators"]:
                 for generator in generator_data["generator"]:
-                    build_desc = BuildDesc( generator=generator,
-                                            prepend_envs=generator_data.get("env_prepend") )
-                    build_descs.append( build_desc )
+                    build_desc = BuildDesc(generator=generator,
+                                            prepend_envs=generator_data.get("env_prepend"))
+                    build_descs.append(build_desc)
         elif "variables" in axis:
             for variables in axis["variables"]:
-                build_desc = BuildDesc( variables=variables )
-                build_descs.append( build_desc )
+                build_desc = BuildDesc(variables=variables)
+                build_descs.append(build_desc)
         elif "build_types" in axis:
             for build_type in axis["build_types"]:
-                build_desc = BuildDesc( build_type=build_type )
-                build_descs.append( build_desc )
-        build_descs_by_axis[axis_name].extend( build_descs )
+                build_desc = BuildDesc(build_type=build_type)
+                build_descs.append(build_desc)
+        build_descs_by_axis[axis_name].extend(build_descs)
     return build_descs_by_axis
 
-def generate_build_variants( build_descs_by_axis ):
+def generate_build_variants(build_descs_by_axis):
     """Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
     axis_names = list(build_descs_by_axis.keys())
     build_descs = []
@@ -141,8 +141,8 @@
         if len(build_descs):
             # for each existing build_desc and each axis build desc, create a new build_desc
             new_build_descs = []
-            for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs):
-                new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) )
+            for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs):
+                new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc))
             build_descs = new_build_descs
         else:
             build_descs = axis_build_descs
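
For reference, this is the shape of JSON that load_build_variants_from_config() above accepts, reconstructed from the keys it reads ("cmake_variants", "name", "generators"/"generator"/"env_prepend", "variables", "build_types"); the axis names and values below are illustrative, not taken from a real config. generate_build_variants() then forms the cross product of the axes, so this example yields 2 x 2 x 2 = 8 build variants.

    import json

    example_config = json.loads('''
    {
      "cmake_variants": [
        { "name": "generator",
          "generators": [ { "generator": ["Unix Makefiles", "Ninja"] } ] },
        { "name": "shared_libs",
          "variables": [ ["BUILD_SHARED_LIBS=ON"], ["BUILD_SHARED_LIBS=OFF"] ] },
        { "name": "type",
          "build_types": ["Debug", "Release"] }
      ]
    }
    ''')
    print([axis['name'] for axis in example_config['cmake_variants']])
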
@@ -174,60 +174,60 @@
 </table>
 </body></html>''')
 
-def generate_html_report( html_report_path, builds ):
-    report_dir = os.path.dirname( html_report_path )
+def generate_html_report(html_report_path, builds):
+    report_dir = os.path.dirname(html_report_path)
     # Vertical axis: generator
     # Horizontal: variables, then build_type
-    builds_by_generator = collections.defaultdict( list )
+    builds_by_generator = collections.defaultdict(list)
     variables = set()
-    build_types_by_variable = collections.defaultdict( set )
+    build_types_by_variable = collections.defaultdict(set)
     build_by_pos_key = {} # { (generator, var_key, build_type): build }
     for build in builds:
-        builds_by_generator[build.desc.generator].append( build )
+        builds_by_generator[build.desc.generator].append(build)
         var_key = tuple(sorted(build.desc.variables))
-        variables.add( var_key )
-        build_types_by_variable[var_key].add( build.desc.build_type )
+        variables.add(var_key)
+        build_types_by_variable[var_key].add(build.desc.build_type)
         pos_key = (build.desc.generator, var_key, build.desc.build_type)
         build_by_pos_key[pos_key] = build
-    variables = sorted( variables )
+    variables = sorted(variables)
     th_vars = []
     th_build_types = []
     for variable in variables:
-        build_types = sorted( build_types_by_variable[variable] )
+        build_types = sorted(build_types_by_variable[variable])
         nb_build_type = len(build_types_by_variable[variable])
-        th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) )
+        th_vars.append('<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape(' '.join(variable))))
         for build_type in build_types:
-            th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) )
+            th_build_types.append('<th>%s</th>' % cgi.escape(build_type))
     tr_builds = []
-    for generator in sorted( builds_by_generator ):
-        tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ]
+    for generator in sorted(builds_by_generator):
+        tds = [ '<td>%s</td>\n' % cgi.escape(generator) ]
         for variable in variables:
-            build_types = sorted( build_types_by_variable[variable] )
+            build_types = sorted(build_types_by_variable[variable])
             for build_type in build_types:
                 pos_key = (generator, variable, build_type)
                 build = build_by_pos_key.get(pos_key)
                 if build:
                     cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
                     build_status = 'ok' if build.build_succeeded else 'FAILED'
-                    cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
-                    build_log_url = os.path.relpath( build.build_log_path, report_dir )
-                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
-                        build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
+                    cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir)
+                    build_log_url = os.path.relpath(build.build_log_path, report_dir)
+                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
+                        build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
                     if build.cmake_succeeded:
-                        td += '<br><a href="%s" class="%s">Build: %s</a>' % (
-                            build_log_url, build_status.lower(), build_status)
+                        td += '<br><a href="%s" class="%s">Build: %s</a>' % (
+                            build_log_url, build_status.lower(), build_status)
                     td += '</td>'
                 else:
                     td = '<td></td>'
-                tds.append( td )
-        tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
-    html = HTML_TEMPLATE.substitute(
-        title='Batch build report',
+                tds.append(td)
+        tr_builds.append('<tr>%s</tr>' % '\n'.join(tds))
+    html = HTML_TEMPLATE.substitute(
+        title='Batch build report',
         th_vars=' '.join(th_vars),
-        th_build_types=' '.join( th_build_types),
-        tr_builds='\n'.join( tr_builds ) )
-    with open( html_report_path, 'wt' ) as fhtml:
-        fhtml.write( html )
+        th_build_types=' '.join(th_build_types),
+        tr_builds='\n'.join(tr_builds))
+    with open(html_report_path, 'wt') as fhtml:
+        fhtml.write(html)
     print('HTML report generated in:', html_report_path)
 
 def main():
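
For context on the substitution at the end of generate_html_report(): HTML_TEMPLATE is only partially visible in this hunk, but its .substitute() call with keyword placeholders suggests a string.Template. A minimal sketch with a stand-in template (the placeholder names below are illustrative, not the full set used by batchbuild.py):

    import string

    # Trimmed-down stand-in for HTML_TEMPLATE.
    page = string.Template('<html><head><title>$title</title></head>'
                           '<body><table><tr>$th_vars</tr>$tr_builds</table></body></html>')
    print(page.substitute(title='Batch build report',
                          th_vars='<th>BUILD_SHARED_LIBS=ON</th>',
                          tr_builds='<tr><td>ok</td></tr>'))
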
@@ -246,33 +246,33 @@
     parser.enable_interspersed_args()
     options, args = parser.parse_args()
     if len(args) < 3:
-        parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
+        parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.")
     work_dir = args[0]
     source_dir = args[1].rstrip('/\\')
     config_paths = args[2:]
     for config_path in config_paths:
-        if not os.path.isfile( config_path ):
-            parser.error( "Can not read: %r" % config_path )
+        if not os.path.isfile(config_path):
+            parser.error("Can not read: %r" % config_path)
 
     # generate build variants
     build_descs = []
     for config_path in config_paths:
-        build_descs_by_axis = load_build_variants_from_config( config_path )
-        build_descs.extend( generate_build_variants( build_descs_by_axis ) )
+        build_descs_by_axis = load_build_variants_from_config(config_path)
+        build_descs.extend(generate_build_variants(build_descs_by_axis))
     print('Build variants (%d):' % len(build_descs))
     # assign build directory for each variant
-    if not os.path.isdir( work_dir ):
-        os.makedirs( work_dir )
+    if not os.path.isdir(work_dir):
+        os.makedirs(work_dir)
     builds = []
-    with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
-        for index, build_desc in enumerate( build_descs ):
-            build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
-            builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
-            fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
+    with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap:
+        for index, build_desc in enumerate(build_descs):
+            build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1))
+            builds.append(BuildData(build_desc, build_desc_work_dir, source_dir))
+            fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc))
     for build in builds:
         build.execute_build()
-    html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
-    generate_html_report( html_report_path, builds )
+    html_report_path = os.path.join(work_dir, 'batchbuild-report.html')
+    generate_html_report(html_report_path, builds)
     print('Done')
 
 
diff --git a/devtools/fixeol.py b/devtools/fixeol.py
index 53af761..a76880f 100644
--- a/devtools/fixeol.py
+++ b/devtools/fixeol.py
@@ -1,10 +1,10 @@
 from __future__ import print_function
 import os.path
 
-def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
+def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
     """Makes sure that all sources have the specified eol sequence (default: unix)."""
-    if not os.path.isfile( path ):
-        raise ValueError( 'Path "%s" is not a file' % path )
+    if not os.path.isfile(path):
+        raise ValueError('Path "%s" is not a file' % path)
     try:
         f = open(path, 'rb')
     except IOError as msg:
@@ -29,27 +29,27 @@
 ##    
 ##    
 ##
-##def _do_fix( is_dry_run = True ):
+##def _do_fix(is_dry_run = True):
 ##    from waftools import antglob
-##    python_sources = antglob.glob( '.',
+##    python_sources = antglob.glob('.',
 ##        includes = '**/*.py **/wscript **/wscript_build',
 ##        excludes = antglob.default_excludes + './waf.py',
-##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
 ##    for path in python_sources:
-##        _fix_python_source( path, is_dry_run )
+##        _fix_python_source(path, is_dry_run)
 ##
-##    cpp_sources = antglob.glob( '.',
+##    cpp_sources = antglob.glob('.',
 ##        includes = '**/*.cpp **/*.h **/*.inl',
-##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
 ##    for path in cpp_sources:
-##        _fix_source_eol( path, is_dry_run )
+##        _fix_source_eol(path, is_dry_run)
 ##
 ##
 ##def dry_fix(context):
-##    _do_fix( is_dry_run = True )
+##    _do_fix(is_dry_run = True)
 ##
 ##def fix(context):
-##    _do_fix( is_dry_run = False )
+##    _do_fix(is_dry_run = False)
 ##
 ##def shutdown():
 ##    pass
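
The body of fix_source_eol() is cut off by this hunk; the normalization it performs boils down to the following sketch (an illustrative helper, not the actual function):

    def normalize_eol(text, eol='\n'):
        # Collapse \r\n, bare \r, and \n to a single form, then expand to the requested sequence.
        return text.replace('\r\n', '\n').replace('\r', '\n').replace('\n', eol)

    assert normalize_eol('a\r\nb\rc\n') == 'a\nb\nc\n'
    assert normalize_eol('a\nb\n', eol='\r\n') == 'a\r\nb\r\n'
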
diff --git a/devtools/licenseupdater.py b/devtools/licenseupdater.py
index 8cb71d7..6f82361 100644
--- a/devtools/licenseupdater.py
+++ b/devtools/licenseupdater.py
@@ -13,7 +13,7 @@
 
 """.replace('\r\n','\n')
 
-def update_license( path, dry_run, show_diff ):
+def update_license(path, dry_run, show_diff):
     """Update the license statement in the specified file.
     Parameters:
       path: path of the C++ source file to update.
@@ -22,28 +22,28 @@
       show_diff: if True, print the path of the file that would be modified,
                  as well as the change made to the file. 
     """
-    with open( path, 'rt' ) as fin:
+    with open(path, 'rt') as fin:
         original_text = fin.read().replace('\r\n','\n')
         newline = fin.newlines and fin.newlines[0] or '\n'
-    if not original_text.startswith( LICENSE_BEGIN ):
+    if not original_text.startswith(LICENSE_BEGIN):
         # No existing license found => prepend it
         new_text = BRIEF_LICENSE + original_text
     else:
-        license_end_index = original_text.index( '\n\n' ) # search first blank line
+        license_end_index = original_text.index('\n\n') # search first blank line
         new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
     if original_text != new_text:
         if not dry_run:
-            with open( path, 'wb' ) as fout:
-                fout.write( new_text.replace('\n', newline ) )
+            with open(path, 'wb') as fout:
+                fout.write(new_text.replace('\n', newline))
         print('Updated', path)
         if show_diff:
             import difflib
-            print('\n'.join( difflib.unified_diff( original_text.split('\n'),
-                                                   new_text.split('\n') ) ))
+            print('\n'.join(difflib.unified_diff(original_text.split('\n'),
+                                                   new_text.split('\n'))))
         return True
     return False
 
-def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
+def update_license_in_source_directories(source_dirs, dry_run, show_diff):
     """Updates license text in C++ source files found in directory source_dirs.
     Parameters:
       source_dirs: list of directory to scan for C++ sources. Directories are
@@ -56,11 +56,11 @@
     from devtools import antglob
     prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
     for source_dir in source_dirs:
-        cpp_sources = antglob.glob( source_dir,
+        cpp_sources = antglob.glob(source_dir,
             includes = '''**/*.h **/*.cpp **/*.inl''',
-            prune_dirs = prune_dirs )
+            prune_dirs = prune_dirs)
         for source in cpp_sources:
-            update_license( source, dry_run, show_diff )
+            update_license(source, dry_run, show_diff)
 
 def main():
     usage = """%prog DIR [DIR2...]
@@ -83,7 +83,7 @@
         help="""On update, show change made to the file.""")
     parser.enable_interspersed_args()
     options, args = parser.parse_args()
-    update_license_in_source_directories( args, options.dry_run, options.show_diff )
+    update_license_in_source_directories(args, options.dry_run, options.show_diff)
     print('Done')
 
 if __name__ == '__main__':
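
The header-replacement rule in update_license() above can be sketched on its own; LICENSE_BEGIN and BRIEF_LICENSE below are placeholders, since the real constants sit at the top of licenseupdater.py outside these hunks:

    LICENSE_BEGIN = '// Copyright '
    BRIEF_LICENSE = '// Copyright 20XX Example Authors.\n// Distributed under an example license.\n\n'

    def replace_header(original_text):
        if not original_text.startswith(LICENSE_BEGIN):
            return BRIEF_LICENSE + original_text        # no license header yet: prepend one
        end = original_text.index('\n\n')               # existing header ends at the first blank line
        return BRIEF_LICENSE + original_text[end + 2:]  # keep everything after it

    print(replace_header('int main() { return 0; }\n'))
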
diff --git a/devtools/tarball.py b/devtools/tarball.py
index ccbda39..e44d870 100644
--- a/devtools/tarball.py
+++ b/devtools/tarball.py
@@ -13,41 +13,41 @@
     prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
         to make them child of root.
     """
-    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
-    def archive_name( path ):
+    base_dir = os.path.normpath(os.path.abspath(base_dir))
+    def archive_name(path):
         """Makes path relative to base_dir."""
-        path = os.path.normpath( os.path.abspath( path ) )
-        common_path = os.path.commonprefix( (base_dir, path) )
+        path = os.path.normpath(os.path.abspath(path))
+        common_path = os.path.commonprefix((base_dir, path))
         archive_name = path[len(common_path):]
-        if os.path.isabs( archive_name ):
+        if os.path.isabs(archive_name):
             archive_name = archive_name[1:]
-        return os.path.join( prefix_dir, archive_name )
+        return os.path.join(prefix_dir, archive_name)
     def visit(tar, dirname, names):
         for name in names:
             path = os.path.join(dirname, name)
             if os.path.isfile(path):
                 path_in_tar = archive_name(path)
-                tar.add(path, path_in_tar )
+                tar.add(path, path_in_tar)
     compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
-    tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
+    tar = tarfile.TarFile.gzopen(tarball_path, 'w', compresslevel=compression)
     try:
         for source in sources:
             source_path = source
-            if os.path.isdir( source ):
+            if os.path.isdir(source):
                 os.path.walk(source_path, visit, tar)
             else:
                 path_in_tar = archive_name(source_path)
-                tar.add(source_path, path_in_tar )      # filename, arcname
+                tar.add(source_path, path_in_tar)      # filename, arcname
     finally:
         tar.close()
 
-def decompress( tarball_path, base_dir ):
+def decompress(tarball_path, base_dir):
     """Decompress the gzipped tarball into directory base_dir.
     """
     # !!! This class method is not documented in the online doc
     # nor is bz2open!
     tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
     try:
-        tar.extractall( base_dir )
+        tar.extractall(base_dir)
     finally:
         tar.close()
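
A usage sketch for the two helpers above. The enclosing function's name and signature are outside this hunk, so the call below assumes make_tarball(tarball_path, sources, base_dir, prefix_dir), inferred from the parameter names in its body and docstring; the paths are illustrative, and the code as written targets Python 2 (os.path.walk no longer exists in Python 3):

    from devtools import tarball

    tarball.make_tarball('dist/jsoncpp-src.tar.gz',
                         sources=['src', 'include', 'README'],
                         base_dir='.',
                         prefix_dir='jsoncpp-src')
    tarball.decompress('dist/jsoncpp-src.tar.gz', 'dist/unpacked')
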