Convert build and test scripts to Python 3 compatible syntax

A mechanical first pass: turn print statements into print() calls
(adding "from __future__ import print_function" where a file must keep
running under Python 2), replace xrange() with range(), dict.has_key()
with the in operator, the removed "raise Class, msg" form with
raise Class(msg), unittest's deprecated assert_() with assertTrue(),
"print >> sys.stderr" with print(..., file=sys.stderr), and wrap
dict.items() in list() where 2to3 suggests it.
diff --git a/amalgamate.py b/amalgamate.py
index 9e3f08c..550f6a6 100644
--- a/amalgamate.py
+++ b/amalgamate.py
@@ -56,7 +56,7 @@
            target_source_path: output .cpp path
            header_include_path: generated header path relative to target_source_path.
     """
-    print ("Amalgating header...")
+    print("Amalgating header...")
     header = AmalgamationFile( source_top_dir )
     header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." )
     header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
@@ -77,12 +77,12 @@
     header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" )
 
     target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
-    print ("Writing amalgated header to %r" % target_header_path)
+    print("Writing amalgated header to %r" % target_header_path)
     header.write_to( target_header_path )
 
     base, ext = os.path.splitext( header_include_path )
     forward_header_include_path = base + "-forwards" + ext
-    print ("Amalgating forward header...")
+    print("Amalgating forward header...")
     header = AmalgamationFile( source_top_dir )
     header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." )
     header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path )
@@ -99,10 +99,10 @@
 
     target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                                forward_header_include_path )
-    print ("Writing amalgated forward header to %r" % target_forward_header_path)
+    print("Writing amalgated forward header to %r" % target_forward_header_path)
     header.write_to( target_forward_header_path )
 
-    print ("Amalgating source...")
+    print("Amalgating source...")
     source = AmalgamationFile( source_top_dir )
     source.add_text( "/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/)." )
     source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
@@ -118,7 +118,7 @@
     source.add_file( os.path.join(lib_json, "json_value.cpp") )
     source.add_file( os.path.join(lib_json, "json_writer.cpp") )
 
-    print ("Writing amalgated source to %r" % target_source_path)
+    print("Writing amalgated source to %r" % target_source_path)
     source.write_to( target_source_path )
 
 def main():
@@ -144,7 +144,7 @@
         sys.stderr.write( msg + "\n" )
         sys.exit( 1 )
     else:
-        print ("Source succesfully amalagated")
+        print("Source succesfully amalagated")
 
 if __name__ == "__main__":
     main()
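The amalgamate.py prints already carried parentheses, so only the space before the opening paren changes; that form is safe because every call passes a single argument. A minimal sketch of why the multi-argument form needs the __future__ import used in the other files (strings are illustrative only):

from __future__ import print_function  # makes print a function on Python 2 as well

print("a single argument prints the same either way")
# Without the import, Python 2 parses the parentheses as a tuple and
# prints ('a', 'b'); with it, both interpreters print: a b
print("a", "b")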
diff --git a/devtools/antglob.py b/devtools/antglob.py
index 30837b5..8b7b4ca 100644
--- a/devtools/antglob.py
+++ b/devtools/antglob.py
@@ -2,6 +2,7 @@
 # encoding: utf-8
 # Baptiste Lepilleur, 2009
 
+from __future__ import print_function
 from dircache import listdir
 import re
 import fnmatch
@@ -190,12 +191,12 @@
                 test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
             for ant_pattern, accepted_matches, rejected_matches in test_cases:
                 rex = ant_pattern_to_re( ant_pattern )
-                print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
+                print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
                 for accepted_match in accepted_matches:
-                    print 'Accepted?:', accepted_match
-                    self.assert_( rex.match( accepted_match ) is not None )
+                    print('Accepted?:', accepted_match)
+                    self.assertTrue( rex.match( accepted_match ) is not None )
                 for rejected_match in rejected_matches:
-                    print 'Rejected?:', rejected_match
-                    self.assert_( rex.match( rejected_match ) is None )
+                    print('Rejected?:', rejected_match)
+                    self.assertTrue( rex.match( rejected_match ) is None )
 
     unittest.main()
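antglob.py still imports listdir from dircache, a module that was removed in Python 3, so the print and assertTrue fixes alone do not make it importable there. A hedged fallback sketch, not part of this patch:

try:
    from dircache import listdir  # Python 2 only: cached directory listings
except ImportError:
    from os import listdir        # Python 3: same call shape, no caching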
diff --git a/devtools/fixeol.py b/devtools/fixeol.py
index 8b97e90..53af761 100644
--- a/devtools/fixeol.py
+++ b/devtools/fixeol.py
@@ -1,3 +1,5 @@
+from __future__ import print_function
+import sys
 import os.path
 
 def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
@@ -7,7 +8,7 @@
     try:
         f = open(path, 'rb')
     except IOError as msg:
-        print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
+        print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
         return False
     try:
         raw_lines = f.readlines()
@@ -15,7 +16,7 @@
         f.close()
     fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
     if raw_lines != fixed_lines:
-        print '%s =>' % path,
+        print('%s =>' % path, end=' ')
         if not is_dry_run:
             f = open(path, "wb")
             try:
@@ -23,7 +24,7 @@
             finally:
                 f.close()
         if verbose:
-            print is_dry_run and ' NEED FIX' or ' FIXED'
+            print(is_dry_run and ' NEED FIX' or ' FIXED')
     return True
 ##    
 ##    
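fixeol.py now exercises both print() keywords that replace Python 2 statement syntax. A minimal side-by-side sketch (message strings are illustrative):

from __future__ import print_function
import sys

# Python 2 form: print >> sys.stderr, "message"
print("message", file=sys.stderr)
# Python 2 form: print 'prefix',   (the trailing comma suppressed the newline)
print("prefix", end=' ')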
diff --git a/devtools/licenseupdater.py b/devtools/licenseupdater.py
index 866eada..8cb71d7 100644
--- a/devtools/licenseupdater.py
+++ b/devtools/licenseupdater.py
@@ -1,5 +1,6 @@
 """Updates the license text in source file.
 """
+from __future__ import print_function
 
 # An existing license is found if the file starts with the string below,
 # and ends with the first blank line.
@@ -34,11 +35,11 @@
         if not dry_run:
             with open( path, 'wb' ) as fout:
                 fout.write( new_text.replace('\n', newline ) )
-        print 'Updated', path
+        print('Updated', path)
         if show_diff:
             import difflib
-            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
-                                                   new_text.split('\n') ) )
+            print('\n'.join( difflib.unified_diff( original_text.split('\n'),
+                                                   new_text.split('\n') ) ))
         return True
     return False
 
@@ -83,7 +84,7 @@
     parser.enable_interspersed_args()
     options, args = parser.parse_args()
     update_license_in_source_directories( args, options.dry_run, options.show_diff )
-    print 'Done'
+    print('Done')
 
 if __name__ == '__main__':
     import sys
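licenseupdater.py still writes a str to a file opened in 'wb' mode, which raises TypeError on Python 3 even after the print fixes. A hedged sketch of a portable write, using the names from the hunk above and assuming the sources are UTF-8:

with open(path, 'wb') as fout:
    fout.write(new_text.replace('\n', newline).encode('utf-8'))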
diff --git a/doxybuild.py b/doxybuild.py
index 588767f..0b61c39 100644
--- a/doxybuild.py
+++ b/doxybuild.py
@@ -1,12 +1,12 @@
 """Script to generate doxygen documentation.
 """
-
+from __future__ import print_function
+from devtools import tarball
 import re
 import os
 import os.path
 import sys
 import shutil
-from devtools import tarball
 
 def find_program(*filenames):
     """find a program in folders path_lst, and sets env[var]
@@ -33,9 +33,9 @@
         contents = f.read()
         f.close()
     except:
-        print "Can't read source file %s"%sourcefile
+        print("Can't read source file %s"%sourcefile)
         raise
-    for (k,v) in dict.items():
+    for (k,v) in list(dict.items()):
         v = v.replace('\\','\\\\') 
         contents = re.sub(k, v, contents)
     try:
@@ -43,7 +43,7 @@
         f.write(contents)
         f.close()
     except:
-        print "Can't write target file %s"%targetfile
+        print("Can't write target file %s"%targetfile)
         raise
 
 def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
@@ -53,12 +53,12 @@
     try:
         os.chdir( working_dir )
         cmd = [doxygen_path, config_file]
-        print 'Running:', ' '.join( cmd )
+        print('Running:', ' '.join( cmd ))
         try:
             import subprocess
         except:
             if os.system( ' '.join( cmd ) ) != 0:
-                print 'Documentation generation failed'
+                print('Documentation generation failed')
                 return False
         else:
             if is_silent:
@@ -67,8 +67,8 @@
                 process = subprocess.Popen( cmd )
             stdout, _ = process.communicate()
             if process.returncode:
-                print 'Documentation generation failed:'
-                print stdout
+                print('Documentation generation failed:')
+                print(stdout)
                 return False
         return True
     finally:
@@ -107,7 +107,7 @@
         }
 
     if os.path.isdir( output_dir ):
-        print 'Deleting directory:', output_dir
+        print('Deleting directory:', output_dir)
         shutil.rmtree( output_dir )
     if not os.path.isdir( output_dir ):
         os.makedirs( output_dir )
@@ -115,15 +115,15 @@
     do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
     ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
     if not options.silent:
-        print open(warning_log_path, 'rb').read()
+        print(open(warning_log_path, 'rb').read())
     index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
-    print 'Generated documentation can be found in:'
-    print index_path
+    print('Generated documentation can be found in:')
+    print(index_path)
     if options.open:
         import webbrowser
         webbrowser.open( 'file://' + index_path )
     if options.make_tarball:
-        print 'Generating doc tarball to', tarball_path
+        print('Generating doc tarball to', tarball_path)
         tarball_sources = [
             output_dir,
             'README.txt',
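Two of the doxybuild.py prints now receive bytes on Python 3: the warning log is opened in 'rb' mode, and subprocess pipes return bytes, so print(stdout) renders b'...'. A hedged decoding sketch (the command and the UTF-8 encoding are assumptions):

import subprocess

cmd = ['doxygen', 'doc/doxyfile']  # illustrative command
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, _ = process.communicate()
if process.returncode:
    print('Documentation generation failed:')
    print(stdout.decode('utf-8', errors='replace'))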
diff --git a/makerelease.py b/makerelease.py
index 2b6c564..90276d1 100644
--- a/makerelease.py
+++ b/makerelease.py
@@ -14,6 +14,7 @@
 Note: This was for Subversion. Now that we are in GitHub, we do not
 need to build versioned tarballs anymore, so makerelease.py is defunct.
 """
+from __future__ import print_function
 import os.path
 import subprocess
 import sys
@@ -46,7 +47,7 @@
 
 def svn_command( command, *args ):
     cmd = ['svn', '--non-interactive', command] + list(args)
-    print 'Running:', ' '.join( cmd )
+    print('Running:', ' '.join( cmd ))
     process = subprocess.Popen( cmd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT )
@@ -117,7 +118,7 @@
 def fix_sources_eol( dist_dir ):
     """Set file EOL for tarball distribution.
     """
-    print 'Preparing exported source file EOL for distribution...'
+    print('Preparing exported source file EOL for distribution...')
     prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
     win_sources = antglob.glob( dist_dir, 
         includes = '**/*.sln **/*.vcproj',
@@ -148,7 +149,7 @@
 
 def check_compile( distcheck_top_dir, platform ):
     cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
-    print 'Running:', ' '.join( cmd )
+    print('Running:', ' '.join( cmd ))
     log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
     flog = open( log_path, 'wb' )
     try:
@@ -179,9 +180,9 @@
     # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
     cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
     error = None
-    for retry_index in xrange(0, max(1,retry)):
+    for retry_index in range(0, max(1,retry)):
         heading = retry_index == 0 and 'Running:' or 'Retrying:'
-        print heading, ' '.join( cmd )
+        print(heading, ' '.join( cmd ))
         process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
         stdout = process.communicate()[0]
         if process.returncode != 0:
@@ -219,21 +220,21 @@
     upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
     paths_to_remove = existing_paths - upload_paths
     if paths_to_remove:
-        print 'Removing the following file from web:'
-        print '\n'.join( paths_to_remove )
+        print('Removing the following file from web:')
+        print('\n'.join( paths_to_remove ))
         stdout = run_sftp_batch( userhost, sftp, """cd htdocs
 rm %s
 exit""" % ' '.join(paths_to_remove) )
-    print 'Uploading %d files:' % len(upload_paths)
+    print('Uploading %d files:' % len(upload_paths))
     batch_size = 10
     upload_paths = list(upload_paths)
     start_time = time.time()
-    for index in xrange(0,len(upload_paths),batch_size):
+    for index in range(0,len(upload_paths),batch_size):
         paths = upload_paths[index:index+batch_size]
         file_per_sec = (time.time() - start_time) / (index+1)
         remaining_files = len(upload_paths) - index
         remaining_sec = file_per_sec * remaining_files
-        print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
+        print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
         run_sftp_batch( userhost, sftp, """cd htdocs
 lcd %s
 mput %s
@@ -297,7 +298,7 @@
     else:
         msg = check_no_pending_commit()
     if not msg:
-        print 'Setting version to', release_version
+        print('Setting version to', release_version)
         set_version( release_version )
         svn_commit( 'Release ' + release_version )
         tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
@@ -305,11 +306,11 @@
             if options.retag_release:
                 svn_remove_tag( tag_url, 'Overwriting previous tag' )
             else:
-                print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
+                print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
                 sys.exit( 1 )
         svn_tag_sandbox( tag_url, 'Release ' + release_version )
 
-        print 'Generated doxygen document...'
+        print('Generated doxygen document...')
 ##        doc_dirname = r'jsoncpp-api-html-0.5.0'
 ##        doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
         doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
@@ -323,11 +324,11 @@
         
         source_dir = 'jsoncpp-src-' + release_version
         source_tarball_path = 'dist/%s.tar.gz' % source_dir
-        print 'Generating source tarball to', source_tarball_path
+        print('Generating source tarball to', source_tarball_path)
         tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
 
         amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
-        print 'Generating amalgamation source tarball to', amalgamation_tarball_path
+        print('Generating amalgamation source tarball to', amalgamation_tarball_path)
         amalgamation_dir = 'dist/amalgamation'
         amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
         amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
@@ -337,41 +338,41 @@
         # Decompress source tarball, download and install scons-local
         distcheck_dir = 'dist/distcheck'
         distcheck_top_dir = distcheck_dir + '/' + source_dir
-        print 'Decompressing source tarball to', distcheck_dir
+        print('Decompressing source tarball to', distcheck_dir)
         rmdir_if_exist( distcheck_dir )
         tarball.decompress( source_tarball_path, distcheck_dir )
         scons_local_path = 'dist/scons-local.tar.gz'
-        print 'Downloading scons-local to', scons_local_path
+        print('Downloading scons-local to', scons_local_path)
         download( SCONS_LOCAL_URL, scons_local_path )
-        print 'Decompressing scons-local to', distcheck_top_dir
+        print('Decompressing scons-local to', distcheck_top_dir)
         tarball.decompress( scons_local_path, distcheck_top_dir )
 
         # Run compilation
-        print 'Compiling decompressed tarball'
+        print('Compiling decompressed tarball')
         all_build_status = True
         for platform in options.platforms.split(','):
-            print 'Testing platform:', platform
+            print('Testing platform:', platform)
             build_status, log_path = check_compile( distcheck_top_dir, platform )
-            print 'see build log:', log_path
-            print build_status and '=> ok' or '=> FAILED'
+            print('see build log:', log_path)
+            print(build_status and '=> ok' or '=> FAILED')
             all_build_status = all_build_status and build_status
         if not build_status:
-            print 'Testing failed on at least one platform, aborting...'
+            print('Testing failed on at least one platform, aborting...')
             svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
             sys.exit(1)
         if options.user:
             if not options.no_web:
-                print 'Uploading documentation using user', options.user
+                print('Uploading documentation using user', options.user)
                 sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
-                print 'Completed documentation upload'
-            print 'Uploading source and documentation tarballs for release using user', options.user
+                print('Completed documentation upload')
+            print('Uploading source and documentation tarballs for release using user', options.user)
             sourceforge_release_tarball( SOURCEFORGE_PROJECT,
                                          [source_tarball_path, doc_tarball_path],
                                          user=options.user, sftp=options.sftp )
-            print 'Source and doc release tarballs uploaded'
+            print('Source and doc release tarballs uploaded')
         else:
-            print 'No upload user specified. Web site and download tarbal were not uploaded.'
-            print 'Tarball can be found at:', doc_tarball_path
+            print('No upload user specified. Web site and download tarbal were not uploaded.')
+            print('Tarball can be found at:', doc_tarball_path)
 
         # Set next version number and commit            
         set_version( next_version )
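makerelease.py swaps xrange() for range(); on Python 2 that builds a real list, harmless at these sizes, and on Python 3 it is the only spelling. A standalone sketch of the batching idiom used in the upload loop (names illustrative):

from __future__ import print_function

upload_paths = ['a', 'b', 'c', 'd', 'e']
batch_size = 2
for index in range(0, len(upload_paths), batch_size):
    # -> ['a', 'b'], then ['c', 'd'], then ['e']
    print('batch:', upload_paths[index:index + batch_size])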
diff --git a/scons-tools/substinfile.py b/scons-tools/substinfile.py
index 4d30585..ef18b4e 100644
--- a/scons-tools/substinfile.py
+++ b/scons-tools/substinfile.py
@@ -1,5 +1,6 @@
 import re
 from SCons.Script import *  # the usual scons stuff you get in a SConscript
+import collections
 
 def generate(env):
     """
@@ -25,28 +26,28 @@
             contents = f.read()
             f.close()
         except:
-            raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile
-        for (k,v) in dict.items():
+            raise SCons.Errors.UserError("Can't read source file %s"%sourcefile)
+        for (k,v) in list(dict.items()):
             contents = re.sub(k, v, contents)
         try:
             f = open(targetfile, 'wb')
             f.write(contents)
             f.close()
         except:
-            raise SCons.Errors.UserError, "Can't write target file %s"%targetfile
+            raise SCons.Errors.UserError("Can't write target file %s"%targetfile)
         return 0 # success
 
     def subst_in_file(target, source, env):
-        if not env.has_key('SUBST_DICT'):
-            raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set."
+        if 'SUBST_DICT' not in env:
+            raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.")
         d = dict(env['SUBST_DICT']) # copy it
-        for (k,v) in d.items():
-            if callable(v):
+        for (k,v) in list(d.items()):
+            if isinstance(v, collections.Callable):
                 d[k] = env.subst(v()).replace('\\','\\\\')
             elif SCons.Util.is_String(v):
                 d[k] = env.subst(v).replace('\\','\\\\')
             else:
-                raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))
+                raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)))
         for (t,s) in zip(target, source):
             return do_subst_in_file(str(t), str(s), d)
 
@@ -60,8 +61,8 @@
         Returns original target, source tuple unchanged.
         """
         d = env['SUBST_DICT'].copy() # copy it
-        for (k,v) in d.items():
-            if callable(v):
+        for (k,v) in list(d.items()):
+            if isinstance(v, collections.Callable):
                 d[k] = env.subst(v())
             elif SCons.Util.is_String(v):
                 d[k]=env.subst(v)
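isinstance(v, collections.Callable) is the 2to3-era spelling from when callable() was missing (Python 3.0 and 3.1 only); callable() returned in 3.2, and the alias lives in collections.abc since 3.3 (it was removed from collections in 3.10). A more durable sketch, should the SCons tool need to outlive old interpreters:

try:
    from collections.abc import Callable  # Python 3.3+
except ImportError:
    from collections import Callable      # 2.6 up to the 3.10 removal

def is_callable(v):
    # callable(v) is equivalent on every version except 3.0/3.1
    return isinstance(v, Callable)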
diff --git a/test/generate_expected.py b/test/generate_expected.py
index 5b215c4..f668da2 100644
--- a/test/generate_expected.py
+++ b/test/generate_expected.py
@@ -1,11 +1,12 @@
+from __future__ import print_function
 import glob
 import os.path
 for path in glob.glob( '*.json' ):
     text = file(path,'rt').read()
     target = os.path.splitext(path)[0] + '.expected'
     if os.path.exists( target ):
-        print 'skipping:', target
+        print('skipping:', target)
     else:
-        print 'creating:', target
+        print('creating:', target)
         file(target,'wt').write(text)
 
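generate_expected.py still relies on the file() builtin, which was removed in Python 3, so the script stays Python 2 only despite the print fixes. An equivalent sketch using open(), which both versions accept:

from __future__ import print_function
import glob
import os.path

for path in glob.glob('*.json'):
    with open(path, 'rt') as f:
        text = f.read()
    target = os.path.splitext(path)[0] + '.expected'
    if os.path.exists(target):
        print('skipping:', target)
    else:
        print('creating:', target)
        with open(target, 'wt') as f:
            f.write(text)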
diff --git a/test/pyjsontestrunner.py b/test/pyjsontestrunner.py
index 504f3db..3f08a8a 100644
--- a/test/pyjsontestrunner.py
+++ b/test/pyjsontestrunner.py
@@ -1,12 +1,12 @@
 # Simple implementation of a json test runner to run the test against json-py.
-
+from __future__ import print_function
 import sys
 import os.path
 import json
 import types
 
 if len(sys.argv) != 2:
-    print "Usage: %s input-json-file", sys.argv[0]
+    print("Usage: %s input-json-file", sys.argv[0])
     sys.exit(3)
     
 input_path = sys.argv[1]
diff --git a/test/runjsontests.py b/test/runjsontests.py
index 5fca75a..a1f6082 100644
--- a/test/runjsontests.py
+++ b/test/runjsontests.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import sys
 import os
 import os.path
@@ -11,7 +12,7 @@
     actual = actual.strip().replace('\r','').split('\n')
     diff_line = 0
     max_line_to_compare = min( len(expected), len(actual) )
-    for index in xrange(0,max_line_to_compare):
+    for index in range(0,max_line_to_compare):
         if expected[index].strip() != actual[index].strip():
             diff_line = index + 1
             break
@@ -51,7 +52,7 @@
     for input_path in tests + test_jsonchecker:
         expect_failure = os.path.basename( input_path ).startswith( 'fail' )
         is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
-        print 'TESTING:', input_path,
+        print('TESTING:', input_path, end=' ')
         options = is_json_checker_test and '--json-checker' or ''
         pipe = os.popen( "%s%s %s %s" % (
             valgrind_path, jsontest_executable_path, options,
@@ -61,24 +62,24 @@
         if is_json_checker_test:
             if expect_failure:
                 if status is None:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, 'Parsing should have failed:\n%s' %
                                           safeReadFile(input_path)) )
                 else:
-                    print 'OK'
+                    print('OK')
             else:
                 if status is not None:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
                 else:
-                    print 'OK'
+                    print('OK')
         else:
             base_path = os.path.splitext(input_path)[0]
             actual_output = safeReadFile( base_path + '.actual' )
             actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' )
             file(base_path + '.process-output','wt').write( process_output )
             if status:
-                print 'parsing failed'
+                print('parsing failed')
                 failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
             else:
                 expected_output_path = os.path.splitext(input_path)[0] + '.expected'
@@ -86,23 +87,23 @@
                 detail = ( compareOutputs( expected_output, actual_output, 'input' )
                             or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) )
                 if detail:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, detail) )
                 else:
-                    print 'OK'
+                    print('OK')
 
     if failed_tests:
-        print
-        print 'Failure details:'
+        print()
+        print('Failure details:')
         for failed_test in failed_tests:
-            print '* Test', failed_test[0]
-            print failed_test[1]
-            print
-        print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
-                                                       len(failed_tests) )
+            print('* Test', failed_test[0])
+            print(failed_test[1])
+            print()
+        print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
+                                                       len(failed_tests) ))
         return 1
     else:
-        print 'All %d tests passed.' % len(tests)
+        print('All %d tests passed.' % len(tests))
         return 0
 
 def main():
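runjsontests.py keeps one file() call (writing the .process-output capture), so it too still needs Python 2 to run end to end. Separately, the index loop at the top of the file can be expressed with zip(), which truncates to the shorter sequence on both versions; a hedged equivalent of the patched comparison:

def first_diff_line(expected, actual):
    for index, (exp, act) in enumerate(zip(expected, actual)):
        if exp.strip() != act.strip():
            return index + 1  # 1-based line of the first mismatch
    return 0                  # no difference within the compared range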
diff --git a/test/rununittests.py b/test/rununittests.py
index 366184c..6279f80 100644
--- a/test/rununittests.py
+++ b/test/rununittests.py
@@ -1,8 +1,9 @@
+from __future__ import print_function
+from glob import glob
 import sys
 import os
 import os.path
 import subprocess
-from glob import glob
 import optparse
 
 VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
@@ -28,29 +29,29 @@
     test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
     status, test_names = test_proxy.run( ['--list-tests'] )
     if not status:
-        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
+        print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr)
         return 1
     test_names = [name.strip() for name in test_names.strip().split('\n')]
     failures = []
     for name in test_names:
-        print 'TESTING %s:' % name,
+        print('TESTING %s:' % name, end=' ')
         succeed, result = test_proxy.run( ['--test', name] )
         if succeed:
-            print 'OK'
+            print('OK')
         else:
             failures.append( (name, result) )
-            print 'FAILED'
+            print('FAILED')
     failed_count = len(failures)
     pass_count = len(test_names) - failed_count
     if failed_count:
-        print
+        print()
         for name, result in failures:
-            print result
-        print '%d/%d tests passed (%d failure(s))' % (
-            pass_count, len(test_names), failed_count)
+            print(result)
+        print('%d/%d tests passed (%d failure(s))' % (
+            pass_count, len(test_names), failed_count))
         return 1
     else:
-        print 'All %d tests passed' % len(test_names)
+        print('All %d tests passed' % len(test_names))
         return 0
 
 def main():
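The progress prints in rununittests.py use end=' ' so that OK or FAILED lands on the same line as the test name; when stdout is a pipe rather than a terminal, that partial line can sit in the buffer until the newline arrives. A small optional hardening, not part of this patch:

from __future__ import print_function
import sys

print('TESTING SomeTest:', end=' ')
sys.stdout.flush()  # make the partial progress line visible immediately
print('OK')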