- doc is now generated in dist/doxygen
- makerelease now decompresses the source tarball, downloads and installs scons-local, runs 'scons check' on the provided platforms, decompresses the doc tarball, and uploads the doc to the project web site (see the sketch below)
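
For reference, the new makerelease flow roughly follows the sketch below. All function
and variable names are taken from the diff in this patch (options, distcheck_top_dir,
source_tarball_path and doc_distcheck_top_dir are as set up in makerelease.py); paths,
logging and error handling are simplified, so treat this as an illustration rather than
the exact script:

    # Build the doxygen doc; build_doc() now returns the doc tarball path and dir name.
    doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
    tarball.decompress( doc_tarball_path, 'dist/doccheck' )
    # Decompress the source tarball, install scons-local inside it, then run
    # 'scons check' for each requested platform.
    tarball.decompress( source_tarball_path, 'dist/distcheck' )
    download( SCONS_LOCAL_URL, 'dist/scons-local.tar.gz' )
    tarball.decompress( 'dist/scons-local.tar.gz', distcheck_top_dir )
    for platform in options.platforms.split(','):
        build_status, log_path = check_compile( distcheck_top_dir, platform )
    # If an upload user was given, push the generated doc to the project web site.
    if options.user:
        sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir,
                                 user=options.user, sftp=options.sftp )
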
diff --git a/doxybuild.py b/doxybuild.py
index 792bff7..0a2a6c7 100644
--- a/doxybuild.py
+++ b/doxybuild.py
@@ -8,22 +8,19 @@
import shutil
from devtools import tarball
-def find_program(filename):
+def find_program(*filenames):
"""find a program in folders path_lst, and sets env[var]
- @param env: environment
- @param filename: name of the program to search for
- @param path_list: list of directories to search for filename
- @param var: environment value to be checked for in env or os.environ
- @return: either the value that is referenced with [var] in env or os.environ
- or the first occurrence filename or '' if filename could not be found
+ @param filenames: a list of possible names of the program to search for
+ @return: the full path of the filename if found, or '' if filename could not be found
"""
paths = os.environ.get('PATH', '').split(os.pathsep)
suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
- for name in [filename+ext for ext in suffixes.split()]:
- for directory in paths:
- full_path = os.path.join(directory, name)
- if os.path.isfile(full_path):
- return full_path
+ for filename in filenames:
+ for name in [filename+ext for ext in suffixes.split()]:
+ for directory in paths:
+ full_path = os.path.join(directory, name)
+ if os.path.isfile(full_path):
+ return full_path
return ''
def do_subst_in_file(targetfile, sourcefile, dict):
@@ -87,36 +84,38 @@
options.silent = True
version = open('version','rt').read().strip()
- output_dir = '../build/doxygen' # relative to doc/doxyfile location.
+ output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
+ if not os.path.isdir( output_dir ):
+ os.makedirs( output_dir )
top_dir = os.path.abspath( '.' )
html_output_dirname = 'jsoncpp-api-html-' + version
tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' )
warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' )
+ html_output_path = os.path.join( output_dir, html_output_dirname )
def yesno( bool ):
return bool and 'YES' or 'NO'
subst_keys = {
'%JSONCPP_VERSION%': version,
'%DOC_TOPDIR%': '',
'%TOPDIR%': top_dir,
- '%HTML_OUTPUT%': os.path.join( output_dir, html_output_dirname ),
+ '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ),
'%HAVE_DOT%': yesno(options.with_dot),
'%DOT_PATH%': os.path.split(options.dot_path)[0],
'%HTML_HELP%': yesno(options.with_html_help),
'%UML_LOOK%': yesno(options.with_uml_look),
- '%WARNING_LOG_PATH%': warning_log_path
+ '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path )
}
- full_output_dir = os.path.join( 'doc', output_dir )
- if os.path.isdir( full_output_dir ):
- print 'Deleting directory:', full_output_dir
- shutil.rmtree( full_output_dir )
- if not os.path.isdir( full_output_dir ):
- os.makedirs( full_output_dir )
+ if os.path.isdir( output_dir ):
+ print 'Deleting directory:', output_dir
+ shutil.rmtree( output_dir )
+ if not os.path.isdir( output_dir ):
+ os.makedirs( output_dir )
do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
if not options.silent:
- print open(os.path.join('doc', warning_log_path), 'rb').read()
+ print open(warning_log_path, 'rb').read()
index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html'))
print 'Generated documentation can be found in:'
print index_path
@@ -126,12 +125,13 @@
if options.make_tarball:
print 'Generating doc tarball to', tarball_path
tarball_sources = [
- full_output_dir,
+ output_dir,
'README.txt',
'version'
]
- tarball_basedir = os.path.join( full_output_dir, html_output_dirname )
+ tarball_basedir = os.path.join( output_dir, html_output_dirname )
tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )
+ return tarball_path, html_output_dirname
def main():
usage = """%prog
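
(For clarity, a minimal sketch of how the updated doxybuild API is consumed; both calls
appear in the makerelease.py diff below:)

    # find_program() now accepts several candidate names and returns the full path of
    # the first one found on PATH, or '' if none is found.
    sftp = doxybuild.find_program( 'psftp', 'sftp' )
    # build_doc() now returns the doc tarball path and the html output directory name.
    doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
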
diff --git a/makerelease.py b/makerelease.py
index 9dcdcf6..80a2edb 100644
--- a/makerelease.py
+++ b/makerelease.py
@@ -3,7 +3,7 @@
Requires Python 2.6
Example of invocation (use to test the script):
-python makerelease.py --force --retag 0.5.0 0.6.0-dev
+python makerelease.py --force --retag --platforms=msvc6,msvc71,msvc80,mingw -ublep 0.5.0 0.6.0-dev
Example of invocation when doing a release:
python makerelease.py 0.5.0 0.6.0-dev
@@ -15,15 +15,25 @@
import subprocess
import xml.etree.ElementTree as ElementTree
import shutil
+import urllib2
+import tempfile
+import os
+import time
from devtools import antglob, fixeol, tarball
SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
+SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
+SOURCEFORGE_PROJECT = 'jsoncpp'
def set_version( version ):
with open('version','wb') as f:
f.write( version.strip() )
+def rmdir_if_exist( dir_path ):
+ if os.path.isdir( dir_path ):
+ shutil.rmtree( dir_path )
+
class SVNError(Exception):
pass
@@ -89,8 +99,7 @@
Target directory, including its parent, is created if it does not exist.
If the directory export_dir exists, it is deleted before the export proceeds.
"""
- if os.path.isdir( export_dir ):
- shutil.rmtree( export_dir )
+ rmdir_if_exist( export_dir )
svn_command( 'export', tag_url, export_dir )
def fix_sources_eol( dist_dir ):
@@ -111,6 +120,114 @@
for path in unix_sources:
fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )
+def download( url, target_path ):
+ """Download file represented by url to target_path.
+ """
+ f = urllib2.urlopen( url )
+ try:
+ data = f.read()
+ finally:
+ f.close()
+ fout = open( target_path, 'wb' )
+ try:
+ fout.write( data )
+ finally:
+ fout.close()
+
+def check_compile( distcheck_top_dir, platform ):
+ cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
+ print 'Running:', ' '.join( cmd )
+ log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
+ flog = open( log_path, 'wb' )
+ try:
+ process = subprocess.Popen( cmd,
+ stdout=flog,
+ stderr=subprocess.STDOUT,
+ cwd=distcheck_top_dir )
+ stdout = process.communicate()[0]
+ status = (process.returncode == 0)
+ finally:
+ flog.close()
+ return (status, log_path)
+
+def write_tempfile( content, **kwargs ):
+ fd, path = tempfile.mkstemp( **kwargs )
+ f = os.fdopen( fd, 'wt' )
+ try:
+ f.write( content )
+ finally:
+ f.close()
+ return path
+
+class SFTPError(Exception):
+ pass
+
+def run_sftp_batch( userhost, sftp, batch, retry=0 ):
+ path = write_tempfile( batch, suffix='.sftp', text=True )
+ # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
+ cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
+ error = None
+ for retry_index in xrange(0, max(1,retry)):
+ heading = retry_index == 0 and 'Running:' or 'Retrying:'
+ print heading, ' '.join( cmd )
+ process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
+ stdout = process.communicate()[0]
+ if process.returncode != 0:
+ error = SFTPError( 'SFTP batch failed:\n' + stdout )
+ else:
+ break
+ if error:
+ raise error
+ return stdout
+
+def sourceforge_web_synchro( sourceforge_project, doc_dir,
+ user=None, sftp='sftp' ):
+ """Note: does not synchronize sub-directories of doc_dir.
+ """
+ userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
+ stdout = run_sftp_batch( userhost, sftp, """
+cd htdocs
+dir
+exit
+""" )
+ existing_paths = set()
+ collect = 0
+ for line in stdout.split('\n'):
+ line = line.strip()
+ if not collect and line.endswith('> dir'):
+ collect = True
+ elif collect and line.endswith('> exit'):
+ break
+ elif collect == 1:
+ collect = 2
+ elif collect == 2:
+ path = line.strip().split()[-1:]
+ if path and path[0] not in ('.', '..'):
+ existing_paths.add( path[0] )
+ upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
+ paths_to_remove = existing_paths - upload_paths
+ if paths_to_remove:
+ print 'Removing the following files from the web:'
+ print '\n'.join( paths_to_remove )
+ stdout = run_sftp_batch( userhost, sftp, """cd htdocs
+rm %s
+exit""" % ' '.join(paths_to_remove) )
+ print 'Uploading %d files:' % len(upload_paths)
+ batch_size = 10
+ upload_paths = list(upload_paths)
+ start_time = time.time()
+ for index in xrange(0,len(upload_paths),batch_size):
+ paths = upload_paths[index:index+batch_size]
+ seconds_per_file = (time.time() - start_time) / (index+1)
+ remaining_files = len(upload_paths) - index
+ remaining_sec = seconds_per_file * remaining_files
+ print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
+ run_sftp_batch( userhost, sftp, """cd htdocs
+lcd %s
+mput %s
+exit""" % (doc_dir, ' '.join(paths) ), retry=3 )
+
+
def main():
usage = """%prog release_version next_dev_version
Update 'version' file to release_version and commit.
@@ -120,7 +237,9 @@
Performs an svn export of tag release version, and build a source tarball.
-Must be started in the project top directory.
+Must be started in the project top directory.
+
+Warning: --force should only be used when developing/testing the release script.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
@@ -133,6 +252,14 @@
help="""Ignore pending commit. [Default: %default]""")
parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
help="""Overwrite the existing release tag if it exists. [Default: %default]""")
+ parser.add_option('-p', '--platforms', dest="platforms", action='store', default='',
+ help="""Comma separated list of platform passed to scons for build check.""")
+ parser.add_option('--no-test', dest="no_test", action='store_true', default=False,
+ help="""Skips build check.""")
+ parser.add_option('-u', '--upload-user', dest="user", action='store',
+ help="""Sourceforge user for SFTP documentation upload.""")
+ parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'),
+ help="""Path of the SFTP compatible binary used to upload the documentation.""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
@@ -140,6 +267,9 @@
parser.error( 'release_version missing on command-line.' )
release_version = args[0]
+ if not options.platforms and not options.no_test:
+ parser.error( 'You must specify either the --platforms or the --no-test option.' )
+
if options.ignore_pending_commit:
msg = ''
else:
@@ -157,7 +287,12 @@
svn_tag_sandbox( tag_url, 'Release ' + release_version )
print 'Generated doxygen document...'
- doxybuild.build_doc( options, make_release=True )
+ doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
+ doc_distcheck_dir = 'dist/doccheck'
+ tarball.decompress( doc_tarball_path, doc_distcheck_dir )
+ doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname )
export_dir = 'dist/export'
svn_export( tag_url, export_dir )
@@ -168,12 +303,40 @@
print 'Generating source tarball to', source_tarball_path
tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
+ # Decompress source tarball, download and install scons-local
distcheck_dir = 'dist/distcheck'
+ distcheck_top_dir = distcheck_dir + '/' + source_dir
print 'Decompressing source tarball to', distcheck_dir
+ rmdir_if_exist( distcheck_dir )
tarball.decompress( source_tarball_path, distcheck_dir )
+ scons_local_path = 'dist/scons-local.tar.gz'
+ print 'Downloading scons-local to', scons_local_path
+ download( SCONS_LOCAL_URL, scons_local_path )
+ print 'Decompressing scons-local to', distcheck_top_dir
+ tarball.decompress( scons_local_path, distcheck_top_dir )
+
+ # Run compilation
+ print 'Compiling decompressed tarball'
+ all_build_status = True
+ for platform in options.platforms.split(','):
+ print 'Testing platform:', platform
+ build_status, log_path = check_compile( distcheck_top_dir, platform )
+ print 'see build log:', log_path
+ print build_status and '=> ok' or '=> FAILED'
+ all_build_status = all_build_status and build_status
+ if not build_status:
+ print 'Testing failed on at least one platform, aborting...'
+ svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
+ sys.exit(1)
+ if options.user:
+ print 'Uploading documentation using user', options.user
+ sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
+ print 'Completed documentation upload'
+ else:
+ print 'No upload user specified. Documentation was not uploaded.'
+ print 'Tarball can be found at:', doc_tarball_path
#@todo:
- # ?compile & run & check
- # ?upload documentation
+ #upload source & doc tarballs
else:
sys.stderr.write( msg + '\n' )