1 """Tag the sandbox for release, make source and doc tarballs.
2 
3 Requires Python 2.6
4 
5 Example of invocation (use to test the script):
6 python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev
7 
8 When testing this script:
9 python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev
10 
11 Example of invocation when doing a release:
12 python makerelease.py 0.5.0 0.6.0-dev
13 """
import os.path
import subprocess
import sys
import doxybuild
import xml.etree.ElementTree as ElementTree
import shutil
import urllib2
import tempfile
import os
import time
from devtools import antglob, fixeol, tarball
import amalgamate

SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
SOURCEFORGE_PROJECT = 'jsoncpp'

def set_version( version ):
    with open('version','wb') as f:
        f.write( version.strip() )

def rmdir_if_exist( dir_path ):
    if os.path.isdir( dir_path ):
        shutil.rmtree( dir_path )

class SVNError(Exception):
    pass

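# Thin wrapper around the 'svn' command-line client: runs the given
# sub-command non-interactively, returns its combined stdout/stderr,
# and raises SVNError on a non-zero exit code.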
def svn_command( command, *args ):
    cmd = ['svn', '--non-interactive', command] + list(args)
    print 'Running:', ' '.join( cmd )
    process = subprocess.Popen( cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT )
    stdout = process.communicate()[0]
    if process.returncode:
        error = SVNError( 'SVN command failed:\n' + stdout )
        error.returncode = process.returncode
        raise error
    return stdout

def check_no_pending_commit():
    """Checks that there is no pending commit in the sandbox."""
    stdout = svn_command( 'status', '--xml' )
    etree = ElementTree.fromstring( stdout )
    msg = []
    for entry in etree.getiterator( 'entry' ):
        path = entry.get('path')
        status = entry.find('wc-status').get('item')
        if status != 'unversioned' and path != 'version':
            msg.append( 'File "%s" has pending change (status="%s")' % (path, status) )
    if msg:
        msg.insert(0, 'Pending changes found in the sandbox. Commit them first!' )
    return '\n'.join( msg )

def svn_join_url( base_url, suffix ):
    if not base_url.endswith('/'):
        base_url += '/'
    if suffix.startswith('/'):
        suffix = suffix[1:]
    return base_url + suffix

def svn_check_if_tag_exist( tag_url ):
    """Checks if a tag exists.
    Returns: True if the tag exists, False otherwise.
    """
    try:
        list_stdout = svn_command( 'list', tag_url )
    except SVNError, e:
        if e.returncode != 1 or tag_url not in str(e):
            raise e
        # otherwise ignore error, meaning tag does not exist
        return False
    return True

def svn_commit( message ):
    """Commit the sandbox, providing the specified comment.
    """
    svn_command( 'ci', '-m', message )

def svn_tag_sandbox( tag_url, message ):
    """Makes a tag based on the sandbox revisions.
    """
    svn_command( 'copy', '-m', message, '.', tag_url )

def svn_remove_tag( tag_url, message ):
    """Removes an existing tag.
    """
    svn_command( 'delete', '-m', message, tag_url )

def svn_export( tag_url, export_dir ):
    """Exports the tag_url revision to export_dir.
       The target directory, including its parents, is created if it does not exist.
       If export_dir already exists, it is deleted before the export proceeds.
    """
    rmdir_if_exist( export_dir )
    svn_command( 'export', tag_url, export_dir )

def fix_sources_eol( dist_dir ):
    """Set file EOL for tarball distribution.
    """
    print 'Preparing exported source file EOL for distribution...'
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    win_sources = antglob.glob( dist_dir,
        includes = '**/*.sln **/*.vcproj',
        prune_dirs = prune_dirs )
    unix_sources = antglob.glob( dist_dir,
        includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
        sconscript *.json *.expected AUTHORS LICENSE''',
        excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
        prune_dirs = prune_dirs )
    for path in win_sources:
        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' )
    for path in unix_sources:
        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )

def download( url, target_path ):
    """Download the file referenced by url to target_path.
    """
    f = urllib2.urlopen( url )
    try:
        data = f.read()
    finally:
        f.close()
    fout = open( target_path, 'wb' )
    try:
        fout.write( data )
    finally:
        fout.close()

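# Runs 'scons.py platform=<platform> check' in the unpacked distribution,
# capturing the build output in build-<platform>.log.
# Returns (success, log_path).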
def check_compile( distcheck_top_dir, platform ):
    cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
    print 'Running:', ' '.join( cmd )
    log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
    flog = open( log_path, 'wb' )
    try:
        process = subprocess.Popen( cmd,
                                    stdout=flog,
                                    stderr=subprocess.STDOUT,
                                    cwd=distcheck_top_dir )
        stdout = process.communicate()[0]
        status = (process.returncode == 0)
    finally:
        flog.close()
    return (status, log_path)

def write_tempfile( content, **kwargs ):
    fd, path = tempfile.mkstemp( **kwargs )
    f = os.fdopen( fd, 'wt' )
    try:
        f.write( content )
    finally:
        f.close()
    return path

class SFTPError(Exception):
    pass

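# Runs a psftp-compatible SFTP client with the given batch commands (written
# to a temporary .sftp file). The transfer is attempted up to max(1, retry)
# times; if every attempt fails, the last SFTPError is raised.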
def run_sftp_batch( userhost, sftp, batch, retry=0 ):
    path = write_tempfile( batch, suffix='.sftp', text=True )
    # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
    cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
    error = None
    for retry_index in xrange(0, max(1,retry)):
        heading = retry_index == 0 and 'Running:' or 'Retrying:'
        print heading, ' '.join( cmd )
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        stdout = process.communicate()[0]
        if process.returncode != 0:
            error = SFTPError( 'SFTP batch failed:\n' + stdout )
        else:
            error = None
            break
    if error:
        raise error
    return stdout

def sourceforge_web_synchro( sourceforge_project, doc_dir,
                             user=None, sftp='sftp' ):
    """Notes: does not synchronize sub-directories of doc_dir.
    """
    userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
    stdout = run_sftp_batch( userhost, sftp, """
cd htdocs
dir
exit
""" )
    existing_paths = set()
    collect = 0
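    # Parse the sftp transcript of the 'dir' listing: ignore everything up to
    # the echoed 'dir' command, skip the following listing header line, then
    # record the last column (the file name) of each entry until the echoed
    # 'exit' command.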
    for line in stdout.split('\n'):
        line = line.strip()
        if not collect and line.endswith('> dir'):
            collect = True
        elif collect and line.endswith('> exit'):
            break
        elif collect == 1:
            collect = 2
        elif collect == 2:
            path = line.strip().split()[-1:]
            if path and path[0] not in ('.', '..'):
                existing_paths.add( path[0] )
    upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
    paths_to_remove = existing_paths - upload_paths
    if paths_to_remove:
        print 'Removing the following files from the web site:'
        print '\n'.join( paths_to_remove )
        stdout = run_sftp_batch( userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove) )
    print 'Uploading %d files:' % len(upload_paths)
    batch_size = 10
    upload_paths = list(upload_paths)
    start_time = time.time()
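    # Upload in batches of batch_size files, printing progress and a rough ETA
    # based on the average time spent per file so far.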
    for index in xrange(0,len(upload_paths),batch_size):
        paths = upload_paths[index:index+batch_size]
        sec_per_file = (time.time() - start_time) / (index+1)
        remaining_files = len(upload_paths) - index
        remaining_sec = sec_per_file * remaining_files
        print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
        run_sftp_batch( userhost, sftp, """cd htdocs
lcd %s
mput %s
exit""" % (doc_dir, ' '.join(paths) ), retry=3 )

def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ):
    userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
    run_sftp_batch( userhost, sftp, """
mput %s
exit
""" % (' '.join(paths),) )


def main():
    usage = """%prog release_version next_dev_version
Updates the 'version' file to release_version and commits.
Generates the documentation tarball.
Tags the sandbox revision with release_version.
Updates the 'version' file to next_dev_version and commits.

Performs an svn export of the release_version tag and builds a source tarball.

Must be started in the project top directory.

Warning: --force should only be used when developing/testing the release script.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'),
        help="""Path to GraphViz dot tool. Must be a fully qualified path. [Default: %default]""")
    parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'),
        help="""Path to Doxygen tool. [Default: %default]""")
    parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False,
        help="""Ignore pending commit. [Default: %default]""")
    parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
        help="""Overwrite the existing release tag if it exists. [Default: %default]""")
    parser.add_option('-p', '--platforms', dest="platforms", action='store', default='',
        help="""Comma-separated list of platforms passed to scons for the build check.""")
    parser.add_option('--no-test', dest="no_test", action='store_true', default=False,
        help="""Skips build check.""")
    parser.add_option('--no-web', dest="no_web", action='store_true', default=False,
        help="""Do not update web site.""")
    parser.add_option('-u', '--upload-user', dest="user", action='store',
                      help="""Sourceforge user for SFTP documentation upload.""")
    parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'),
                      help="""Path of the SFTP compatible binary used to upload the documentation.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()

    if len(args) != 2:
        parser.error( 'release_version and next_dev_version must be provided on the command-line.' )
    release_version = args[0]
    next_version = args[1]

    if not options.platforms and not options.no_test:
        parser.error( 'You must specify either the --platforms or the --no-test option.' )

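    # Unless --force was given, refuse to proceed while the sandbox has
    # pending (uncommitted) changes.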
    if options.ignore_pending_commit:
        msg = ''
    else:
        msg = check_no_pending_commit()
    if not msg:
        print 'Setting version to', release_version
        set_version( release_version )
        svn_commit( 'Release ' + release_version )
        tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
        if svn_check_if_tag_exist( tag_url ):
            if options.retag_release:
                svn_remove_tag( tag_url, 'Overwriting previous tag' )
            else:
                print 'Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url
                sys.exit( 1 )
        svn_tag_sandbox( tag_url, 'Release ' + release_version )

        print 'Generating doxygen documentation...'
##        doc_dirname = r'jsoncpp-api-html-0.5.0'
##        doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
        doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
        doc_distcheck_dir = 'dist/doccheck'
        tarball.decompress( doc_tarball_path, doc_distcheck_dir )
        doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname )

        export_dir = 'dist/export'
        svn_export( tag_url, export_dir )
        fix_sources_eol( export_dir )

        source_dir = 'jsoncpp-src-' + release_version
        source_tarball_path = 'dist/%s.tar.gz' % source_dir
        print 'Generating source tarball to', source_tarball_path
        tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )

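        # Build the amalgamated distribution: a single jsoncpp.cpp source file
        # plus the json/json.h header, produced by the amalgamate module.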
        amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
        print 'Generating amalgamation source tarball to', amalgamation_tarball_path
        amalgamation_dir = 'dist/amalgamation'
        amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
        amalgamation_source_dir = 'jsoncpp-src-amalgamation-' + release_version
        tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir],
                              amalgamation_dir, prefix_dir=amalgamation_source_dir )

        # Decompress source tarball, download and install scons-local
        distcheck_dir = 'dist/distcheck'
        distcheck_top_dir = distcheck_dir + '/' + source_dir
        print 'Decompressing source tarball to', distcheck_dir
        rmdir_if_exist( distcheck_dir )
        tarball.decompress( source_tarball_path, distcheck_dir )
        scons_local_path = 'dist/scons-local.tar.gz'
        print 'Downloading scons-local to', scons_local_path
        download( SCONS_LOCAL_URL, scons_local_path )
        print 'Decompressing scons-local to', distcheck_top_dir
        tarball.decompress( scons_local_path, distcheck_top_dir )

        # Run compilation
        print 'Compiling decompressed tarball'
        all_build_status = True
        for platform in options.platforms.split(','):
            print 'Testing platform:', platform
            build_status, log_path = check_compile( distcheck_top_dir, platform )
            print 'see build log:', log_path
            print build_status and '=> ok' or '=> FAILED'
            all_build_status = all_build_status and build_status
        if not all_build_status:
            print 'Testing failed on at least one platform, aborting...'
            svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
            sys.exit(1)
        if options.user:
            if not options.no_web:
                print 'Uploading documentation using user', options.user
                sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
                print 'Completed documentation upload'
            print 'Uploading source and documentation tarballs for release using user', options.user
            sourceforge_release_tarball( SOURCEFORGE_PROJECT,
                                         [source_tarball_path, doc_tarball_path],
                                         user=options.user, sftp=options.sftp )
            print 'Source and doc release tarballs uploaded'
        else:
            print 'No upload user specified. Web site and download tarballs were not uploaded.'
            print 'Doc tarball can be found at:', doc_tarball_path

        # Set next version number and commit
        set_version( next_version )
        svn_commit( 'Released ' + release_version )
    else:
        sys.stderr.write( msg + '\n' )

if __name__ == '__main__':
    main()