diff --git a/bin/cleanlogs.py b/bin/clean_logs.py
similarity index 100%
rename from bin/cleanlogs.py
rename to bin/clean_logs.py
diff --git a/bin/findDisabledTests.py b/bin/findDisabledTests.py
deleted file mode 100755
index 0afbbdc4d332..000000000000
--- a/bin/findDisabledTests.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/python
-import re
-import time
-import sys
-from pythonTools import *
-
-def main():
-  startTime = time.clock()
-  disabledTestFiles = []
-
-  testAnnotationMatcher = re.compile('^\s*@Test')
-  disabledMatcher = re.compile('enabled\s*=\s*false')
-
-  for testFile in GlobDirectoryWalker(getSearchPath(sys.argv[0]), '*Test.java'):
-    tf = open(testFile)
-    try:
-      for line in tf:
-        if testAnnotationMatcher.search(line) and disabledMatcher.search(line):
-          disabledTestFiles.append(testFile)
-          break
-    finally:
-      tf.close()
-
-  print "Files containing disabled tests: \n"
-  uniqueTests=toSet(disabledTestFiles)
-  i = 1
-  for f in uniqueTests:
-    zeropad=""
-    if i < 10 and len(uniqueTests) > 9:
-      zeropad = " "
-    print "%s%s. %s" % (zeropad, str(i), stripLeadingDots(f))
-    i += 1
-
-  print "\n (finished in " + str(time.clock() - startTime) + " seconds)"
-
-if __name__ == '__main__':
-  main()
-
diff --git a/bin/find_disabled_tests.py b/bin/find_disabled_tests.py
new file mode 100755
index 000000000000..3761ec8998b5
--- /dev/null
+++ b/bin/find_disabled_tests.py
@@ -0,0 +1,38 @@
+#!/usr/bin/python
+import re
+import time
+import sys
+from utils import *
+
+def main():
+  start_time = time.clock()
+  disabled_test_files = []
+
+  test_annotation_matcher = re.compile('^\s*@Test')
+  disabled_matcher = re.compile('enabled\s*=\s*false')
+
+  for test_file in GlobDirectoryWalker(get_search_path(sys.argv[0]), '*Test.java'):
+    tf = open(test_file)
+    try:
+      for line in tf:
+        if test_annotation_matcher.search(line) and disabled_matcher.search(line):
+          disabled_test_files.append(test_file)
+          break
+    finally:
+      tf.close()
+
+  print "Files containing disabled tests: \n"
+  unique_tests=to_set(disabled_test_files)
+  i = 1
+  for f in unique_tests:
+    zeropad=""
+    if i < 10 and len(unique_tests) > 9:
+      zeropad = " "
+    print "%s%s. %s" % (zeropad, str(i), strip_leading_dots(f))
+    i += 1
+
+  print "\n (finished in " + str(time.clock() - start_time) + " seconds)"
+
+if __name__ == '__main__':
+  main()
+
diff --git a/bin/listCommandIDs.py b/bin/list_command_ids.py
similarity index 62%
rename from bin/listCommandIDs.py
rename to bin/list_command_ids.py
index 757b0db84f38..c7ac332a6f7f 100755
--- a/bin/listCommandIDs.py
+++ b/bin/list_command_ids.py
@@ -2,11 +2,11 @@
 
 import re
 import sys
-from pythonTools import *
+from utils import *
 
 command_file_name = re.compile('(commands/[a-zA-Z0-9/]*Command.java)')
 
-def trimName(nm):
+def trim_name(nm):
   res = command_file_name.search(nm)
   if res:
     return res.group(1)
@@ -26,14 +26,14 @@ def get_next(ids_used):
 command_ids = {}
 warnings = []
 
-for testFile in GlobDirectoryWalker(getSearchPath(sys.argv[0]) + 'core/src/main/java/org/infinispan/commands', '*Command.java'):
-  tf = open(testFile)
+for test_file in GlobDirectoryWalker(get_search_path(sys.argv[0]) + 'core/src/main/java/org/infinispan/commands', '*Command.java'):
+  tf = open(test_file)
   try:
     for line in tf:
       mo = command_line_regexp.search(line)
       if mo:
        id = int(mo.group(1))
-        trimmed_name = trimName(testFile)
+        trimmed_name = trim_name(test_file)
        if id in command_ids:
          warnings.append("Saw duplicate COMMAND_IDs in files [%s] and [%s]" % (trimmed_name, command_ids[id]))
        command_ids[id] = trimmed_name
@@ -42,15 +42,15 @@ def get_next(ids_used):
 
 print 'Scanned %s Command source files. IDs are (in order):' % len(command_ids)
 
-sortedKeys = command_ids.keys()
-sortedKeys.sort()
+sorted_keys = command_ids.keys()
+sorted_keys.sort()
 
 i=1
-for k in sortedKeys:
+for k in sorted_keys:
   zeropad = ""
-  if (i < 10 and len(sortedKeys) > 9):
+  if (i < 10 and len(sorted_keys) > 9):
    zeropad = " "
-  print ' %s%s) Class [%s] has COMMAND_ID [%s]' % (zeropad, i, command_ids[k], k)
+  print ' %s%s) Class [%s%s%s] has COMMAND_ID [%s%s%s]' % (zeropad, i, Colors.green(), command_ids[k], Colors.end_color(), Colors.yellow(), k, Colors.end_color())
   i += 1
 
 print "\n"
@@ -60,4 +60,4 @@ def get_next(ids_used):
   print " *** %s" % w
 print "\n"
 
-print "Next available ID is %s" % get_next(sortedKeys)
+print "Next available ID is %s%s%s" % (Colors.cyan(), get_next(sorted_keys), Colors.end_color())
diff --git a/bin/release.py b/bin/release.py
index e1c7aa0662c5..3f8aa32b53f5 100755
--- a/bin/release.py
+++ b/bin/release.py
@@ -6,143 +6,143 @@
 import shutil
 from datetime import *
 from multiprocessing import Process
+from utils import *
 
 try:
   from xml.etree.ElementTree import ElementTree
 except:
-  print '''
+  prettyprint('''
       Welcome to the Infinispan Release Script.
      This release script requires that you use at least Python 2.5.0. It appears
      that you do not thave the ElementTree XML APIs available, which are available
      by default in Python 2.5.0.
-  '''
+  ''', Levels.FATAL)
   sys.exit(1)
-
-from pythonTools import *
 
 modules = []
 uploader = None
-svn_conn = None
+git = None
 
-def getModules(directory):
-  # look at the pom.xml file
+def get_modules(directory):
+  '''Analyses the pom.xml file and extracts declared modules'''
   tree = ElementTree()
   f = directory + "/pom.xml"
-  print "Parsing %s to get a list of modules in project" % f
+  if settings['verbose']:
+    print "Parsing %s to get a list of modules in project" % f
   tree.parse(f)
   mods = tree.findall(".//{%s}module" % maven_pom_xml_namespace)
   for m in mods:
    modules.append(m.text)
 
-def helpAndExit():
-  print '''
+def help_and_exit():
+  prettyprint('''
       Welcome to the Infinispan Release Script.
-      Usage:
+%s      Usage:%s
 
          $ bin/release.py
 
-      E.g.,
+%s      E.g.,%s
 
-          $ bin/release.py 4.1.1.BETA1 <-- this will tag off trunk.
+          $ bin/release.py 4.1.1.BETA1 %s<-- this will tag off master.%s
 
-          $ bin/release.py 4.1.1.BETA1 branches/4.1.x <-- this will use the appropriate branch
+          $ bin/release.py 4.1.1.BETA1 4.1.x %s<-- this will use the appropriate branch.%s
 
-      Please ensure you have edited bin/release.py to suit your ennvironment.
-      There are configurable variables at the start of this file that is
-      specific to your environment.
-  '''
+  ''' % (Colors.yellow(), Colors.end_color(), Colors.yellow(), Colors.end_color(), Colors.green(), Colors.end_color(), Colors.green(), Colors.end_color()), Levels.INFO)
   sys.exit(0)
 
-def validateVersion(version):
-  versionPattern = get_version_pattern()
-  if versionPattern.match(version):
+def validate_version(version):
+  version_pattern = get_version_pattern()
+  if version_pattern.match(version):
    return version.strip().upper()
   else:
-    print "Invalid version '"+version+"'!\n"
-    helpAndExit()
+    prettyprint("Invalid version '"+version+"'!\n", Levels.FATAL)
+    help_and_exit()
 
-def tagInSubversion(version, newVersion, branch):
-  try:
-    svn_conn.tag("%s/%s" % (settings[svn_base_key], branch), newVersion, version)
-  except:
-    print "FATAL: Unable to tag. Perhaps branch %s does not exist on Subversion URL %s." % (branch, settings[svn_base_key])
-    print "FATAL: Cannot continue!"
-    sys.exit(200)
+def tag_release(version, branch):
+  if git.remote_branch_exists():
+    git.switch_to_branch()
+    git.create_tag_branch()
+  else:
+    prettyprint("Branch %s cannot be found on upstream repository. Aborting!" % branch, Levels.FATAL)
+    sys.exit(100)
 
-def getProjectVersionTag(tree):
+def get_project_version_tag(tree):
   return tree.find("./{%s}version" % (maven_pom_xml_namespace))
 
-def getParentVersionTag(tree):
+def get_parent_version_tag(tree):
   return tree.find("./{%s}parent/{%s}version" % (maven_pom_xml_namespace, maven_pom_xml_namespace))
 
-def getPropertiesVersionTag(tree):
+def get_properties_version_tag(tree):
   return tree.find("./{%s}properties/{%s}project-version" % (maven_pom_xml_namespace, maven_pom_xml_namespace))
 
-def writePom(tree, pomFile):
+def write_pom(tree, pom_file):
   tree.write("tmp.xml", 'UTF-8')
   in_f = open("tmp.xml")
-  out_f = open(pomFile, "w")
+  out_f = open(pom_file, "w")
   try:
    for l in in_f:
      newstr = l.replace("ns0:", "").replace(":ns0", "").replace("ns1", "xsi")
      out_f.write(newstr)
   finally:
    in_f.close()
-    out_f.close()
+    out_f.close()
+  os.remove("tmp.xml")
   if settings['verbose']:
-    print " ... updated %s" % pomFile
+    prettyprint(" ... updated %s" % pom_file, Levels.INFO)
 
-def patch(pomFile, version):
-  ## Updates the version in a POM file
-  ## We need to locate //project/parent/version, //project/version and //project/properties/project-version
-  ## And replace the contents of these with the new version
+def patch(pom_file, version):
+  '''Updates the version in a POM file. We need to locate //project/parent/version, //project/version and
+     //project/properties/project-version and replace the contents of these with the new version'''
   if settings['verbose']:
-    print "Patching %s" % pomFile
+    prettyprint("Patching %s" % pom_file, Levels.DEBUG)
   tree = ElementTree()
-  tree.parse(pomFile)
+  tree.parse(pom_file)
   need_to_write = False
 
   tags = []
-  tags.append(getParentVersionTag(tree))
-  tags.append(getProjectVersionTag(tree))
-  tags.append(getPropertiesVersionTag(tree))
+  tags.append(get_parent_version_tag(tree))
+  tags.append(get_project_version_tag(tree))
+  tags.append(get_properties_version_tag(tree))
 
   for tag in tags:
    if tag != None:
      if settings['verbose']:
-        print "%s is %s. Setting to %s" % (str(tag), tag.text, version)
+        prettyprint("%s is %s. Setting to %s" % (str(tag), tag.text, version), Levels.DEBUG)
      tag.text=version
      need_to_write = True
 
   if need_to_write:
    # write to file again!
-    writePom(tree, pomFile)
+    write_pom(tree, pom_file)
+    return True
   else:
    if settings['verbose']:
-      print "File doesn't need updating; nothing replaced!"
+      prettyprint("File doesn't need updating; nothing replaced!", Levels.DEBUG)
+    return False
 
-def get_poms_to_patch(workingDir):
-  getModules(workingDir)
-  print 'Available modules are ' + str(modules)
-  pomsToPatch = [workingDir + "/pom.xml"]
+def get_poms_to_patch(working_dir):
+  get_modules(working_dir)
+  if settings['verbose']:
+    prettyprint('Available modules are ' + str(modules), Levels.DEBUG)
+  poms_to_patch = [working_dir + "/pom.xml"]
   for m in modules:
-    pomsToPatch.append(workingDir + "/" + m + "/pom.xml")
+    poms_to_patch.append(working_dir + "/" + m + "/pom.xml")
   # Look for additional POMs that are not directly referenced!
-  for additionalPom in GlobDirectoryWalker(workingDir, 'pom.xml'):
-    if additionalPom not in pomsToPatch:
-      pomsToPatch.append(additionalPom)
+  for additionalPom in GlobDirectoryWalker(working_dir, 'pom.xml'):
+    if additionalPom not in poms_to_patch:
+      poms_to_patch.append(additionalPom)
 
-  return pomsToPatch
+  return poms_to_patch
 
-def updateVersions(version, workingDir, trunkDir):
-  svn_conn.checkout(settings[svn_base_key] + "/tags/" + version, workingDir)
-
-  pomsToPatch = get_poms_to_patch(workingDir)
-
-  for pom in pomsToPatch:
-    patch(pom, version)
-
+def update_versions(version):
+  poms_to_patch = get_poms_to_patch(".")
+
+  modified_files = []
+  for pom in poms_to_patch:
+    if patch(pom, version):
+      modified_files.append(pom)
+
   ## Now look for Version.java
   version_bytes = '{'
   for ch in version:
@@ -150,7 +150,9 @@ def updateVersions(version, workingDir, trunkDir):
    version_bytes += "'%s', " % ch
   version_bytes = version_bytes[:-2]
   version_bytes += "}"
-  version_java = workingDir + "/core/src/main/java/org/infinispan/Version.java"
+  version_java = "./core/src/main/java/org/infinispan/Version.java"
+  modified_files.append(version_java)
+
   f_in = open(version_java)
   f_out = open(version_java+".tmp", "w")
   try:
@@ -169,27 +171,22 @@ def updateVersions(version, workingDir, trunkDir):
 
   os.rename(version_java+".tmp", version_java)
 
-  # Now make sure this goes back into SVN.
-  checkInMessage = "Infinispan Release Script: Updated version numbers"
-  svn_conn.checkin(workingDir, checkInMessage)
+  # Now make sure this goes back into the repository.
+  git.commit(modified_files)
 
-def buildAndTest(workingDir):
-  os.chdir(workingDir)
-  maven_build_distribution()
-
-def getModuleName(pomFile):
+def get_module_name(pom_file):
   tree = ElementTree()
-  tree.parse(pomFile)
+  tree.parse(pom_file)
   return tree.findtext("./{%s}artifactId" % maven_pom_xml_namespace)
 
-def uploadArtifactsToSourceforge(version):
+def upload_artifacts_to_sourceforge(base_dir, version):
   shutil.rmtree(".tmp", ignore_errors = True)
   os.mkdir(".tmp")
   os.mkdir(".tmp/%s" % version)
   os.chdir(".tmp")
-  dist_dir = "%s/%s/target/distribution" % (settings[local_tags_dir_key], version)
-  print "Copying from %s to %s" % (dist_dir, version)
+  dist_dir = "%s/target/distribution" % base_dir
+  prettyprint("Copying from %s to %s" % (dist_dir, version), Levels.INFO)
   for item in os.listdir(dist_dir):
    full_name = "%s/%s" % (dist_dir, item)
    if item.strip().lower().endswith(".zip") and os.path.isfile(full_name):
@@ -197,8 +194,8 @@
   uploader.upload_scp(version, "sourceforge_frs:/home/frs/project/i/in/infinispan/infinispan")
   shutil.rmtree(".tmp", ignore_errors = True)
 
-def unzip_archive(workingDir, version):
-  os.chdir("%s/target/distribution" % workingDir)
+def unzip_archive(version):
+  os.chdir("./target/distribution")
   ## Grab the distribution archive and un-arch it
   shutil.rmtree("infinispan-%s" % version, ignore_errors = True)
   if settings['verbose']:
@@ -206,25 +203,27 @@
   else:
    subprocess.check_call(["unzip", "-q", "infinispan-%s-all.zip" % version])
 
-def uploadJavadocs(workingDir, version):
+def upload_javadocs(base_dir, version):
   """Javadocs get rsync'ed to filemgmt.jboss.org, in the docs_htdocs/infinispan directory"""
   version_short = get_version_major_minor(version)
 
-  os.chdir("%s/target/distribution/infinispan-%s/doc" % (workingDir, version))
+  os.chdir("%s/target/distribution/infinispan-%s/doc" % (base_dir, version))
   ## "Fix" the docs to use the appropriate analytics tracker ID
-  subprocess.check_call(["%s/bin/updateTracker.sh" % workingDir])
+  subprocess.check_call(["%s/bin/updateTracker.sh" % base_dir])
   os.mkdir(version_short)
   os.rename("apidocs", "%s/apidocs" % version_short)
 
   ## rsync this stuff to filemgmt.jboss.org
   uploader.upload_rsync(version_short, "infinispan@filemgmt.jboss.org:/docs_htdocs/infinispan", flags = ['-rv', '--protocol=28'])
+  os.chdir(base_dir)
 
-def uploadSchema(workingDir, version):
+def upload_schema(base_dir, version):
   """Schema gets rsync'ed to filemgmt.jboss.org, in the docs_htdocs/infinispan/schemas directory"""
-  os.chdir("%s/target/distribution/infinispan-%s/etc/schema" % (workingDir, version))
+  os.chdir("%s/target/distribution/infinispan-%s/etc/schema" % (base_dir, version))
 
   ## rsync this stuff to filemgmt.jboss.org
-  uploader.upload_rsync('.', "infinispan@filemgmt.jboss.org:/docs_htdocs/infinispan/schemas", ['-rv', '--protocol=28'])
+  uploader.upload_rsync('.', "infinispan@filemgmt.jboss.org:/docs_htdocs/infinispan/schemas", flags = ['-rv', '--protocol=28'])
+  os.chdir(base_dir)
 
 def do_task(target, args, async_processes):
   if settings['multi_threaded']:
@@ -236,70 +235,71 @@ def do_task(target, args, async_processes):
 def release():
   global settings
   global uploader
-  global svn_conn
+  global git
 
   assert_python_minimum_version(2, 5)
   require_settings_file()
 
   # We start by determining whether the version passed in is a valid one
   if len(sys.argv) < 2:
-    helpAndExit()
+    help_and_exit()
 
   base_dir = os.getcwd()
-  version = validateVersion(sys.argv[1])
-  branch = "trunk"
+  version = validate_version(sys.argv[1])
+  branch = "master"
 
   if len(sys.argv) > 2:
    branch = sys.argv[2]
 
-  print "Releasing Infinispan version %s from branch '%s'" % (version, branch)
-  print "Please stand by!"
+  prettyprint("Releasing Infinispan version %s from branch '%s'" % (version, branch), Levels.INFO)
+  sure = input_with_default("Are you sure you want to continue?", "N")
+  if not sure.upper().startswith("Y"):
+    prettyprint("... User Abort!", Levels.WARNING)
+    sys.exit(1)
+  prettyprint("OK, releasing! Please stand by ...", Levels.INFO)
 
   ## Set up network interactive tools
   if settings['dry_run']:
    # Use stubs
-    print "*** This is a DRY RUN. No changes will be committed. Used to test this release script only. ***"
-    print "Your settings are %s" % settings
+    prettyprint("*** This is a DRY RUN. No changes will be committed. Used to test this release script only. ***", Levels.DEBUG)
+    prettyprint("Your settings are %s" % settings, Levels.DEBUG)
    uploader = DryRunUploader()
-    svn_conn = DryRunSvnConn()
   else:
    uploader = Uploader()
-    svn_conn= SvnConn()
+
+  git= Git(branch, version.upper())
 
   ## Release order:
-  # Step 1: Tag in SVN
-  newVersion = "%s/tags/%s" % (settings[svn_base_key], version)
-  print "Step 1: Tagging %s in SVN as %s" % (branch, newVersion)
-  tagInSubversion(version, newVersion, branch)
-  print "Step 1: Complete"
+  # Step 1: Tag in Git
+  prettyprint("Step 1: Tagging %s in git as %s" % (branch, version), Levels.INFO)
+  tag_release(version, branch)
+  prettyprint("Step 1: Complete", Levels.INFO)
 
-  workingDir = settings[local_tags_dir_key] + "/" + version
-
   # Step 2: Update version in tagged files
-  print "Step 2: Updating version number in source files"
-  updateVersions(version, workingDir, base_dir)
-  print "Step 2: Complete"
+  prettyprint("Step 2: Updating version number in source files", Levels.INFO)
+  update_versions(version)
+  prettyprint("Step 2: Complete", Levels.INFO)
 
   # Step 3: Build and test in Maven2
-  print "Step 3: Build and test in Maven2"
-  buildAndTest(workingDir)
-  print "Step 3: Complete"
+  prettyprint("Step 3: Build and test in Maven2", Levels.INFO)
+  maven_build_distribution()
+  prettyprint("Step 3: Complete", Levels.INFO)
 
   async_processes = []
 
   ##Unzip the newly built archive now
-  unzip_archive(workingDir, version)
+  unzip_archive(version)
 
   # Step 4: Upload javadocs to FTP
-  print "Step 4: Uploading Javadocs"
-  do_task(uploadJavadocs, [workingDir, version], async_processes)
-  print "Step 4: Complete"
+  prettyprint("Step 4: Uploading Javadocs", Levels.INFO)
+  do_task(upload_javadocs, [base_dir, version], async_processes)
+  prettyprint("Step 4: Complete", Levels.INFO)
 
-  print "Step 5: Uploading to Sourceforge"
-  do_task(uploadArtifactsToSourceforge, [version], async_processes)
-  print "Step 5: Complete"
+  prettyprint("Step 5: Uploading to Sourceforge", Levels.INFO)
+  do_task(upload_artifacts_to_sourceforge, [base_dir, version], async_processes)
+  prettyprint("Step 5: Complete", Levels.INFO)
 
-  print "Step 6: Uploading to configuration XML schema"
-  do_task(uploadSchema, [workingDir, version], async_processes)
-  print "Step 6: Complete"
+  prettyprint("Step 6: Uploading to configuration XML schema", Levels.INFO)
+  do_task(upload_schema, [base_dir, version], async_processes)
+  prettyprint("Step 6: Complete", Levels.INFO)
 
   ## Wait for processes to finish
   for p in async_processes:
@@ -308,7 +308,15 @@ def release():
   for p in async_processes:
    p.join()
 
-  print "\n\n\nDone! Now all you need to do is the remaining post-release tasks as outlined in https://docspace.corp.redhat.com/docs/DOC-28594"
+  ## Clean up in git
+  git.tag_for_release()
+  if not settings['dry_run']:
+    git.push_to_origin()
+    git.cleanup()
+  else:
+    prettyprint("In dry-run mode. Not pushing tag to remote origin and not removing temp release branch %s." % git.working_branch, Levels.DEBUG)
+
+  prettyprint("\n\n\nDone! Now all you need to do is the remaining post-release tasks as outlined in https://docspace.corp.redhat.com/docs/DOC-28594", Levels.INFO)
 
 if __name__ == "__main__":
   release()
diff --git a/bin/pythonTools.py b/bin/utils.py
similarity index 51%
rename from bin/pythonTools.py
rename to bin/utils.py
index 21292cfa51b0..fc95cfc024b1 100755
--- a/bin/pythonTools.py
+++ b/bin/utils.py
@@ -5,6 +5,7 @@
 import sys
 import readline
 import shutil
+import random
 
 settings_file = '%s/.infinispan_dev_settings' % os.getenv('HOME')
 
 ### Known config keys
@@ -12,26 +13,93 @@
 local_tags_dir_key = "local_tags_dir"
 local_mvn_repo_dir_key = "local_mvn_repo_dir"
 maven_pom_xml_namespace = "http://maven.apache.org/POM/4.0.0"
 
-default_settings = {'dry_run': False, 'multi_threaded': False, 'verbose': False}
+default_settings = {'dry_run': False, 'multi_threaded': False, 'verbose': False, 'use_colors': True}
 boolean_keys = ['dry_run', 'multi_threaded', 'verbose']
 
+class Colors(object):
+  MAGENTA = '\033[95m'
+  GREEN = '\033[92m'
+  YELLOW = '\033[93m'
+  RED = '\033[91m'
+  CYAN = '\033[96m'
+  END = '\033[0m'
+
+  @staticmethod
+  def magenta():
+    if use_colors():
+      return Colors.MAGENTA
+    else:
+      return ""
+
+  @staticmethod
+  def green():
+    if use_colors():
+      return Colors.GREEN
+    else:
+      return ""
+
+  @staticmethod
+  def yellow():
+    if use_colors():
+      return Colors.YELLOW
+    else:
+      return ""
+
+  @staticmethod
+  def red():
+    if use_colors():
+      return Colors.RED
+    else:
+      return ""
+
+  @staticmethod
+  def cyan():
+    if use_colors():
+      return Colors.CYAN
+    else:
+      return ""
+
+  @staticmethod
+  def end_color():
+    if use_colors():
+      return Colors.END
+    else:
+      return ""
+
+class Levels(Colors):
+  C_DEBUG = Colors.CYAN
+  C_INFO = Colors.GREEN
+  C_WARNING = Colors.YELLOW
+  C_FATAL = Colors.RED
+  C_ENDC = Colors.END
+
+  DEBUG = "DEBUG"
+  INFO = "INFO"
+  WARNING = "WARNING"
+  FATAL = "FATAL"
+
+  @staticmethod
+  def get_color(level):
+    if use_colors():
+      return getattr(Levels, "C_" + level)
+    else:
+      return ""
+
+def use_colors():
+  return ('use_colors' in settings and settings['use_colors']) or ('use_colors' not in settings)
+
+def prettyprint(message, level):
+  start_color = Levels.get_color(level)
+  end_color = Levels.end_color()
+
+  print "[%s%s%s] %s" % (start_color, level, end_color, message)
+
 def apply_defaults(s):
   for e in default_settings.items():
    if e[0] not in s:
      s[e[0]] = e[1]
   return s
 
-def check_mandatory_settings(s):
-  missing_keys = []
-  required_keys = [svn_base_key, local_tags_dir_key]
-  for k in required_keys:
-    if k not in s:
-      missing_keys.append(k)
-
-  if len(missing_keys) > 0:
-    print "Entries %s are missing in configuration file %s! Cannot proceed!" % (missing_keys, settings_file)
-    sys.exit(2)
-
 def to_bool(x):
   if type(x) == bool:
    return x
@@ -50,7 +118,6 @@ def get_settings():
      if kvp and len(kvp) > 0 and kvp[0] and len(kvp) > 1:
        settings[kvp[0].strip()] = kvp[1].strip()
   settings = apply_defaults(settings)
-  check_mandatory_settings(settings)
   for k in boolean_keys:
    settings[k] = to_bool(settings[k])
   return settings
@@ -63,24 +130,23 @@ def get_settings():
 
 settings = get_settings()
 
 def input_with_default(msg, default):
-  i = raw_input("%s [%s]: " % (msg, default))
+  i = raw_input("%s %s[%s]%s: " % (msg, Colors.magenta(), default, Colors.end_color()))
   if i.strip() == "":
    i = default
   return i
 
 def handle_release_virgin():
   """This sounds dirty!"""
-  print """
+  prettyprint("""
    It appears that this is the first time you are using this script. I need to ask you a few questions before
    we can proceed. Default values are in brackets, just hitting ENTER will accept the default value.
 
    Lets get started!
-  """
+  """, Levels.WARNING)
   s = {}
-  s["svn_base"] = input_with_default("Base Subversion URL to use", "https://svn.jboss.org/repos/infinispan")
-  s["local_tags_dir"] = input_with_default("Local tags directory to use", "%s/Code/infinispan/tags" % os.getenv("HOME"))
   s["verbose"] = input_with_default("Be verbose?", False)
   s["multi_threaded"] = input_with_default("Run multi-threaded? (Disable to debug)", True)
+  s["use_colors"] = input_with_default("Use colors?", True)
   s = apply_defaults(s)
 
   f = open(settings_file, "w")
   try:
@@ -98,39 +164,39 @@ def require_settings_file(recursive = False):
      if not recursive:
        handle_release_virgin()
        require_settings_file(True)
-        print "User-specific environment settings file %s created! Please start this script again!" % settings_file
+        prettyprint("User-specific environment settings file %s created! Please start this script again!" % settings_file, Levels.INFO)
        sys.exit(4)
      else:
-        print "User-specific environment settings file %s is missing! Cannot proceed!" % settings_file
-        print "Please create a file called %s with the following lines:" % settings_file
-        print '''
-  svn_base = https://svn.jboss.org/repos/infinispan
-  local_tags_dir = /PATH/TO/infinispan/tags
-  multi_threaded = False
-        '''
+        prettyprint("User-specific environment settings file %s is missing! Cannot proceed!" % settings_file, Levels.FATAL)
+        prettyprint("Please create a file called %s with the following lines:" % settings_file, Levels.FATAL)
+        prettyprint( '''
+  verbose = False
+  use_colors = True
+  multi_threaded = True
+        ''', Levels.INFO)
        sys.exit(3)
   finally:
    if f:
      f.close()
 
-def toSet(list):
-  """Crappy implementation of creating a Set from a List. To cope with older Python versions"""
-  tempDict = {}
-  for entry in list:
-    tempDict[entry] = "dummy"
-  return tempDict.keys()
-
-def getSearchPath(executable):
-  """Retrieves a search path based on where teh current executable is located. Returns a string to be prepended to address any file in the Infinispan src directory."""
-  inBinDir = re.compile('^.*/?bin/.*.py')
-  if inBinDir.search(executable):
+def get_search_path(executable):
+  """Retrieves a search path based on where the current executable is located. Returns a string to be prepended to address any file in the Infinispan src directory."""
+  in_bin_dir = re.compile('^.*/?bin/.*.py')
+  if in_bin_dir.search(executable):
    return "./"
   else:
    return "../"
 
-def stripLeadingDots(filename):
+def strip_leading_dots(filename):
   return filename.strip('/. ')
 
+def to_set(list):
+  """Crappy implementation of creating a Set from a List. To cope with older Python versions"""
+  temp_dict = {}
+  for entry in list:
+    temp_dict[entry] = "dummy"
+  return temp_dict.keys()
+
 class GlobDirectoryWalker:
   """A forward iterator that traverses a directory tree"""
   def __init__(self, directory, pattern="*"):
@@ -157,47 +223,77 @@ def __getitem__(self, index):
        if fnmatch.fnmatch(file, self.pattern):
          return fullname
 
-class SvnConn(object):
-  """An SVN cnnection making use of the command-line SVN client. Replacement for PySVN which sucked for various reasons."""
+
+class Git(object):
+  '''Encapsulates git functionality necessary for releasing Infinispan'''
 
-  def __init__(self):
+  def __init__(self, branch, tag_name):
+    self.cmd = "git"
+    self.branch = branch
+    self.tag = tag_name
+    self.verbose = False
    if settings['verbose']:
-      self.svn_cmd = ['svn']
-    else:
-      self.svn_cmd = ['svn', '-q']
+      self.verbose = True
+    rand = '%x'.upper() % (random.random() * 100000)
+    self.working_branch='__temp_%s' % rand
 
-  def do_svn(self, params):
-    commands = []
-    for e in self.svn_cmd:
-      commands.append(e)
-    for e in params:
-      commands.append(e)
-    subprocess.check_call(commands)
+  def run_git(self, opts):
+    call = [self.cmd]
+    if type(opts) == list:
+      for o in opts:
+        call.append(o)
+    elif type(opts) == str:
+      for o in opts.split(' '):
+        if o != '':
+          call.append(o)
+    else:
+      raise Error("Cannot handle argument of type %s" % type(opts))
+    if settings['verbose']:
+      prettyprint( 'Executing %s' % call, Levels.DEBUG )
+    return subprocess.Popen(call, stdout=subprocess.PIPE).communicate()[0].split('\n')
 
-  def tag(self, fr_url, to_url, version):
-    """Tags a release."""
-    checkInMessage = "Infinispan Release Script: Tagging " + version
-    self.do_svn(["cp", fr_url, to_url, "-m", checkInMessage])
+  def clean_branches(self, raw_branch_list):
+    def clean(e): return e.replace(' ', '').replace('*', '').strip()
+    def no_empty(e): return e != ''
 
-  def checkout(self, url, to_dir):
-    """Checks out a URL to the given directory"""
-    self.do_svn(["checkout", url, to_dir])
+    return map(clean, filter(no_empty, raw_branch_list))
+
+  def remote_branch_exists(self):
+    '''Tests whether the branch exists on the remote origin'''
+    branches = self.clean_branches(self.run_git("branch -r"))
+    def replace_origin(b): return b.replace('origin/', '')
 
-  def checkin(self, working_dir, msg):
-    """Checks in a working directory with the appropriate message"""
-    self.do_svn(["commit", "-m", msg, working_dir])
+    return self.branch in map(replace_origin, branches)
+
+  def switch_to_branch(self):
+    '''Switches the local repository to the specified branch. Creates it if it doesn't already exist.'''
+    local_branches = self.clean_branches(self.run_git("branch"))
+    if self.branch not in local_branches:
+      self.run_git("branch %s origin/%s" % (self.branch, self.branch))
+    self.run_git("checkout %s" % self.branch)
+
+  def create_tag_branch(self):
+    '''Creates and switches to a temp tagging branch, based off the release branch.'''
+    self.run_git("checkout -b %s %s" % (self.working_branch, self.branch))
+
+  def commit(self, files):
+    '''Commits the set of files to the current branch with a generated commit message.'''
+    for f in files:
+      self.run_git("add %s" % f)
 
-  def add(self, directory):
-    """Adds a directory or file to SVN. Directory can either be the name of a file or dir, or a list of either."""
-    if directory:
-      call_params = ["add"]
-      if isinstance(directory, str):
-        call_params.append(directory)
-      else:
-        for d in directory:
-          call_params.append(d)
-      self.do_svn(call_params)
-
+    self.run_git(["commit", "-m", "'Release Script: update versions for %s'" % self.tag])
+
+  def tag_for_release(self):
+    '''Tags the current branch for release using the tag name.'''
+    self.run_git(["tag", "-a", "-m", "'Release Script: tag %s'" % self.tag, self.tag])
+
+  def push_to_origin(self):
+    '''Pushes the updated tags to origin'''
+    self.run_git("push origin --tags")
+
+  def cleanup(self):
+    '''Cleans up any temporary branches created'''
+    self.run_git("branch -D %s" % self.working_branch)
 
 class DryRun(object):
   location_root = "%s/%s" % (os.getenv("HOME"), "infinispan_release_dry_run")
 
   def find_version(self, url):
    return os.path.split(url)[1]
 
   def copy(self, src, dst):
-    print " DryRun: Executing %s" % ['rsync', self.flags, src, dst]
+    prettyprint( " DryRun: Executing %s" % ['rsync', self.flags, src, dst], Levels.DEBUG)
    try:
      os.makedirs(dst)
    except:
      pass
-    subprocess.check_call(['rsync', self.flags, src, dst])
-
-
-class DryRunSvnConn(DryRun):
-  urls = {}
-  def tag(self, fr_url, to_url, version):
-    self.urls[version] = '%s/svn/%s' % (self.location_root, version)
-    trunk_dir = settings[local_tags_dir_key].replace('/tags', '/trunk')
-    if os.path.isdir(trunk_dir) and is_in_svn(trunk_dir):
-      self.copy(trunk_dir, '%s/svn' % self.location_root)
-      os.rename('%s/svn/trunk' % self.location_root, self.urls[version])
-    else:
-      subprocess.check_call(["svn", "export", fr_url, self.urls[version]])
-  def checkout(self, url, to_dir):
-    ver = self.find_version(url)
-    if ver in self.urls:
-      elems = os.path.split(to_dir)
-      self.copy(self.urls[ver], elems[0])
-    else:
-      subprocess.check_call(["svn", "export", url, to_dir])
-  def checkin(self, working_dir, msg):
-    ver = self.find_version(working_dir)
-    subprocess.check_call(['rsync', working_dir, self.urls[ver]])
-  def add(self, directory):
-    print " DryRunSvnConn: Adding " + directory
-    pass
-
+    subprocess.check_call(['rsync', self.flags, src, dst])
 
 class Uploader(object):
   def __init__(self):
@@ -280,8 +349,8 @@ def upload(self, fr, to, type):
    self.copy(fr, "%s/%s/%s" % (self.location_root, type, to))
 
 
-def is_in_svn(directory):
-  return os.path.isdir(directory + "/.svn")
+def is_git_repo(directory):
+  return os.path.isdir(directory + "/.git")
 
 def maven_build_distribution():
   """Builds the distribution in the current working dir"""
@@ -311,7 +380,7 @@ def assert_python_minimum_version(major, minor):
   major_ok = int(m.group(1)) == major
   minor_ok = int(m.group(2)) >= minor
   if not (minor_ok and major_ok):
-    print "This script requires Python >= %s.%s.0. You have %s" % (major, minor, sys.version)
+    prettyprint( "This script requires Python >= %s.%s.0. You have %s" % (major, minor, sys.version), Levels.FATAL)
    sys.exit(3)
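
Reviewer note: a minimal sketch of how the reworked script is expected to be driven, assembled only from the help text and the require_settings_file() output in this diff (the version and branch values are the illustrative ones already used above, not real releases):

    # one-off setup: ~/.infinispan_dev_settings, as printed by require_settings_file()
    verbose = False
    use_colors = True
    multi_threaded = True

    # tag off master
    $ bin/release.py 4.1.1.BETA1

    # or tag off an existing maintenance branch
    $ bin/release.py 4.1.1.BETA1 4.1.x

With dry_run = True in the settings file, release() swaps in DryRunUploader for the uploads and, in the final cleanup step, skips git.push_to_origin() and git.cleanup(), leaving the temporary __temp_* working branch in place for inspection.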