Permalink
Browse files

Make Ctrl+C kill gyp when parallel processing is on.

Manually tested these cases: 
* Normal success case 
* Ctrl+C during input file processing 
* Syntax error in input file 
* Ctrl+C during ninja generation 
* Bug in Python code in ninja generation 

Ctrl+C handling adapted from John Reese's solution here: 
http://stackoverflow.com/questions/1408356/keyboard-interrupts-with-pythons-multiprocessing-pool

Review URL: http://codereview.chromium.org/11098023/



git-svn-id: http://gyp.googlecode.com/svn/trunk@1519 78cadc50-ecff-11dd-a971-7dbc132099af
  • Loading branch information...
1 parent 5a10131 commit 523297f43e0c96a84e53306f8fddebeb483b27f1 dmazzoni@chromium.org committed Oct 9, 2012
Showing with 96 additions and 59 deletions.
  1. +16 −5 pylib/gyp/generator/ninja.py
  2. +80 −54 pylib/gyp/input.py
View
21 pylib/gyp/generator/ninja.py
@@ -7,6 +7,7 @@
import multiprocessing
import os.path
import re
+import signal
import subprocess
import sys
import gyp
@@ -1749,9 +1750,14 @@ def PerformBuild(data, configurations, params):
def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
(target_list, target_dicts, data, params, config_name) = arglist
GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
def GenerateOutput(target_list, target_dicts, data, params):
user_config = params.get('generator_flags', {}).get('config', None)
if user_config:
@@ -1760,11 +1766,16 @@ def GenerateOutput(target_list, target_dicts, data, params):
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
if params['parallel']:
- pool = multiprocessing.Pool(len(config_names))
- arglists = []
- for config_name in config_names:
- arglists.append((target_list, target_dicts, data, params, config_name))
- pool.map(CallGenerateOutputForConfig, arglists)
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+ arglists = []
+ for config_name in config_names:
+ arglists.append(
+ (target_list, target_dicts, data, params, config_name))
+ pool.map(CallGenerateOutputForConfig, arglists)
+ except KeyboardInterrupt, e:
+ pool.terminate()
+ raise e
else:
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
View
134 pylib/gyp/input.py
@@ -17,6 +17,7 @@
import os.path
import re
import shlex
+import signal
import subprocess
import sys
import threading
@@ -456,39 +457,49 @@ def CallLoadTargetBuildFile(global_flags,
a worker process.
"""
- # Apply globals so that the worker process behaves the same.
- for key, value in global_flags.iteritems():
- globals()[key] = value
+ try:
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
- # Save the keys so we can return data that changed.
- data_keys = set(data)
- aux_data_keys = set(aux_data)
+ # Apply globals so that the worker process behaves the same.
+ for key, value in global_flags.iteritems():
+ globals()[key] = value
- result = LoadTargetBuildFile(build_file_path, data,
- aux_data, variables,
- includes, depth, check, False)
- if not result:
- return result
+ # Save the keys so we can return data that changed.
+ data_keys = set(data)
+ aux_data_keys = set(aux_data)
- (build_file_path, dependencies) = result
+ result = LoadTargetBuildFile(build_file_path, data,
+ aux_data, variables,
+ includes, depth, check, False)
+ if not result:
+ return result
+
+ (build_file_path, dependencies) = result
+
+ data_out = {}
+ for key in data:
+ if key == 'target_build_files':
+ continue
+ if key not in data_keys:
+ data_out[key] = data[key]
+ aux_data_out = {}
+ for key in aux_data:
+ if key not in aux_data_keys:
+ aux_data_out[key] = aux_data[key]
+
+ # This gets serialized and sent back to the main process via a pipe.
+ # It's handled in LoadTargetBuildFileCallback.
+ return (build_file_path,
+ data_out,
+ aux_data_out,
+ dependencies)
+ except Exception, e:
+ print "Exception: ", e
+ return None
- data_out = {}
- for key in data:
- if key == 'target_build_files':
- continue
- if key not in data_keys:
- data_out[key] = data[key]
- aux_data_out = {}
- for key in aux_data:
- if key not in aux_data_keys:
- aux_data_out[key] = aux_data[key]
- # This gets serialized and sent back to the main process via a pipe.
- # It's handled in LoadTargetBuildFileCallback.
- return (build_file_path,
- data_out,
- aux_data_out,
- dependencies)
+class ParallelProcessingError(Exception):
+ pass
class ParallelState(object):
@@ -517,12 +528,19 @@ def __init__(self):
self.scheduled = set()
# A list of dependency build file paths that haven't been scheduled yet.
self.dependencies = []
+ # Flag to indicate if there was an error in a child process.
+ self.error = False
def LoadTargetBuildFileCallback(self, result):
"""Handle the results of running LoadTargetBuildFile in another process.
"""
- (build_file_path0, data0, aux_data0, dependencies0) = result
self.condition.acquire()
+ if not result:
+ self.error = True
+ self.condition.notify()
+ self.condition.release()
+ return
+ (build_file_path0, data0, aux_data0, dependencies0) = result
self.data['target_build_files'].add(build_file_path0)
for key in data0:
self.data[key] = data0[key]
@@ -547,34 +565,42 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
parallel_state.data = data
parallel_state.aux_data = aux_data
- parallel_state.condition.acquire()
- while parallel_state.dependencies or parallel_state.pending:
- if not parallel_state.dependencies:
- parallel_state.condition.wait()
- continue
+ try:
+ parallel_state.condition.acquire()
+ while parallel_state.dependencies or parallel_state.pending:
+ if parallel_state.error:
+ break
+ if not parallel_state.dependencies:
+ parallel_state.condition.wait()
+ continue
- dependency = parallel_state.dependencies.pop()
-
- parallel_state.pending += 1
- data_in = {}
- data_in['target_build_files'] = data['target_build_files']
- aux_data_in = {}
- global_flags = {
- 'path_sections': globals()['path_sections'],
- 'non_configuration_keys': globals()['non_configuration_keys'],
- 'absolute_build_file_paths': globals()['absolute_build_file_paths'],
- 'multiple_toolsets': globals()['multiple_toolsets']}
-
- if not parallel_state.pool:
- parallel_state.pool = multiprocessing.Pool(8)
- parallel_state.pool.apply_async(
- CallLoadTargetBuildFile,
- args = (global_flags, dependency,
- data_in, aux_data_in,
- variables, includes, depth, check),
- callback = parallel_state.LoadTargetBuildFileCallback)
+ dependency = parallel_state.dependencies.pop()
+
+ parallel_state.pending += 1
+ data_in = {}
+ data_in['target_build_files'] = data['target_build_files']
+ aux_data_in = {}
+ global_flags = {
+ 'path_sections': globals()['path_sections'],
+ 'non_configuration_keys': globals()['non_configuration_keys'],
+ 'absolute_build_file_paths': globals()['absolute_build_file_paths'],
+ 'multiple_toolsets': globals()['multiple_toolsets']}
+
+ if not parallel_state.pool:
+ parallel_state.pool = multiprocessing.Pool(8)
+ parallel_state.pool.apply_async(
+ CallLoadTargetBuildFile,
+ args = (global_flags, dependency,
+ data_in, aux_data_in,
+ variables, includes, depth, check),
+ callback = parallel_state.LoadTargetBuildFileCallback)
+ except KeyboardInterrupt, e:
+ parallel_state.pool.terminate()
+ raise e
parallel_state.condition.release()
+ if parallel_state.error:
+ sys.exit()
# Look for the bracket that matches the first bracket seen in a

0 comments on commit 523297f

Please sign in to comment.