Skip to content

Commit

Permalink
Bug 1000723 - Add generic support for multiple output from compilatio…
Browse files Browse the repository at this point in the history
…n. r=mshal
  • Loading branch information
glandium committed Apr 28, 2014
1 parent aba929b commit 2fe8041
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 20 deletions.
25 changes: 16 additions & 9 deletions compiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,12 @@ class Compiler(object):
'''
Base class for compiler helpers. Subclasses must implement the following
methods:
parse_arguments: parse a command line and return an object containing
parse_arguments: parse a command line and return a dict containing
all necessary information for preprocessing and compilation. This
object is meant to be passed to the two other methods.
dict is meant to be passed to the two other methods. As a contract
with the caller, this dict also contains an 'output' item whose
value is a dict associating keys to output files (a compilation
can have more than one output).
preprocess: return the preprocessor output corresponding to the command
line in the form of a tuple return_code, stdout_buf, stderr_buf
(with the preprocessor output in stdout_buf)
Expand Down Expand Up @@ -190,7 +193,9 @@ def parse_arguments(self, args):
return {
'input': input,
'extension': extension,
'output': output,
'output': {
'obj': output,
},
'mt': mt,
'common_args': common_args,
}
Expand All @@ -208,7 +213,7 @@ def compile(self, preprocessor_output, parsed_args, cwd=None):
# Compile from the preprocessor output
proc = subprocess.Popen([self.executable, '-c', '-x',
GCCCompiler.FILE_TYPES[parsed_args['extension']], '-', '-o',
parsed_args['output']] + parsed_args['common_args'],
parsed_args['output']['obj']] + parsed_args['common_args'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, cwd=cwd)

Expand All @@ -230,7 +235,7 @@ def compile(self, preprocessor_output, parsed_args, cwd=None):
f.write(preprocessor_output)
try:
proc = subprocess.Popen([self.executable, '-c', path, '-o',
parsed_args['output']] + parsed_args['common_args'],
parsed_args['output']['obj']] + parsed_args['common_args'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)

stdout, stderr = proc.communicate()
Expand Down Expand Up @@ -330,7 +335,9 @@ def parse_arguments(self, args):
return {
'input': input,
'extension': extension,
'output': output,
'output': {
'obj': output,
},
'depfile': depfile,
'common_args': common_args,
}
Expand All @@ -351,7 +358,7 @@ def preprocess(self, parsed_args, cwd=None):
from win32util import normcase
from makeutil import Makefile
mk = Makefile()
rule = mk.create_rule([parsed_args['output']])
rule = mk.create_rule([parsed_args['output']['obj']])
rule.add_dependencies([normcase(parsed_args['input'])])
filtered_stderr = ''
for line in stderr.splitlines(True):
Expand All @@ -378,7 +385,7 @@ def compile(self, preprocessor_output, parsed_args, cwd=None):
f.write(preprocessor_output)
try:
proc = subprocess.Popen([self.executable, '-c', path,
'-Fo' + parsed_args['output']] + parsed_args['common_args'],
'-Fo' + parsed_args['output']['obj']] + parsed_args['common_args'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
stdout, stderr = proc.communicate()
ret = proc.wait()
Expand All @@ -395,7 +402,7 @@ def compile(self, preprocessor_output, parsed_args, cwd=None):
# Sadly, MSVC preprocessor output is such that it sometimes fails to
# compile. So try again if it did fail.
proc = subprocess.Popen([self.executable, '-c', parsed_args['input'],
'-Fo' + parsed_args['output']] + parsed_args['common_args'],
'-Fo' + parsed_args['output']['obj']] + parsed_args['common_args'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
stdout, stderr = proc.communicate()
ret = proc.wait()
Expand Down
25 changes: 14 additions & 11 deletions server.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@
# thread, which then redispatches it to the ResponseHelper corresponding
# to the job.
# The data in cache keeps track of stdout and stderr from the cached command,
# as well as the output object.
# as well as the output object(s), as determined by
# Compiler.parse_arguments()['output'].

import base_server
import hashlib
Expand Down Expand Up @@ -234,21 +235,22 @@ def _run_command(job):
yield dict(id=id, retcode=retcode, stderr=stderr, status='failure')
return

output = parsed_args['output']
output_from_cwd = os.path.join(cwd, output) if cwd else output
outputs = {key: os.path.join(cwd, path) if cwd else path
for key, path in parsed_args['output'].items()}
if preprocessed:
# Compute the key corresponding to the preprocessor output, the command
# line, and the compiler.
# TODO: Remove preprocessor-only arguments from args (like -D, -I...)
key = hash_key(compiler, args, preprocessed)
cache_key = hash_key(compiler, args, preprocessed)

if not 'SCCACHE_RECACHE' in os.environ:
# Get cached data if there is.
data = storage.get(key)
data = storage.get(cache_key)
if data:
cache = CacheData(data)
with open(output_from_cwd, 'wb') as obj:
obj.write(cache['obj'])
for key, path in outputs.items():
with open(path, 'wb') as obj:
obj.write(cache[key])
stdout = cache['stdout']
stderr = cache['stderr']
yield dict(id=id, retcode=0, stdout=stdout, stderr=stderr,
Expand All @@ -258,12 +260,13 @@ def _run_command(job):
# In case of cache miss, compile
ret, stdout, stderr = compiler.compile(preprocessed, parsed_args, cwd)
# Get the output file content before returning the job status
if not ret and os.path.exists(output_from_cwd):
if not ret and all(os.path.exists(out) for out in outputs.values()):
cache = CacheData()
cache['stdout'] = stdout
cache['stderr'] = stderr
with open(output_from_cwd, 'rb') as obj:
cache['obj'] = obj.read()
for key, path in outputs.items():
with open(path, 'rb') as f:
cache[key] = f.read()
else:
cache = None

Expand All @@ -272,7 +275,7 @@ def _run_command(job):

# Store cache after returning the job status.
if cache:
storage.put(key, cache.data)
storage.put(cache_key, cache.data)
yield dict(stats=storage.last_stats)


Expand Down

0 comments on commit 2fe8041

Please sign in to comment.