Create msvs_large_pdb workaround.
This creates a workaround for targets whose PDB sizes exceed 1GB, using the same trick originally implemented here:

https://codereview.chromium.org/11968015/

There are currently 4 targets using this, and another 4 targets that need to use it, hence the desire to centralize this functionality.

BUG=174136

Review URL: https://codereview.chromium.org/12476002

git-svn-id: http://gyp.googlecode.com/svn/trunk@1600 78cadc50-ecff-11dd-a971-7dbc132099af
chrisha@chromium.org committed Mar 18, 2013
1 parent 82e7402 commit 9ee1e40
Showing 8 changed files with 291 additions and 3 deletions.
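A target opts in by setting 'msvs_large_pdb': 1 and giving the linker an explicit ProgramDatabaseFile in its msvs_settings. A minimal sketch of such a target (the target name and PDB path are illustrative; complete working examples are in test/win/large-pdb/large-pdb.gyp below):

  {
    'target_name': 'foo',
    'type': 'executable',
    'msvs_large_pdb': 1,
    'sources': [ 'main.cc' ],
    'msvs_settings': {
      'VCLinkerTool': {
        'GenerateDebugInformation': 'true',
        'ProgramDatabaseFile': '<(PRODUCT_DIR)/foo.exe.pdb',
      },
    },
  },

Everything else (the copy target, the shim static library, and the dependency edge) is generated by MSVSUtil.InsertLargePdbShims, described in the diff that follows.
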
12 changes: 12 additions & 0 deletions data/win/large-pdb-shim.cc
@@ -0,0 +1,12 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
// then used during the final link for modules that have large PDBs. Otherwise,
// the linker will generate a .pdb with a page size of 1KB, which imposes a limit
// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
// (rather than the linker), this limit is avoided. With this in place, PDBs may
// grow to 2GB.
//
// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
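In practice the shim target ends up compiled with per-configuration msvs_settings roughly like the following (a sketch of what InsertLargePdbShims in MSVSUtil.py below produces; DebugInformationFormat '3' corresponds to /Zi, and the PDB path is a hypothetical stand-in for the real target's VCLinkerTool ProgramDatabaseFile):

  'msvs_settings': {
    'VCCLCompilerTool': {
      'DebugInformationFormat': '3',
      'ProgramDataBaseFileName': '<(PRODUCT_DIR)/foo.exe.pdb',
    },
  },

Because the compiler creates that .pdb ahead of the final link, it is laid out with 4KB pages, and the linker then reuses it instead of creating a fresh 1KB-page one.
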
144 changes: 141 additions & 3 deletions pylib/gyp/MSVSUtil.py
@@ -5,6 +5,55 @@
"""Utility functions shared amongst the Windows generators."""

import copy
import os


_TARGET_TYPE_EXT = {
  'executable': '.exe',
  'shared_library': '.dll'
}


def _GetLargePdbShimCcPath():
  """Returns the path of the large_pdb_shim.cc file."""
  this_dir = os.path.abspath(os.path.dirname(__file__))
  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
  win_data_dir = os.path.join(src_dir, 'data', 'win')
  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
  return large_pdb_shim_cc


def _DeepCopySomeKeys(in_dict, keys):
  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
  Arguments:
    in_dict: The dictionary to copy.
    keys: The keys to be copied. If a key is in this list and doesn't exist in
        |in_dict| this is not an error.
  Returns:
    The partially deep-copied dictionary.
  """
  d = {}
  for key in keys:
    if key not in in_dict:
      continue
    d[key] = copy.deepcopy(in_dict[key])
  return d


def _SuffixName(name, suffix):
  """Add a suffix to the end of a target.
  Arguments:
    name: name of the target (foo#target)
    suffix: the suffix to be added
  Returns:
    Target name with suffix added (foo_suffix#target)
  """
  parts = name.rsplit('#', 1)
  parts[0] = '%s_%s' % (parts[0], suffix)
  return '#'.join(parts)


def _ShardName(name, number):
  """Add a shard number to the end of a target.
@@ -15,9 +64,7 @@ def _ShardName(name, number):
  Returns:
    Target name with shard added (foo_1#target)
  """
  parts = name.rsplit('#', 1)
  parts[0] = '%s_%d' % (parts[0], number)
  return '#'.join(parts)
  return _SuffixName(name, str(number))


def ShardTargets(target_list, target_dicts):
@@ -72,3 +119,94 @@ def ShardTargets(target_list, target_dicts):
    new_target_dicts[t]['dependencies'] = new_dependencies

  return (new_target_list, new_target_dicts)


def InsertLargePdbShims(target_list, target_dicts, vars):
  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
  This is a workaround for targets with PDBs greater than 1GB in size, the
  limit for the 1KB pagesize PDBs created by the linker by default.
  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    Tuple of the shimmed version of the inputs.
  """
  # Determine which targets need shimming.
  targets_to_shim = []
  for t in target_dicts:
    target_dict = target_dicts[t]
    # We only want to shim targets that have msvs_large_pdb enabled.
    if not int(target_dict.get('msvs_large_pdb', 0)):
      continue
    # This is intended for executable, shared_library and loadable_module
    # targets where every configuration is set up to produce a PDB output.
    # If any of these conditions is not true then the shim logic will fail
    # below.
    targets_to_shim.append(t)

  large_pdb_shim_cc = _GetLargePdbShimCcPath()

  for t in targets_to_shim:
    target_dict = target_dicts[t]
    target_name = target_dict.get('target_name')

    base_dict = _DeepCopySomeKeys(target_dict,
        ['configurations', 'default_configuration', 'toolset'])

    # This is the dict for copying the source file (part of the GYP tree)
    # to the intermediate directory of the project. This is necessary because
    # we can't always build a relative path to the shim source file (on Windows
    # GYP and the project may be on different drives), and Ninja hates absolute
    # paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYP's tree).
    copy_suffix = '_large_pdb_copy'
    copy_target_name = target_name + '_' + copy_suffix
    full_copy_target_name = _SuffixName(t, copy_suffix)
    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
    copy_dict = copy.deepcopy(base_dict)
    copy_dict['target_name'] = copy_target_name
    copy_dict['type'] = 'none'
    copy_dict['sources'] = [ large_pdb_shim_cc ]
    copy_dict['copies'] = [{
      'destination': shim_cc_dir,
      'files': [ large_pdb_shim_cc ]
    }]

    # This is the dict for the PDB generating shim target. It depends on the
    # copy target.
    shim_suffix = '_large_pdb_shim'
    shim_target_name = target_name + '_' + shim_suffix
    full_shim_target_name = _SuffixName(t, shim_suffix)
    shim_dict = copy.deepcopy(base_dict)
    shim_dict['target_name'] = shim_target_name
    shim_dict['type'] = 'static_library'
    shim_dict['sources'] = [ shim_cc_path ]
    shim_dict['dependencies'] = [ full_copy_target_name ]

    # Set up the shim to output its PDB to the same location as the final linker
    # target.
    for config in shim_dict.get('configurations').itervalues():
      msvs = config.setdefault('msvs_settings')

      linker = msvs.pop('VCLinkerTool')  # We want to clear this dict.
      pdb_path = linker.get('ProgramDatabaseFile')

      compiler = msvs.setdefault('VCCLCompilerTool', {})
      compiler.setdefault('DebugInformationFormat', '3')
      compiler.setdefault('ProgramDataBaseFileName', pdb_path)

    # Add the new targets.
    target_list.append(full_copy_target_name)
    target_list.append(full_shim_target_name)
    target_dicts[full_copy_target_name] = copy_dict
    target_dicts[full_shim_target_name] = shim_dict

    # Update the original target to depend on the shim target.
    target_dict.setdefault('dependencies', []).append(full_shim_target_name)

  return (target_list, target_dicts)
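For a concrete picture of the transformation above, here is a minimal sketch of calling InsertLargePdbShims directly (Python 2, assuming gyp's pylib/ directory is on sys.path; the target name and PDB path are hypothetical, and msvs_settings is written per-configuration because that is the form the generators see after GYP's input processing):

from gyp import MSVSUtil

qualified_target = 'base/base.gyp:foo#target'
target_list = [qualified_target]
target_dicts = {
  qualified_target: {
    'target_name': 'foo',
    'type': 'executable',
    'msvs_large_pdb': 1,
    'default_configuration': 'Debug',
    'configurations': {
      'Debug': {
        'msvs_settings': {
          'VCLinkerTool': {
            'GenerateDebugInformation': 'true',
            'ProgramDatabaseFile': '<(PRODUCT_DIR)/foo.exe.pdb',
          },
        },
      },
    },
  },
}
generator_vars = {'SHARED_INTERMEDIATE_DIR': '<(SHARED_INTERMEDIATE_DIR)'}

(target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
    target_list, target_dicts, generator_vars)

# target_list now also contains two helper targets, e.g.
#   'base/base.gyp:foo__large_pdb_copy#target'  (type 'none': copies the shim .cc)
#   'base/base.gyp:foo__large_pdb_shim#target'  (type 'static_library')
# and target_dicts[qualified_target]['dependencies'] now includes the shim
# target, so 'foo' links only after the 4KB-page PDB exists.

Apart from that added dependency, the original target is left as-is; the shim's only job is to make sure the .pdb named by ProgramDatabaseFile already exists, with 4KB pages, before the real link runs.
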
6 changes: 6 additions & 0 deletions pylib/gyp/generator/msvs.py
@@ -64,6 +64,7 @@
generator_additional_non_configuration_keys = [
    'msvs_cygwin_dirs',
    'msvs_cygwin_shell',
    'msvs_large_pdb',
    'msvs_shard',
]

@@ -1768,6 +1769,11 @@ def GenerateOutput(target_list, target_dicts, data, params):
  # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
  (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)

  # Optionally use the large PDB workaround for targets marked with
  # 'msvs_large_pdb': 1.
  (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
      target_list, target_dicts, generator_default_variables)

  # Prepare the set of configurations.
  configs = set()
  for qualified_target in target_list:
3 changes: 3 additions & 0 deletions pylib/gyp/generator/ninja.py
@@ -1784,6 +1784,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
  user_config = params.get('generator_flags', {}).get('config', None)
  if gyp.common.GetFlavor(params) == 'win':
    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
        target_list, target_dicts, generator_default_variables)

  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
49 changes: 49 additions & 0 deletions test/win/gyptest-link-large-pdb.py
@@ -0,0 +1,49 @@
#!/usr/bin/env python

# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Make sure msvs_large_pdb works correctly.
"""

import TestGyp

import struct
import sys


CHDIR = 'large-pdb'


def CheckImageAndPdb(test, image_basename, expected_page_size):
  pdb_basename = image_basename + '.pdb'
  test.built_file_must_exist(image_basename, chdir=CHDIR)
  test.built_file_must_exist(pdb_basename, chdir=CHDIR)

  # We expect the PDB to have the given page size. For full details of the
  # header look here: https://code.google.com/p/pdbparser/wiki/MSF_Format
  # We read the little-endian 4-byte unsigned integer at position 32 of the
  # file.
  pdb_path = test.built_file_path(pdb_basename, chdir=CHDIR)
  pdb_file = open(pdb_path, 'rb')
  pdb_file.seek(32, 0)
  page_size = struct.unpack('<I', pdb_file.read(4))[0]
  if page_size != expected_page_size:
    print "Expected page size of %d, got %d for PDB file `%s'." % (
        expected_page_size, page_size, pdb_path)


if sys.platform == 'win32':
  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

  test.run_gyp('large-pdb.gyp', chdir=CHDIR)
  test.build('large-pdb.gyp', test.ALL, chdir=CHDIR)

  CheckImageAndPdb(test, 'large_pdb_exe.exe', 4096)
  CheckImageAndPdb(test, 'small_pdb_exe.exe', 1024)
  CheckImageAndPdb(test, 'large_pdb_dll.dll', 4096)
  CheckImageAndPdb(test, 'small_pdb_dll.dll', 1024)

  test.pass_test()
9 changes: 9 additions & 0 deletions test/win/large-pdb/dllmain.cc
@@ -0,0 +1,9 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <windows.h>

BOOL WINAPI DllMain(HINSTANCE hinstance, DWORD reason, LPVOID reserved) {
  return TRUE;
}
64 changes: 64 additions & 0 deletions test/win/large-pdb/large-pdb.gyp
@@ -0,0 +1,64 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  'targets': [
    {
      'target_name': 'large_pdb_exe',
      'type': 'executable',
      'msvs_large_pdb': 1,
      'sources': [
        'main.cc',
      ],
      'msvs_settings': {
        'VCLinkerTool': {
          'GenerateDebugInformation': 'true',
          'ProgramDatabaseFile': '<(PRODUCT_DIR)/large_pdb_exe.exe.pdb',
        },
      },
    },
    {
      'target_name': 'small_pdb_exe',
      'type': 'executable',
      'msvs_large_pdb': 0,
      'sources': [
        'main.cc',
      ],
      'msvs_settings': {
        'VCLinkerTool': {
          'GenerateDebugInformation': 'true',
          'ProgramDatabaseFile': '<(PRODUCT_DIR)/small_pdb_exe.exe.pdb',
        },
      },
    },
    {
      'target_name': 'large_pdb_dll',
      'type': 'shared_library',
      'msvs_large_pdb': 1,
      'sources': [
        'dllmain.cc',
      ],
      'msvs_settings': {
        'VCLinkerTool': {
          'GenerateDebugInformation': 'true',
          'ProgramDatabaseFile': '<(PRODUCT_DIR)/large_pdb_dll.dll.pdb',
        },
      },
    },
    {
      'target_name': 'small_pdb_dll',
      'type': 'shared_library',
      'msvs_large_pdb': 0,
      'sources': [
        'dllmain.cc',
      ],
      'msvs_settings': {
        'VCLinkerTool': {
          'GenerateDebugInformation': 'true',
          'ProgramDatabaseFile': '<(PRODUCT_DIR)/small_pdb_dll.dll.pdb',
        },
      },
    },
  ]
}
7 changes: 7 additions & 0 deletions test/win/large-pdb/main.cc
@@ -0,0 +1,7 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

int main(int argc, const char** argv) {
  return 0;
}
