# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions shared amongst the Windows generators."""

import copy
import os

# Maps each supported GYP target type to the file extension of its primary
# build output (used when deriving default PDB names, among other things).
TARGET_TYPE_EXT = dict(
    executable="exe",
    loadable_module="dll",
    shared_library="dll",
    static_library="lib",
    windows_driver="sys",
)
19 |
|
---|
20 |
|
---|
21 | def _GetLargePdbShimCcPath():
|
---|
22 | """Returns the path of the large_pdb_shim.cc file."""
|
---|
23 | this_dir = os.path.abspath(os.path.dirname(__file__))
|
---|
24 | src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
|
---|
25 | win_data_dir = os.path.join(src_dir, "data", "win")
|
---|
26 | large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
|
---|
27 | return large_pdb_shim_cc
|
---|
28 |
|
---|
29 |
|
---|
30 | def _DeepCopySomeKeys(in_dict, keys):
|
---|
31 | """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
|
---|
32 |
|
---|
33 | Arguments:
|
---|
34 | in_dict: The dictionary to copy.
|
---|
35 | keys: The keys to be copied. If a key is in this list and doesn't exist in
|
---|
36 | |in_dict| this is not an error.
|
---|
37 | Returns:
|
---|
38 | The partially deep-copied dictionary.
|
---|
39 | """
|
---|
40 | d = {}
|
---|
41 | for key in keys:
|
---|
42 | if key not in in_dict:
|
---|
43 | continue
|
---|
44 | d[key] = copy.deepcopy(in_dict[key])
|
---|
45 | return d
|
---|
46 |
|
---|
47 |
|
---|
48 | def _SuffixName(name, suffix):
|
---|
49 | """Add a suffix to the end of a target.
|
---|
50 |
|
---|
51 | Arguments:
|
---|
52 | name: name of the target (foo#target)
|
---|
53 | suffix: the suffix to be added
|
---|
54 | Returns:
|
---|
55 | Target name with suffix added (foo_suffix#target)
|
---|
56 | """
|
---|
57 | parts = name.rsplit("#", 1)
|
---|
58 | parts[0] = "%s_%s" % (parts[0], suffix)
|
---|
59 | return "#".join(parts)
|
---|
60 |
|
---|
61 |
|
---|
def _ShardName(name, number):
    """Append a shard number to the end of a target name.

    Arguments:
      name: name of the target (foo#target)
      number: shard number
    Returns:
      Target name with shard added (foo_1#target)
    """
    shard_suffix = str(number)
    return _SuffixName(name, shard_suffix)
72 |
|
---|
73 |
|
---|
def ShardTargets(target_list, target_dicts):
    """Shard some targets apart to work around the linkers limits.

    Targets that opt in with a non-zero 'msvs_shard' count are split into
    that many sibling targets, each taking an interleaved slice of the
    original sources. Any dependency on a sharded target is fanned out to
    every one of its shards.

    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
    Returns:
      Tuple of the new sharded versions of the inputs.
    """
    # Collect the requested shard count for every opted-in target.
    targets_to_shard = {}
    for name, spec in target_dicts.items():
        shard_count = int(spec.get("msvs_shard", 0))
        if shard_count:
            targets_to_shard[name] = shard_count

    # Expand sharded entries in target_list, preserving the original order.
    new_target_list = []
    for name in target_list:
        if name in targets_to_shard:
            new_target_list.extend(
                _ShardName(name, i) for i in range(targets_to_shard[name])
            )
        else:
            new_target_list.append(name)

    # Build the per-shard target dictionaries.
    new_target_dicts = {}
    for name, spec in target_dicts.items():
        if name not in targets_to_shard:
            new_target_dicts[name] = spec
            continue
        shard_count = targets_to_shard[name]
        for i in range(shard_count):
            shard_name = _ShardName(name, i)
            shard_spec = copy.copy(spec)
            shard_spec["target_name"] = _ShardName(shard_spec["target_name"], i)
            # Shard i takes every shard_count-th source starting at offset i.
            shard_spec["sources"] = shard_spec.get("sources", [])[i::shard_count]
            new_target_dicts[shard_name] = shard_spec

    # Rewrite dependencies so references to a sharded target hit every shard.
    for name in sorted(new_target_dicts):
        for deptype in ("dependencies", "dependencies_original"):
            deps = copy.copy(new_target_dicts[name].get(deptype, []))
            expanded = []
            for dep in deps:
                if dep in targets_to_shard:
                    expanded.extend(
                        _ShardName(dep, i) for i in range(targets_to_shard[dep])
                    )
                else:
                    expanded.append(dep)
            new_target_dicts[name][deptype] = expanded

    return (new_target_list, new_target_dicts)
128 |
|
---|
129 |
|
---|
130 | def _GetPdbPath(target_dict, config_name, vars):
|
---|
131 | """Returns the path to the PDB file that will be generated by a given
|
---|
132 | configuration.
|
---|
133 |
|
---|
134 | The lookup proceeds as follows:
|
---|
135 | - Look for an explicit path in the VCLinkerTool configuration block.
|
---|
136 | - Look for an 'msvs_large_pdb_path' variable.
|
---|
137 | - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
|
---|
138 | specified.
|
---|
139 | - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
|
---|
140 |
|
---|
141 | Arguments:
|
---|
142 | target_dict: The target dictionary to be searched.
|
---|
143 | config_name: The name of the configuration of interest.
|
---|
144 | vars: A dictionary of common GYP variables with generator-specific values.
|
---|
145 | Returns:
|
---|
146 | The path of the corresponding PDB file.
|
---|
147 | """
|
---|
148 | config = target_dict["configurations"][config_name]
|
---|
149 | msvs = config.setdefault("msvs_settings", {})
|
---|
150 |
|
---|
151 | linker = msvs.get("VCLinkerTool", {})
|
---|
152 |
|
---|
153 | pdb_path = linker.get("ProgramDatabaseFile")
|
---|
154 | if pdb_path:
|
---|
155 | return pdb_path
|
---|
156 |
|
---|
157 | variables = target_dict.get("variables", {})
|
---|
158 | pdb_path = variables.get("msvs_large_pdb_path", None)
|
---|
159 | if pdb_path:
|
---|
160 | return pdb_path
|
---|
161 |
|
---|
162 | pdb_base = target_dict.get("product_name", target_dict["target_name"])
|
---|
163 | pdb_base = "%s.%s.pdb" % (pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
|
---|
164 | pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base
|
---|
165 |
|
---|
166 | return pdb_path
|
---|
167 |
|
---|
168 |
|
---|
def InsertLargePdbShims(target_list, target_dicts, vars):
    """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

    This is a workaround for targets with PDBs greater than 1GB in size, the
    limit for the 1KB pagesize PDBs created by the linker by default.

    Note: both |target_list| and |target_dicts| are modified in place, in
    addition to being returned.

    Arguments:
      target_list: List of target pairs: 'base/base.gyp:base'.
      target_dicts: Dict of target properties keyed on target pair.
      vars: A dictionary of common GYP variables with generator-specific values.
    Returns:
      Tuple of the shimmed version of the inputs.
    """
    # Determine which targets need shimming.
    targets_to_shim = []
    for t in target_dicts:
        target_dict = target_dicts[t]

        # We only want to shim targets that have msvs_large_pdb enabled.
        if not int(target_dict.get("msvs_large_pdb", 0)):
            continue
        # This is intended for executable, shared_library and loadable_module
        # targets where every configuration is set up to produce a PDB output.
        # If any of these conditions is not true then the shim logic will fail
        # below.
        targets_to_shim.append(t)

    large_pdb_shim_cc = _GetLargePdbShimCcPath()

    for t in targets_to_shim:
        target_dict = target_dicts[t]
        target_name = target_dict.get("target_name")

        # Shared skeleton for both generated targets: the original target's
        # configurations/toolset, deep-copied so edits don't leak back.
        base_dict = _DeepCopySomeKeys(
            target_dict, ["configurations", "default_configuration", "toolset"]
        )

        # This is the dict for copying the source file (part of the GYP tree)
        # to the intermediate directory of the project. This is necessary because
        # we can't always build a relative path to the shim source file (on Windows
        # GYP and the project may be on different drives), and Ninja hates absolute
        # paths (it ends up generating the .obj and .obj.d alongside the source
        # file, polluting GYPs tree).
        copy_suffix = "large_pdb_copy"
        copy_target_name = target_name + "_" + copy_suffix
        full_copy_target_name = _SuffixName(t, copy_suffix)
        shim_cc_basename = os.path.basename(large_pdb_shim_cc)
        shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
        shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
        copy_dict = copy.deepcopy(base_dict)
        copy_dict["target_name"] = copy_target_name
        copy_dict["type"] = "none"
        copy_dict["sources"] = [large_pdb_shim_cc]
        copy_dict["copies"] = [
            {"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
        ]

        # This is the dict for the PDB generating shim target. It depends on the
        # copy target.
        shim_suffix = "large_pdb_shim"
        shim_target_name = target_name + "_" + shim_suffix
        full_shim_target_name = _SuffixName(t, shim_suffix)
        shim_dict = copy.deepcopy(base_dict)
        shim_dict["target_name"] = shim_target_name
        shim_dict["type"] = "static_library"
        shim_dict["sources"] = [shim_cc_path]
        shim_dict["dependencies"] = [full_copy_target_name]

        # Set up the shim to output its PDB to the same location as the final linker
        # target.
        # NOTE: 'configurations' is assumed present; _DeepCopySomeKeys copied it
        # from the original target above (missing it would make .items() fail).
        for config_name, config in shim_dict.get("configurations").items():
            pdb_path = _GetPdbPath(target_dict, config_name, vars)

            # A few keys that we don't want to propagate.
            for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
                config.pop(key, None)

            msvs = config.setdefault("msvs_settings", {})

            # Update the compiler directives in the shim target.
            compiler = msvs.setdefault("VCCLCompilerTool", {})
            compiler["DebugInformationFormat"] = "3"
            compiler["ProgramDataBaseFileName"] = pdb_path

            # Set the explicit PDB path in the appropriate configuration of the
            # original target.
            # NOTE: 'config' and 'msvs' are deliberately rebound here to the
            # ORIGINAL target's configuration (not the shim's) so the linker
            # settings below land on the real target.
            config = target_dict["configurations"][config_name]
            msvs = config.setdefault("msvs_settings", {})
            linker = msvs.setdefault("VCLinkerTool", {})
            linker["GenerateDebugInformation"] = "true"
            linker["ProgramDatabaseFile"] = pdb_path

        # Add the new targets. They must go to the beginning of the list so that
        # the dependency generation works as expected in ninja.
        target_list.insert(0, full_copy_target_name)
        target_list.insert(0, full_shim_target_name)
        target_dicts[full_copy_target_name] = copy_dict
        target_dicts[full_shim_target_name] = shim_dict

        # Update the original target to depend on the shim target.
        target_dict.setdefault("dependencies", []).append(full_shim_target_name)

    return (target_list, target_dicts)