Upgrade V8 to 3.6.1

commit 0bca54444a5bb869ddebaadc65cd0bf381bb1e81 (parent 526c54c)
ry authored
Showing with 5,390 additions and 2,908 deletions.
  1. +43 −0 deps/v8/ChangeLog
  2. +3 −2 deps/v8/Makefile
  3. +1 −1  deps/v8/benchmarks/crypto.js
  4. +25 −25 deps/v8/benchmarks/earley-boyer.js
  5. +3 −3 deps/v8/benchmarks/regexp.js
  6. +2 −8 deps/v8/build/all.gyp
  7. +9 −1 deps/v8/build/common.gypi
  8. +2 −5 deps/v8/build/standalone.gypi
  9. +2 −2 deps/v8/include/v8.h
  10. +2 −4 deps/v8/samples/shell.cc
  11. +2 −0  deps/v8/src/accessors.cc
  12. +4 −4 deps/v8/src/arm/assembler-arm.cc
  13. +8 −8 deps/v8/src/arm/builtins-arm.cc
  14. +150 −75 deps/v8/src/arm/code-stubs-arm.cc
  15. +323 −296 deps/v8/src/arm/full-codegen-arm.cc
  16. +3 −3 deps/v8/src/arm/ic-arm.cc
  17. +7 −8 deps/v8/src/arm/lithium-codegen-arm.cc
  18. +40 −0 deps/v8/src/arm/macro-assembler-arm.cc
  19. +10 −0 deps/v8/src/arm/macro-assembler-arm.h
  20. +1 −1  deps/v8/src/arm/regexp-macro-assembler-arm.cc
  21. +3 −3 deps/v8/src/arm/stub-cache-arm.cc
  22. +38 −25 deps/v8/src/array.js
  23. +1 −33 deps/v8/src/ast.cc
  24. +15 −119 deps/v8/src/ast.h
  25. +13 −7 deps/v8/src/bootstrapper.cc
  26. +3 −3 deps/v8/src/checks.h
  27. +10 −1 deps/v8/src/contexts.cc
  28. +29 −2 deps/v8/src/contexts.h
  29. +2 −2 deps/v8/src/conversions.h
  30. +8 −6 deps/v8/src/d8.cc
  31. +1 −1  deps/v8/src/d8.js
  32. +6 −5 deps/v8/src/date.js
  33. +0 −1  deps/v8/src/elements.cc
  34. +4 −2 deps/v8/src/extensions/externalize-string-extension.cc
  35. +29 −63 deps/v8/src/full-codegen.cc
  36. +60 −29 deps/v8/src/full-codegen.h
  37. +80 −4 deps/v8/src/heap.cc
  38. +23 −2 deps/v8/src/heap.h
  39. +13 −5 deps/v8/src/hydrogen-instructions.cc
  40. +14 −13 deps/v8/src/hydrogen-instructions.h
  41. +263 −187 deps/v8/src/hydrogen.cc
  42. +7 −5 deps/v8/src/hydrogen.h
  43. +7 −7 deps/v8/src/ia32/builtins-ia32.cc
  44. +93 −16 deps/v8/src/ia32/code-stubs-ia32.cc
  45. +346 −325 deps/v8/src/ia32/full-codegen-ia32.cc
  46. +2 −2 deps/v8/src/ia32/ic-ia32.cc
  47. +5 −7 deps/v8/src/ia32/lithium-codegen-ia32.cc
  48. +43 −7 deps/v8/src/ia32/macro-assembler-ia32.cc
  49. +16 −5 deps/v8/src/ia32/macro-assembler-ia32.h
  50. +1 −1  deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
  51. +1 −1  deps/v8/src/ia32/stub-cache-ia32.cc
  52. +0 −5 deps/v8/src/isolate.cc
  53. +0 −4 deps/v8/src/isolate.h
  54. +3 −2 deps/v8/src/json.js
  55. +5 −3 deps/v8/src/jsregexp.cc
  56. +1 −1  deps/v8/src/jsregexp.h
  57. +3 −4 deps/v8/src/liveedit.cc
  58. +16 −2 deps/v8/src/macros.py
  59. +7 −7 deps/v8/src/math.js
  60. +305 −265 deps/v8/src/messages.js
  61. +39 −7 deps/v8/src/mips/assembler-mips.cc
  62. +6 −6 deps/v8/src/mips/builtins-mips.cc
  63. +164 −74 deps/v8/src/mips/code-stubs-mips.cc
  64. +2 −4 deps/v8/src/mips/constants-mips.h
  65. +65 −48 deps/v8/src/mips/frames-mips.h
  66. +156 −50 deps/v8/src/mips/full-codegen-mips.cc
  67. +3 −3 deps/v8/src/mips/ic-mips.cc
  68. +128 −20 deps/v8/src/mips/macro-assembler-mips.cc
  69. +18 −3 deps/v8/src/mips/macro-assembler-mips.h
  70. +1 −1  deps/v8/src/mips/regexp-macro-assembler-mips.cc
  71. +4 −13 deps/v8/src/mips/simulator-mips.cc
  72. +3 −3 deps/v8/src/mips/stub-cache-mips.cc
  73. +0 −12 deps/v8/src/mksnapshot.cc
  74. +11 −5 deps/v8/src/objects-inl.h
  75. +1 −1  deps/v8/src/objects.cc
  76. +5 −0 deps/v8/src/objects.h
  77. +86 −34 deps/v8/src/parser.cc
  78. +11 −0 deps/v8/src/parser.h
  79. +0 −6 deps/v8/src/platform-linux.cc
  80. +38 −60 deps/v8/src/prettyprinter.cc
  81. +0 −3  deps/v8/src/prettyprinter.h
  82. +30 −4 deps/v8/src/profile-generator.cc
  83. +4 −2 deps/v8/src/profile-generator.h
  84. +3 −3 deps/v8/src/regexp.js
  85. +2 −4 deps/v8/src/runtime-profiler.cc
  86. +99 −26 deps/v8/src/runtime.cc
  87. +12 −1 deps/v8/src/runtime.h
  88. +1 −0  deps/v8/src/runtime.js
  89. +1 −1  deps/v8/src/scanner-base.h
  90. +0 −2  deps/v8/src/scanner.h
  91. +29 −36 deps/v8/src/scopeinfo.cc
  92. +111 −40 deps/v8/src/scopes.cc
  93. +12 −7 deps/v8/src/scopes.h
  94. +2 −1  deps/v8/src/spaces-inl.h
  95. +28 −32 deps/v8/src/string.js
  96. +1 −1  deps/v8/src/stub-cache.h
  97. +1 −0  deps/v8/src/token.h
  98. +4 −3 deps/v8/src/uri.js
  99. +136 −125 deps/v8/src/v8natives.js
  100. +2 −29 deps/v8/src/variables.cc
  101. +42 −13 deps/v8/src/variables.h
  102. +3 −3 deps/v8/src/version.cc
  103. +9 −8 deps/v8/src/weakmap.js
  104. +3 −3 deps/v8/src/x64/builtins-x64.cc
  105. +93 −16 deps/v8/src/x64/code-stubs-x64.cc
  106. +339 −324 deps/v8/src/x64/full-codegen-x64.cc
  107. +1 −1  deps/v8/src/x64/ic-x64.cc
  108. +5 −6 deps/v8/src/x64/lithium-codegen-x64.cc
  109. +47 −11 deps/v8/src/x64/macro-assembler-x64.cc
  110. +12 −1 deps/v8/src/x64/macro-assembler-x64.h
  111. +1 −1  deps/v8/src/x64/regexp-macro-assembler-x64.cc
  112. +2 −2 deps/v8/src/x64/stub-cache-x64.cc
  113. +14 −1 deps/v8/test/cctest/cctest.gyp
  114. +4 −0 deps/v8/test/cctest/cctest.status
  115. +15 −11 deps/v8/test/cctest/test-api.cc
  116. +14 −0 deps/v8/test/cctest/test-assembler-arm.cc
  117. +14 −0 deps/v8/test/cctest/test-assembler-ia32.cc
  118. +17 −1 deps/v8/test/cctest/test-assembler-mips.cc
  119. +14 −0 deps/v8/test/cctest/test-assembler-x64.cc
  120. +2 −2 deps/v8/test/cctest/test-compiler.cc
  121. +34 −29 deps/v8/test/cctest/test-disasm-mips.cc
  122. +5 −4 deps/v8/test/cctest/test-regexp.cc
  123. +29 −0 deps/v8/test/cctest/test-strings.cc
  124. +3 −3 deps/v8/test/es5conform/es5conform.status
  125. +4 −4 deps/v8/test/mjsunit/array-constructor.js
  126. +6 −6 deps/v8/test/mjsunit/array-iteration.js
  127. +1 −1  deps/v8/test/mjsunit/array-sort.js
  128. +2 −2 deps/v8/test/mjsunit/bugs/618.js
  129. +3 −3 deps/v8/test/mjsunit/bugs/bug-618.js
  130. +82 −0 deps/v8/test/mjsunit/builtins.js
  131. +4 −3 deps/v8/test/mjsunit/compiler/delete.js
  132. +1 −1  deps/v8/test/mjsunit/compiler/global-accessors.js
  133. +2 −2 deps/v8/test/mjsunit/const-redecl.js
  134. +2 −3 deps/v8/test/mjsunit/d8-os.js
  135. +1 −1  deps/v8/test/mjsunit/date-parse.js
  136. +1 −1  deps/v8/test/mjsunit/debug-compile-event.js
  137. +1 −1  deps/v8/test/mjsunit/debug-evaluate-recursive.js
  138. +2 −2 deps/v8/test/mjsunit/debug-handle.js
  139. +1 −1  deps/v8/test/mjsunit/debug-listbreakpoints.js
  140. +2 −2 deps/v8/test/mjsunit/debug-references.js
  141. +4 −4 deps/v8/test/mjsunit/debug-return-value.js
  142. +2 −2 deps/v8/test/mjsunit/debug-stepin-call-function-stub.js
  143. +1 −1  deps/v8/test/mjsunit/debug-stepin-constructor.js
  144. +1 −1  deps/v8/test/mjsunit/delete-in-with.js
  145. +1 −1  deps/v8/test/mjsunit/function-source.js
  146. +1 −1  deps/v8/test/mjsunit/get-own-property-descriptor.js
  147. +1 −1  deps/v8/test/mjsunit/global-deleted-property-keyed.js
  148. +126 −0 deps/v8/test/mjsunit/harmony/block-conflicts.js
  149. +225 −0 deps/v8/test/mjsunit/harmony/block-leave.js
  150. +63 −0 deps/v8/test/mjsunit/harmony/block-let-crankshaft.js
  151. +0 −2  deps/v8/test/mjsunit/harmony/block-let-declaration.js
  152. +138 −0 deps/v8/test/mjsunit/harmony/block-let-semantics.js
  153. +99 −22 deps/v8/test/mjsunit/harmony/debug-blockscopes.js
  154. +1 −0  deps/v8/test/mjsunit/harmony/weakmaps.js
  155. +1 −1  deps/v8/test/mjsunit/html-string-funcs.js
  156. +1 −1  deps/v8/test/mjsunit/in.js
  157. +3 −3 deps/v8/test/mjsunit/instanceof.js
  158. +1 −1  deps/v8/test/mjsunit/keyed-storage-extend.js
  159. +2 −2 deps/v8/test/mjsunit/mirror-array.js
  160. +1 −1  deps/v8/test/mjsunit/mirror-function.js
  161. +1 −1  deps/v8/test/mjsunit/mirror-script.js
  162. +1 −1  deps/v8/test/mjsunit/mirror-unresolved-function.js
  163. +7 −1 deps/v8/test/mjsunit/mjsunit.status
  164. +2 −2 deps/v8/test/mjsunit/no-semicolon.js
  165. +1 −1  deps/v8/test/mjsunit/object-define-properties.js
  166. +1 −1  deps/v8/test/mjsunit/object-literal-conversions.js
  167. +2 −2 deps/v8/test/mjsunit/object-literal-overwrite.js
  168. +1 −1  deps/v8/test/mjsunit/object-prevent-extensions.js
  169. +13 −0 deps/v8/test/mjsunit/parse-int-float.js
  170. +1 −1  deps/v8/test/mjsunit/regress/regress-1081309.js
  171. +1 −1  deps/v8/test/mjsunit/regress/regress-1092.js
  172. +1 −1  deps/v8/test/mjsunit/regress/regress-1110.js
  173. +1 −1  deps/v8/test/mjsunit/regress/regress-1213575.js
  174. +36 −0 deps/v8/test/mjsunit/regress/regress-1215.js
  175. +8 −0 deps/v8/test/mjsunit/regress/regress-1447.js
  176. +48 −0 deps/v8/test/mjsunit/regress/regress-1548.js
  177. +43 −0 deps/v8/test/mjsunit/regress/regress-1647.js
  178. +60 −0 deps/v8/test/mjsunit/regress/regress-1650.js
  179. +1 −1  deps/v8/test/mjsunit/regress/regress-1919169.js
  180. +1 −1  deps/v8/test/mjsunit/regress/regress-20070207.js
  181. +2 −2 deps/v8/test/mjsunit/regress/regress-269.js
  182. +1 −1  deps/v8/test/mjsunit/regress/regress-619.js
  183. +4 −4 deps/v8/test/mjsunit/regress/regress-678525.js
  184. +1 −1  deps/v8/test/mjsunit/regress/regress-696.js
  185. +1 −1  deps/v8/test/mjsunit/regress/regress-720.js
  186. +2 −2 deps/v8/test/mjsunit/regress/regress-747.js
  187. +1 −1  deps/v8/test/mjsunit/regress/regress-760-1.js
  188. +1 −1  deps/v8/test/mjsunit/regress/regress-760-2.js
  189. +7 −7 deps/v8/test/mjsunit/regress/regress-798.js
  190. +1 −1  deps/v8/test/mjsunit/regress/regress-918.js
  191. +2 −2 deps/v8/test/mjsunit/regress/regress-925537.js
  192. +1 −1  deps/v8/test/mjsunit/regress/regress-937896.js
  193. +46 −0 deps/v8/test/mjsunit/regress/regress-94425.js
  194. +48 −0 deps/v8/test/mjsunit/regress/regress-95113.js
  195. +42 −0 deps/v8/test/mjsunit/regress/regress-95485.js
  196. +44 −0 deps/v8/test/mjsunit/regress/regress-fundecl.js
  197. +13 −13 deps/v8/test/mjsunit/setter-on-constructor-prototype.js
  198. +3 −2 deps/v8/test/mjsunit/string-compare-alignment.js
  199. +2 −2 deps/v8/test/mjsunit/string-indexof-1.js
  200. +3 −3 deps/v8/test/mjsunit/string-indexof-2.js
  201. +6 −5 deps/v8/test/mjsunit/string-slices.js
  202. +2 −2 deps/v8/test/mjsunit/string-split.js
  203. +1 −1  deps/v8/test/mjsunit/substr.js
  204. +1 −1  deps/v8/test/mjsunit/this-property-assignment.js
  205. +2 −2 deps/v8/test/mjsunit/try.js
  206. +2 −2 deps/v8/test/mjsunit/unicode-test.js
  207. +7 −7 deps/v8/test/mjsunit/value-wrapper.js
  208. +6 −2 deps/v8/test/mozilla/mozilla.status
  209. +1 −1  deps/v8/tools/gdb-v8-support.py
  210. +38 −4 deps/v8/tools/presubmit.py
  211. +3 −3 deps/v8/tools/process-heap-prof.py
  212. +14 −2 deps/v8/tools/test-wrapper-gypbuild.py
  213. +8 −1 deps/v8/tools/test.py
43 deps/v8/ChangeLog
@@ -1,3 +1,46 @@
+2011-09-07: Version 3.6.1
+
+ Fixed a bug in abrupt exit from with or catch inside finally.
+
+ Fixed possible crash in FixedDoubleArray::Initialize() (Chromium
+ issue 95113).
+
+ Fixed a bug in Page::GetRegionMaskForSpan (Chromium issue 94425).
+
+ Fixed a few clang warnings (which -Werror treated as errors).
+
+ Performance improvements on all platforms.
+
+
+2011-09-05: Version 3.6.0
+
+ Fixed a bug when optimizing named function expression (issue 1647).
+
+ Fixed a bug when optimizing f.call.apply (issue 1650).
+
+ Made arguments and caller always be null on native functions
+ (issues 1548 and 1643).
+
+ Fixed issue 1648 (cross-compiling x64 targeting ia32).
+
+ Fixed issue 371 (d8 printing of strings containing \0).
+
+ Fixed order of evaluation in arguments to parseInt (issue 1649).
+
+ Fixed a problem with large heap snapshots in Chrome DevTools
+ (issue 1658, chromium issue 89268).
+
+ Upped default maximum heap size from 512M to 700M.
+
+
+2011-08-31: Version 3.5.10
+
+ Added dependency of v8_base on WinSocket2 Windows library in
+ the GYP-build.
+
+ Various bugfixes.
+
+
2011-08-29: Version 3.5.9
Made FromPropertyDescriptor not trigger inherited setters.
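
Two of the fixes above are observable from plain JavaScript. The sketches below are illustrative only: they assume V8's d8 shell (where print is a builtin) and are not taken from the commit itself.

For the parseInt evaluation-order fix (issue 1649), ES5 requires ToString(string) to run before ToInt32(radix), which a side-effecting conversion can observe:

  // Hypothetical repro sketch for issue 1649.
  var order = [];
  var str   = { toString: function () { order.push("string"); return "42"; } };
  var radix = { valueOf:  function () { order.push("radix");  return 10;  } };
  print(parseInt(str, radix));  // 42
  print(order.join(","));       // "string,radix" after the fix

For the abrupt-exit fix, the pattern is leaving a with (or catch) block inside a finally via break, continue, or return; the context chain has to be unwound correctly on the way out. A hedged sketch of that control flow (the original repro is not part of this page):

  function f() {
    while (true) {
      try {
        // fall through to finally
      } finally {
        with ({ x: "inner" }) {
          break;  // abrupt exit from 'with' inside 'finally'
        }
      }
    }
    return typeof x;  // "undefined" once the with-context is popped
  }
  print(f());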
5 deps/v8/Makefile
@@ -98,8 +98,9 @@ CHECKS = $(addsuffix .check,$(BUILDS))
# File where previously used GYPFLAGS are stored.
ENVFILE = $(OUTDIR)/environment
-.PHONY: all clean $(ENVFILE).new \
- $(ARCHES) $(MODES) $(BUILDS) $(addsuffix .clean,$(ARCHES))
+.PHONY: all check clean $(ENVFILE).new \
+ $(ARCHES) $(MODES) $(BUILDS) $(CHECKS) $(addsuffix .clean,$(ARCHES)) \
+ $(addsuffix .check,$(MODES)) $(addsuffix .check,$(ARCHES))
# Target definitions. "all" is the default.
all: $(MODES)
2  deps/v8/benchmarks/crypto.js
@@ -1406,7 +1406,7 @@ function rng_seed_int(x) {
// Mix in the current time (w/milliseconds) into the pool
function rng_seed_time() {
- // Use pre-computed date to avoid making the benchmark
+ // Use pre-computed date to avoid making the benchmark
// results dependent on the current date.
rng_seed_int(1122926989487);
}
50 deps/v8/benchmarks/earley-boyer.js
@@ -134,7 +134,7 @@ function sc_rempropBang(sym, key) {
/*** META ((export #t)) */
function sc_any2String(o) {
return jsstring2string(sc_toDisplayString(o));
-}
+}
/*** META ((export #t)
(peephole (infix 2 2 "==="))
@@ -923,7 +923,7 @@ function sc_dualAppendBang(l1, l2) {
tmp.cdr = l2;
return l1;
}
-
+
/*** META ((export #t)) */
function sc_appendBang() {
var res = null;
@@ -1163,7 +1163,7 @@ sc_Char.readable2char = {
"us": "\037",
"sp": "\040",
"del": "\177"};
-
+
sc_Char.prototype.toString = function() {
return this.val;
};
@@ -1533,7 +1533,7 @@ function sc_mapBang(proc, l1) {
}
return l1_orig;
}
-
+
/*** META ((export #t)) */
function sc_forEach(proc, l1) {
if (l1 === undefined)
@@ -1871,7 +1871,7 @@ function sc_jsNew(c) {
evalStr += ", arguments[" + i + "]";
evalStr +=")";
return eval(evalStr);
-}
+}
// ======================== RegExp ====================
/*** META ((export #t)) */
@@ -1883,9 +1883,9 @@ function sc_pregexp(re) {
function sc_pregexpMatch(re, s) {
var reg = (re instanceof RegExp) ? re : sc_pregexp(re);
var tmp = reg.exec(sc_string2jsstring(s));
-
+
if (tmp == null) return false;
-
+
var res = null;
for (var i = tmp.length-1; i >= 0; i--) {
if (tmp[i] !== null) {
@@ -1896,7 +1896,7 @@ function sc_pregexpMatch(re, s) {
}
return res;
}
-
+
/*** META ((export #t)) */
function sc_pregexpReplace(re, s1, s2) {
var reg;
@@ -1914,7 +1914,7 @@ function sc_pregexpReplace(re, s1, s2) {
return jss1.replace(reg, jss2);
}
-
+
/*** META ((export pregexp-replace*)) */
function sc_pregexpReplaceAll(re, s1, s2) {
var reg;
@@ -1945,7 +1945,7 @@ function sc_pregexpSplit(re, s) {
return sc_vector2list(tmp);
}
-
+
/* =========================================================================== */
/* Other library stuff */
@@ -2136,7 +2136,7 @@ sc_ErrorInputPort.prototype.getNextChar = function() {
sc_ErrorInputPort.prototype.isCharReady = function() {
return false;
};
-
+
/* .............. String port ..........................*/
@@ -2200,7 +2200,7 @@ sc_Tokenizer.prototype.readToken = function() {
};
sc_Tokenizer.prototype.nextToken = function() {
var port = this.port;
-
+
function isNumberChar(c) {
return (c >= "0" && c <= "9");
};
@@ -2280,7 +2280,7 @@ sc_Tokenizer.prototype.nextToken = function() {
else
return new sc_Token(12/*NUMBER*/, res - 0);
};
-
+
function skipWhitespaceAndComments() {
var done = false;
while (!done) {
@@ -2299,7 +2299,7 @@ sc_Tokenizer.prototype.nextToken = function() {
}
}
};
-
+
function readDot() {
if (isWhitespace(port.peekChar()))
return new sc_Token(10/*DOT*/);
@@ -2429,7 +2429,7 @@ sc_Reader.prototype.read = function() {
while (true) {
var token = tokenizer.peekToken();
-
+
switch (token.type) {
case 2/*CLOSE_PAR*/:
case 4/*CLOSE_BRACE*/:
@@ -2491,7 +2491,7 @@ sc_Reader.prototype.read = function() {
else
throw "bad reference: " + nb;
};
-
+
var tokenizer = this.tokenizer;
var token = tokenizer.readToken();
@@ -2499,7 +2499,7 @@ sc_Reader.prototype.read = function() {
// handle error
if (token.type === 13/*ERROR*/)
throw token.val;
-
+
switch (token.type) {
case 1/*OPEN_PAR*/:
case 3/*OPEN_BRACE*/:
@@ -2550,7 +2550,7 @@ function sc_peekChar(port) {
port = SC_DEFAULT_IN; // THREAD: shared var...
var t = port.peekChar();
return t === SC_EOF_OBJECT? t: new sc_Char(t);
-}
+}
/*** META ((export #t)
(type bool))
*/
@@ -2722,7 +2722,7 @@ sc_StringOutputPort.prototype.close = function() {
function sc_getOutputString(sp) {
return sc_jsstring2string(sp.res);
}
-
+
function sc_ErrorOutputPort() {
}
@@ -2852,7 +2852,7 @@ function sc_newline(p) {
p = SC_DEFAULT_OUT;
p.appendJSString("\n");
}
-
+
/* ------------------ write-char ---------------------------------------------------*/
/*** META ((export #t)) */
@@ -2927,7 +2927,7 @@ sc_Pair.prototype.sc_toWriteCircleString = function(symb, inList) {
}
var res = "";
-
+
if (this[symb] !== undefined) { // implies > 0
this[symb + "use"] = true;
if (inList)
@@ -2939,10 +2939,10 @@ sc_Pair.prototype.sc_toWriteCircleString = function(symb, inList) {
if (!inList)
res += "(";
-
+
// print car
res += sc_genToWriteCircleString(this.car, symb);
-
+
if (sc_isPair(this.cdr)) {
res += " " + this.cdr.sc_toWriteCircleString(symb, true);
} else if (this.cdr !== null) {
@@ -3072,7 +3072,7 @@ function sc_format(s, args) {
p.appendJSString(arguments[j].toString(2));
i += 2; j++;
break;
-
+
case 37:
case 110:
// %, n
@@ -3186,7 +3186,7 @@ function sc_isEqual(o1, o2) {
function sc_number2symbol(x, radix) {
return sc_SYMBOL_PREFIX + sc_number2jsstring(x, radix);
}
-
+
/*** META ((export number->string integer->string)) */
var sc_number2string = sc_number2jsstring;
6 deps/v8/benchmarks/regexp.js
@@ -33,7 +33,7 @@
// the popularity of the pages where it occurs and the number of times
// it is executed while loading each page. Furthermore the literal
// letters in the data are encoded using ROT13 in a way that does not
-// affect how the regexps match their input. Finally the strings are
+// affect how the regexps match their input. Finally the strings are
// scrambled to exercise the regexp engine on different input strings.
@@ -47,7 +47,7 @@ function RegExpSetup() {
regExpBenchmark = new RegExpBenchmark();
RegExpRun(); // run once to get system initialized
}
-
+
function RegExpRun() {
regExpBenchmark.run();
}
@@ -1759,6 +1759,6 @@ function RegExpBenchmark() {
runBlock11();
}
}
-
+
this.run = run;
}
10 deps/v8/build/all.gyp
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Copyright 2011 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -11,13 +11,7 @@
'../preparser/preparser.gyp:*',
'../samples/samples.gyp:*',
'../src/d8.gyp:d8',
- ],
- 'conditions': [
- [ 'component!="shared_library"', {
- 'dependencies': [
- '../test/cctest/cctest.gyp:*',
- ],
- }]
+ '../test/cctest/cctest.gyp:*',
],
}
]
10 deps/v8/build/common.gypi
@@ -173,6 +173,14 @@
},
},
}],
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
+ 'conditions': [
+ [ 'target_arch=="ia32"', {
+ 'cflags': [ '-m32' ],
+ 'ldflags': [ '-m32' ],
+ }],
+ ],
+ }],
],
'configurations': {
'Debug': {
@@ -207,7 +215,7 @@
'cflags': [ '-I/usr/local/include' ],
}],
['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
- 'cflags': [ '-Wall', '-W', '-Wno-unused-parameter',
+ 'cflags': [ '-Wall', '-Werror', '-W', '-Wno-unused-parameter',
'-Wnon-virtual-dtor' ],
}],
],
7 deps/v8/build/standalone.gypi
@@ -74,15 +74,11 @@
'conditions': [
[ 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
'target_defaults': {
- 'cflags': [ '-Wall', '-W', '-Wno-unused-parameter',
+ 'cflags': [ '-Wall', '-Werror', '-W', '-Wno-unused-parameter',
'-Wnon-virtual-dtor', '-pthread', '-fno-rtti',
'-fno-exceptions', '-pedantic' ],
'ldflags': [ '-pthread', ],
'conditions': [
- [ 'target_arch=="ia32"', {
- 'cflags': [ '-m32' ],
- 'ldflags': [ '-m32' ],
- }],
[ 'OS=="linux"', {
'cflags': [ '-ansi' ],
}],
@@ -172,6 +168,7 @@
'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
'GCC_THREADSAFE_STATICS': 'NO', # -fno-threadsafe-statics
+ 'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES', # -Werror
'GCC_VERSION': '4.2',
'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES', # -Wnewline-eof
'MACOSX_DEPLOYMENT_TARGET': '10.4', # -mmacosx-version-min=10.4
4 deps/v8/include/v8.h
@@ -1656,7 +1656,7 @@ class Object : public Value {
V8EXPORT bool IsCallable();
/**
- * Call an Object as a function if a callback is set by the
+ * Call an Object as a function if a callback is set by the
* ObjectTemplate::SetCallAsFunctionHandler method.
*/
V8EXPORT Local<Value> CallAsFunction(Handle<Object> recv,
@@ -3562,7 +3562,7 @@ class V8EXPORT Context {
* // V8 Now no longer locked.
* \endcode
*
- *
+ *
*/
class V8EXPORT Unlocker {
public:
6 deps/v8/samples/shell.cc
@@ -250,16 +250,14 @@ void RunShell(v8::Handle<v8::Context> context) {
static const int kBufferSize = 256;
// Enter the execution environment before evaluating any code.
v8::Context::Scope context_scope(context);
+ v8::Local<v8::String> name(v8::String::New("(shell)"));
while (true) {
char buffer[kBufferSize];
printf("> ");
char* str = fgets(buffer, kBufferSize, stdin);
if (str == NULL) break;
v8::HandleScope handle_scope;
- ExecuteString(v8::String::New(str),
- v8::String::New("(shell)"),
- true,
- true);
+ ExecuteString(v8::String::New(str), name, true, true);
}
printf("\n");
}
2  deps/v8/src/accessors.cc
@@ -599,6 +599,7 @@ MaybeObject* Accessors::FunctionGetArguments(Object* object, void*) {
if (!found_it) return isolate->heap()->undefined_value();
Handle<JSFunction> function(holder, isolate);
+ if (function->shared()->native()) return isolate->heap()->null_value();
// Find the top invocation of the function by traversing frames.
List<JSFunction*> functions(2);
for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
@@ -732,6 +733,7 @@ MaybeObject* Accessors::FunctionGetCaller(Object* object, void*) {
bool found_it = false;
JSFunction* holder = FindInPrototypeChain<JSFunction>(object, &found_it);
if (!found_it) return isolate->heap()->undefined_value();
+ if (holder->shared()->native()) return isolate->heap()->null_value();
Handle<JSFunction> function(holder, isolate);
FrameFunctionIterator it(isolate, no_alloc);
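
The accessors.cc change above implements the ChangeLog item from 3.6.0: the arguments and caller accessors short-circuit to null whenever the holder is a native (built-in) function, instead of walking the JavaScript stack. A minimal sketch of the observable behavior, again assuming d8's print builtin:

  // Natives: both accessors are now guaranteed to yield null.
  print(Math.max.arguments);  // null
  print(Math.max.caller);     // null

  // User-defined, non-strict functions keep the stack-walking behavior.
  function f() { return f.arguments[0]; }
  print(f(42));               // 42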
8 deps/v8/src/arm/assembler-arm.cc
@@ -692,11 +692,11 @@ void Assembler::bind(Label* L) {
void Assembler::next(Label* L) {
ASSERT(L->is_linked());
int link = target_at(L->pos());
- if (link > 0) {
- L->link_to(link);
- } else {
- ASSERT(link == kEndOfChain);
+ if (link == kEndOfChain) {
L->Unuse();
+ } else {
+ ASSERT(link >= 0);
+ L->link_to(link);
}
}
16 deps/v8/src/arm/builtins-arm.cc
@@ -138,7 +138,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
__ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
// Clear the heap tag on the elements array.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ sub(scratch1, scratch1, Operand(kHeapObjectTag));
// Initialize the FixedArray and fill it with holes. FixedArray length is
@@ -207,7 +207,7 @@ static void AllocateJSArray(MacroAssembler* masm,
// Allocate the JSArray object together with space for a FixedArray with the
// requested number of elements.
__ bind(&not_empty);
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ mov(elements_array_end,
Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
__ add(elements_array_end,
@@ -243,7 +243,7 @@ static void AllocateJSArray(MacroAssembler* masm,
FieldMemOperand(result, JSArray::kElementsOffset));
// Clear the heap tag on the elements array.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ sub(elements_array_storage,
elements_array_storage,
Operand(kHeapObjectTag));
@@ -255,7 +255,7 @@ static void AllocateJSArray(MacroAssembler* masm,
__ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
__ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ tst(array_size, array_size);
// Length of the FixedArray is the number of pre-allocated elements if
// the actual JSArray has length 0 and the size of the JSArray for non-empty
@@ -272,7 +272,7 @@ static void AllocateJSArray(MacroAssembler* masm,
// result: JSObject
// elements_array_storage: elements array element storage
// array_size: smi-tagged size of elements array
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ add(elements_array_end,
elements_array_storage,
Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));
@@ -337,14 +337,14 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ bind(&argc_one_or_more);
__ cmp(r0, Operand(1));
__ b(ne, &argc_two_or_more);
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ ldr(r2, MemOperand(sp)); // Get the argument from the stack.
__ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
__ b(ne, call_generic_code);
// Handle construction of an empty array of a certain size. Bail out if size
// is too large to actually allocate an elements array.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
__ b(ge, call_generic_code);
@@ -571,7 +571,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
// Is it a String?
__ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
- ASSERT(kNotStringTag != 0);
+ STATIC_ASSERT(kNotStringTag != 0);
__ tst(r3, Operand(kIsNotStringMask));
__ b(ne, &convert_argument);
__ mov(argument, r0);
225 deps/v8/src/arm/code-stubs-arm.cc
@@ -4389,8 +4389,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// a sequential string or an external string.
// In the case of a sliced string its offset has to be taken into account.
Label cons_string, check_encoding;
- STATIC_ASSERT((kConsStringTag < kExternalStringTag));
- STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
__ cmp(r1, Operand(kExternalStringTag));
__ b(lt, &cons_string);
__ b(eq, &runtime);
@@ -4487,7 +4487,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// frame. Therefore we have to use fp, which points exactly to two pointer
// sizes below the previous sp. (Because creating a new stack frame pushes
// the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
- __ ldr(r0, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
+ __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
// If slice offset is not 0, load the length from the original sliced string.
// Argument 4, r3: End of string data
// Argument 3, r2: Start of string data
@@ -4495,7 +4495,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ add(r9, r8, Operand(r9, LSL, r3));
__ add(r2, r9, Operand(r1, LSL, r3));
- __ ldr(r8, FieldMemOperand(r0, String::kLengthOffset));
+ __ ldr(r8, FieldMemOperand(subject, String::kLengthOffset));
__ mov(r8, Operand(r8, ASR, kSmiTagSize));
__ add(r3, r9, Operand(r8, LSL, r3));
@@ -4503,7 +4503,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Already there
// Argument 1 (r0): Subject string.
- // Already there
+ __ mov(r0, subject);
// Locate the code entry and call it.
__ add(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4520,12 +4520,12 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Check the result.
Label success;
- __ cmp(subject, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
__ b(eq, &success);
Label failure;
- __ cmp(subject, Operand(NativeRegExpMacroAssembler::FAILURE));
+ __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
__ b(eq, &failure);
- __ cmp(subject, Operand(NativeRegExpMacroAssembler::EXCEPTION));
+ __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
// If not exception it can only be retry. Handle that in the runtime system.
__ b(ne, &runtime);
// Result must now be exception. If there is no pending exception already a
@@ -4537,18 +4537,18 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address,
isolate)));
__ ldr(r0, MemOperand(r2, 0));
- __ cmp(subject, r1);
+ __ cmp(r0, r1);
__ b(eq, &runtime);
__ str(r1, MemOperand(r2, 0)); // Clear pending exception.
// Check if the exception is a termination. If so, throw as uncatchable.
- __ LoadRoot(ip, Heap::kTerminationExceptionRootIndex);
- __ cmp(subject, ip);
+ __ CompareRoot(r0, Heap::kTerminationExceptionRootIndex);
+
Label termination_exception;
__ b(eq, &termination_exception);
- __ Throw(subject); // Expects thrown value in r0.
+ __ Throw(r0); // Expects thrown value in r0.
__ bind(&termination_exception);
__ ThrowUncatchable(TERMINATION, r0); // Expects thrown value in r0.
@@ -4857,8 +4857,8 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
// Handle non-flat strings.
__ and_(result_, result_, Operand(kStringRepresentationMask));
- STATIC_ASSERT((kConsStringTag < kExternalStringTag));
- STATIC_ASSERT((kSlicedStringTag > kExternalStringTag));
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
__ cmp(result_, Operand(kExternalStringTag));
__ b(gt, &sliced_string);
__ b(eq, &call_runtime_);
@@ -4894,7 +4894,8 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
// Check for 1-byte or 2-byte string.
__ bind(&flat_string);
- STATIC_ASSERT(kAsciiStringTag != 0);
+ STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ tst(result_, Operand(kStringEncodingMask));
__ b(ne, &ascii_string);
@@ -5468,11 +5469,6 @@ void SubStringStub::Generate(MacroAssembler* masm) {
Register to = r6;
Register from = r7;
- if (FLAG_string_slices) {
- __ nop(0); // Jumping as first instruction would crash the code generation.
- __ jmp(&runtime);
- }
-
__ Ldrd(to, from, MemOperand(sp, kToOffset));
STATIC_ASSERT(kFromOffset == kToOffset + 4);
STATIC_ASSERT(kSmiTag == 0);
@@ -5490,64 +5486,79 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ b(mi, &runtime); // Fail if from > to.
// Special handling of sub-strings of length 1 and 2. One character strings
// are handled in the runtime system (looked up in the single character
- // cache). Two character strings are looked for in the symbol cache.
+ // cache). Two character strings are looked for in the symbol cache in
+ // generated code.
__ cmp(r2, Operand(2));
__ b(lt, &runtime);
- // r2: length
- // r3: from index (untaged smi)
+ // r2: result string length
+ // r3: from index (untagged smi)
// r6 (a.k.a. to): to (smi)
// r7 (a.k.a. from): from offset (smi)
-
// Make sure first argument is a sequential (or flat) string.
- __ ldr(r5, MemOperand(sp, kStringOffset));
+ __ ldr(r0, MemOperand(sp, kStringOffset));
STATIC_ASSERT(kSmiTag == 0);
- __ JumpIfSmi(r5, &runtime);
- Condition is_string = masm->IsObjectStringType(r5, r1);
+ __ JumpIfSmi(r0, &runtime);
+ Condition is_string = masm->IsObjectStringType(r0, r1);
__ b(NegateCondition(is_string), &runtime);
+ // Short-cut for the case of trivial substring.
+ Label return_r0;
+ // r0: original string
+ // r2: result string length
+ __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
+ __ cmp(r2, Operand(r4, ASR, 1));
+ __ b(eq, &return_r0);
+
+ Label create_slice;
+ if (FLAG_string_slices) {
+ __ cmp(r2, Operand(SlicedString::kMinLength));
+ __ b(ge, &create_slice);
+ }
+
+ // r0: original string
// r1: instance type
- // r2: length
+ // r2: result string length
// r3: from index (untagged smi)
- // r5: string
// r6 (a.k.a. to): to (smi)
// r7 (a.k.a. from): from offset (smi)
Label seq_string;
__ and_(r4, r1, Operand(kStringRepresentationMask));
STATIC_ASSERT(kSeqStringTag < kConsStringTag);
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kConsStringTag < kSlicedStringTag);
__ cmp(r4, Operand(kConsStringTag));
- __ b(gt, &runtime); // External strings go to runtime.
+ __ b(gt, &runtime); // Slices and external strings go to runtime.
__ b(lt, &seq_string); // Sequential strings are handled directly.
// Cons string. Try to recurse (once) on the first substring.
// (This adds a little more generality than necessary to handle flattened
// cons strings, but not much).
- __ ldr(r5, FieldMemOperand(r5, ConsString::kFirstOffset));
- __ ldr(r4, FieldMemOperand(r5, HeapObject::kMapOffset));
+ __ ldr(r0, FieldMemOperand(r0, ConsString::kFirstOffset));
+ __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset));
__ tst(r1, Operand(kStringRepresentationMask));
STATIC_ASSERT(kSeqStringTag == 0);
- __ b(ne, &runtime); // Cons and External strings go to runtime.
+ __ b(ne, &runtime); // Cons, slices and external strings go to runtime.
// Definitly a sequential string.
__ bind(&seq_string);
- // r1: instance type.
- // r2: length
- // r3: from index (untaged smi)
- // r5: string
+ // r0: original string
+ // r1: instance type
+ // r2: result string length
+ // r3: from index (untagged smi)
// r6 (a.k.a. to): to (smi)
// r7 (a.k.a. from): from offset (smi)
- __ ldr(r4, FieldMemOperand(r5, String::kLengthOffset));
+ __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
__ cmp(r4, Operand(to));
__ b(lt, &runtime); // Fail if to > length.
to = no_reg;
- // r1: instance type.
- // r2: result string length.
- // r3: from index (untaged smi)
- // r5: string.
+ // r0: original string or left hand side of the original cons string.
+ // r1: instance type
+ // r2: result string length
+ // r3: from index (untagged smi)
// r7 (a.k.a. from): from offset (smi)
// Check for flat ASCII string.
Label non_ascii_flat;
@@ -5561,82 +5572,146 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Sub string of length 2 requested.
// Get the two characters forming the sub string.
- __ add(r5, r5, Operand(r3));
- __ ldrb(r3, FieldMemOperand(r5, SeqAsciiString::kHeaderSize));
- __ ldrb(r4, FieldMemOperand(r5, SeqAsciiString::kHeaderSize + 1));
+ __ add(r0, r0, Operand(r3));
+ __ ldrb(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
+ __ ldrb(r4, FieldMemOperand(r0, SeqAsciiString::kHeaderSize + 1));
// Try to lookup two character string in symbol table.
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
Counters* counters = masm->isolate()->counters();
- __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
- __ add(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ jmp(&return_r0);
// r2: result string length.
// r3: two characters combined into halfword in little endian byte order.
__ bind(&make_two_character_string);
__ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime);
__ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
- __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
- __ add(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ jmp(&return_r0);
__ bind(&result_longer_than_two);
+ // Locate 'from' character of string.
+ __ add(r5, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ add(r5, r5, Operand(from, ASR, 1));
+
// Allocate the result.
__ AllocateAsciiString(r0, r2, r3, r4, r1, &runtime);
- // r0: result string.
- // r2: result string length.
- // r5: string.
+ // r0: result string
+ // r2: result string length
+ // r5: first character of substring to copy
// r7 (a.k.a. from): from offset (smi)
// Locate first character of result.
__ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- // Locate 'from' character of string.
- __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ add(r5, r5, Operand(from, ASR, 1));
- // r0: result string.
- // r1: first character of result string.
- // r2: result string length.
- // r5: first character of sub string to copy.
+ // r0: result string
+ // r1: first character of result string
+ // r2: result string length
+ // r5: first character of substring to copy
STATIC_ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
COPY_ASCII | DEST_ALWAYS_ALIGNED);
- __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
- __ add(sp, sp, Operand(3 * kPointerSize));
- __ Ret();
+ __ jmp(&return_r0);
__ bind(&non_ascii_flat);
- // r2: result string length.
- // r5: string.
+ // r0: original string
+ // r2: result string length
// r7 (a.k.a. from): from offset (smi)
// Check for flat two byte string.
+ // Locate 'from' character of string.
+ __ add(r5, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ // As "from" is a smi it is 2 times the value which matches the size of a two
+ // byte character.
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ __ add(r5, r5, Operand(from));
+
// Allocate the result.
__ AllocateTwoByteString(r0, r2, r1, r3, r4, &runtime);
- // r0: result string.
- // r2: result string length.
- // r5: string.
+ // r0: result string
+ // r2: result string length
+ // r5: first character of substring to copy
// Locate first character of result.
__ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // Locate 'from' character of string.
- __ add(r5, r5, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- // As "from" is a smi it is 2 times the value which matches the size of a two
- // byte character.
- __ add(r5, r5, Operand(from));
+
from = no_reg;
// r0: result string.
// r1: first character of result.
// r2: result length.
- // r5: first character of string to copy.
+ // r5: first character of substring to copy.
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, r1, r5, r2, r3, r4, r6, r7, r9, DEST_ALWAYS_ALIGNED);
+ __ jmp(&return_r0);
+
+ if (FLAG_string_slices) {
+ __ bind(&create_slice);
+ // r0: original string
+ // r1: instance type
+ // r2: length
+ // r3: from index (untagged smi)
+ // r6 (a.k.a. to): to (smi)
+ // r7 (a.k.a. from): from offset (smi)
+ Label allocate_slice, sliced_string, seq_string;
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ tst(r1, Operand(kStringRepresentationMask));
+ __ b(eq, &seq_string);
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+ STATIC_ASSERT(kIsIndirectStringMask != 0);
+ __ tst(r1, Operand(kIsIndirectStringMask));
+ // External string. Jump to runtime.
+ __ b(eq, &runtime);
+
+ __ tst(r1, Operand(kSlicedNotConsMask));
+ __ b(ne, &sliced_string);
+ // Cons string. Check whether it is flat, then fetch first part.
+ __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
+ __ LoadRoot(r9, Heap::kEmptyStringRootIndex);
+ __ cmp(r5, r9);
+ __ b(ne, &runtime);
+ __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
+ __ jmp(&allocate_slice);
+
+ __ bind(&sliced_string);
+ // Sliced string. Fetch parent and correct start index by offset.
+ __ ldr(r5, FieldMemOperand(r0, SlicedString::kOffsetOffset));
+ __ add(r7, r7, r5);
+ __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
+ __ jmp(&allocate_slice);
+
+ __ bind(&seq_string);
+ // Sequential string. Just move string to the right register.
+ __ mov(r5, r0);
+
+ __ bind(&allocate_slice);
+ // r1: instance type of original string
+ // r2: length
+ // r5: underlying subject string
+ // r7 (a.k.a. from): from offset (smi)
+ // Allocate new sliced string. At this point we do not reload the instance
+ // type including the string encoding because we simply rely on the info
+ // provided by the original string. It does not matter if the original
+ // string's encoding is wrong because we always have to recheck encoding of
+ // the newly created string's parent anyways due to externalized strings.
+ Label two_byte_slice, set_slice_header;
+ STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
+ STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
+ __ tst(r1, Operand(kStringEncodingMask));
+ __ b(eq, &two_byte_slice);
+ __ AllocateAsciiSlicedString(r0, r2, r3, r4, &runtime);
+ __ jmp(&set_slice_header);
+ __ bind(&two_byte_slice);
+ __ AllocateTwoByteSlicedString(r0, r2, r3, r4, &runtime);
+ __ bind(&set_slice_header);
+ __ str(r7, FieldMemOperand(r0, SlicedString::kOffsetOffset));
+ __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
+ }
+
+ __ bind(&return_r0);
__ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
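
The SubStringStub changes above wire up string slices on ARM: when FLAG_string_slices is on, a substring of at least SlicedString::kMinLength characters is represented as a (parent, offset) view rather than a character copy. The representation is invisible to script, so the sketch below only exercises the path; it assumes d8's print builtin:

  // A long flat ASCII parent makes the slice path eligible; behavior,
  // not representation, is what script can check.
  var parent = new Array(1000).join("ab");       // 1998-character string
  var slice  = parent.substring(10, 900);        // expected to be a slice
  print(slice.length);                           // 890
  print(slice.charAt(0) === parent.charAt(10));  // true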
619 deps/v8/src/arm/full-codegen-arm.cc
@@ -193,14 +193,14 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// Copy any necessary parameters into the context.
int num_parameters = info->scope()->num_parameters();
for (int i = 0; i < num_parameters; i++) {
- Slot* slot = scope()->parameter(i)->AsSlot();
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
+ Variable* var = scope()->parameter(i);
+ if (var->IsContextSlot()) {
int parameter_offset = StandardFrameConstants::kCallerSPOffset +
(num_parameters - 1 - i) * kPointerSize;
// Load parameter from stack.
__ ldr(r0, MemOperand(fp, parameter_offset));
// Store it in the context.
- __ mov(r1, Operand(Context::SlotOffset(slot->index())));
+ __ mov(r1, Operand(Context::SlotOffset(var->index())));
__ str(r0, MemOperand(cp, r1));
// Update the write barrier. This clobbers all involved
// registers, so we have to use two more registers to avoid
@@ -244,7 +244,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
ArgumentsAccessStub stub(type);
__ CallStub(&stub);
- Move(arguments->AsSlot(), r0, r1, r2);
+ SetVar(arguments, r0, r1, r2);
}
if (FLAG_trace) {
@@ -258,17 +258,19 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
scope()->VisitIllegalRedeclaration(this);
} else {
+ PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
{ Comment cmnt(masm_, "[ Declarations");
// For named function expressions, declare the function name as a
// constant.
if (scope()->is_function_scope() && scope()->function() != NULL) {
- EmitDeclaration(scope()->function(), Variable::CONST, NULL);
+ int ignored = 0;
+ EmitDeclaration(scope()->function(), Variable::CONST, NULL, &ignored);
}
VisitDeclarations(scope()->declarations());
}
{ Comment cmnt(masm_, "[ Stack check");
- PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
+ PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
Label ok;
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
@@ -367,24 +369,28 @@ void FullCodeGenerator::EmitReturnSequence() {
}
-void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
+void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}
-void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
- codegen()->Move(result_register(), slot);
+void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ codegen()->GetVar(result_register(), var);
}
-void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
- codegen()->Move(result_register(), slot);
+void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ codegen()->GetVar(result_register(), var);
__ push(result_register());
}
-void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
+void FullCodeGenerator::TestContext::Plug(Variable* var) const {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
// For simplicity we always test the accumulator register.
- codegen()->Move(result_register(), slot);
+ codegen()->GetVar(result_register(), var);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
}
@@ -616,45 +622,54 @@ void FullCodeGenerator::Split(Condition cond,
}
-MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- return MemOperand(fp, SlotOffset(slot));
- case Slot::CONTEXT: {
- int context_chain_length =
- scope()->ContextChainLength(slot->var()->scope());
- __ LoadContext(scratch, context_chain_length);
- return ContextOperand(scratch, slot->index());
- }
- case Slot::LOOKUP:
- UNREACHABLE();
+MemOperand FullCodeGenerator::StackOperand(Variable* var) {
+ ASSERT(var->IsStackAllocated());
+ // Offset is negative because higher indexes are at lower addresses.
+ int offset = -var->index() * kPointerSize;
+ // Adjust by a (parameter or local) base offset.
+ if (var->IsParameter()) {
+ offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
+ } else {
+ offset += JavaScriptFrameConstants::kLocal0Offset;
+ }
+ return MemOperand(fp, offset);
+}
+
+
+MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ if (var->IsContextSlot()) {
+ int context_chain_length = scope()->ContextChainLength(var->scope());
+ __ LoadContext(scratch, context_chain_length);
+ return ContextOperand(scratch, var->index());
+ } else {
+ return StackOperand(var);
}
- UNREACHABLE();
- return MemOperand(r0, 0);
}
-void FullCodeGenerator::Move(Register destination, Slot* source) {
+void FullCodeGenerator::GetVar(Register dest, Variable* var) {
// Use destination as scratch.
- MemOperand slot_operand = EmitSlotSearch(source, destination);
- __ ldr(destination, slot_operand);
+ MemOperand location = VarOperand(var, dest);
+ __ ldr(dest, location);
}
-void FullCodeGenerator::Move(Slot* dst,
- Register src,
- Register scratch1,
- Register scratch2) {
- ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
- ASSERT(!scratch1.is(src) && !scratch2.is(src));
- MemOperand location = EmitSlotSearch(dst, scratch1);
+void FullCodeGenerator::SetVar(Variable* var,
+ Register src,
+ Register scratch0,
+ Register scratch1) {
+ ASSERT(var->IsContextSlot() || var->IsStackAllocated());
+ ASSERT(!scratch0.is(src));
+ ASSERT(!scratch0.is(scratch1));
+ ASSERT(!scratch1.is(src));
+ MemOperand location = VarOperand(var, scratch0);
__ str(src, location);
// Emit the write barrier code if the location is in the heap.
- if (dst->type() == Slot::CONTEXT) {
- __ RecordWrite(scratch1,
- Operand(Context::SlotOffset(dst->index())),
- scratch2,
+ if (var->IsContextSlot()) {
+ __ RecordWrite(scratch0,
+ Operand(Context::SlotOffset(var->index())),
+ scratch1,
src);
}
}
@@ -687,29 +702,33 @@ void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
}
-void FullCodeGenerator::EmitDeclaration(Variable* variable,
+void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
Variable::Mode mode,
- FunctionLiteral* function) {
- Comment cmnt(masm_, "[ Declaration");
- ASSERT(variable != NULL); // Must have been resolved.
- Slot* slot = variable->AsSlot();
- ASSERT(slot != NULL);
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (mode == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, MemOperand(fp, SlotOffset(slot)));
- } else if (function != NULL) {
+ FunctionLiteral* function,
+ int* global_count) {
+ // If it was not possible to allocate the variable at compile time, we
+ // need to "declare" it at runtime to make sure it actually exists in the
+ // local context.
+ Variable* variable = proxy->var();
+ switch (variable->location()) {
+ case Variable::UNALLOCATED:
+ ++(*global_count);
+ break;
+
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+ __ str(result_register(), StackOperand(variable));
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ Comment cmnt(masm_, "[ Declaration");
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ str(ip, StackOperand(variable));
}
break;
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
+ case Variable::CONTEXT:
// The variable in the decl always resides in the current function
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
@@ -721,23 +740,28 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
__ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
__ Check(ne, "Declaration in catch context.");
}
- if (mode == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, ContextOperand(cp, slot->index()));
- // No write barrier since the_hole_value is in old space.
- } else if (function != NULL) {
+ if (function != NULL) {
+ Comment cmnt(masm_, "[ Declaration");
VisitForAccumulatorValue(function);
- __ str(result_register(), ContextOperand(cp, slot->index()));
- int offset = Context::SlotOffset(slot->index());
+ __ str(result_register(), ContextOperand(cp, variable->index()));
+ int offset = Context::SlotOffset(variable->index());
// We know that we have written a function, which is not a smi.
__ mov(r1, Operand(cp));
__ RecordWrite(r1, Operand(offset), r2, result_register());
+ PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ Comment cmnt(masm_, "[ Declaration");
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ str(ip, ContextOperand(cp, variable->index()));
+ // No write barrier since the_hole_value is in old space.
+ PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
}
break;
- case Slot::LOOKUP: {
+ case Variable::LOOKUP: {
+ Comment cmnt(masm_, "[ Declaration");
__ mov(r2, Operand(variable->name()));
- // Declaration nodes are always introduced in one of two modes.
+ // Declaration nodes are always introduced in one of three modes.
ASSERT(mode == Variable::VAR ||
mode == Variable::CONST ||
mode == Variable::LET);
@@ -747,15 +771,15 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
- if (mode == Variable::CONST) {
- __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
- __ Push(cp, r2, r1, r0);
- } else if (function != NULL) {
+ if (function != NULL) {
__ Push(cp, r2, r1);
// Push initial value for function declaration.
VisitForStackValue(function);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+ __ Push(cp, r2, r1, r0);
} else {
- __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
+ __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
__ Push(cp, r2, r1, r0);
}
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
@@ -765,19 +789,16 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
}
-void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
- EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
-}
+void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
// Call the runtime to declare the globals.
// The context is the first argument.
- __ mov(r2, Operand(pairs));
- __ mov(r1, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
- __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
- __ Push(cp, r2, r1, r0);
- __ CallRuntime(Runtime::kDeclareGlobals, 4);
+ __ mov(r1, Operand(pairs));
+ __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
+ __ Push(cp, r1, r0);
+ __ CallRuntime(Runtime::kDeclareGlobals, 3);
// Return value is ignored.
}
@@ -1085,10 +1106,9 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
}
-void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow) {
+void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
+ TypeofState typeof_state,
+ Label* slow) {
Register current = cp;
Register next = r1;
Register temp = r2;
@@ -1135,7 +1155,7 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
}
__ ldr(r0, GlobalObjectOperand());
- __ mov(r2, Operand(slot->var()->name()));
+ __ mov(r2, Operand(var->name()));
RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
? RelocInfo::CODE_TARGET
: RelocInfo::CODE_TARGET_CONTEXT;
@@ -1144,15 +1164,14 @@ void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
}
-MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
- Slot* slot,
- Label* slow) {
- ASSERT(slot->type() == Slot::CONTEXT);
+MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
+ Label* slow) {
+ ASSERT(var->IsContextSlot());
Register context = cp;
Register next = r3;
Register temp = r4;
- for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
+ for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
if (s->num_heap_slots() > 0) {
if (s->calls_eval()) {
// Check that extension is NULL.
@@ -1173,59 +1192,30 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
// This function is used only for loads, not stores, so it's safe to
// return an cp-based operand (the write barrier cannot be allowed to
// destroy the cp register).
- return ContextOperand(context, slot->index());
+ return ContextOperand(context, var->index());
}
-void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow,
- Label* done) {
+void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
+ TypeofState typeof_state,
+ Label* slow,
+ Label* done) {
// Generate fast-case code for variables that might be shadowed by
// eval-introduced variables. Eval is used a lot without
// introducing variables. In those cases, we do not want to
// perform a runtime call for all variables in the scope
// containing the eval.
- if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
- EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
+ if (var->mode() == Variable::DYNAMIC_GLOBAL) {
+ EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
__ jmp(done);
- } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
- Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
- Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
- if (potential_slot != NULL) {
- // Generate fast case for locals that rewrite to slots.
- __ ldr(r0, ContextSlotOperandCheckExtensions(potential_slot, slow));
- if (potential_slot->var()->mode() == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r0, ip);
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
- }
- __ jmp(done);
- } else if (rewrite != NULL) {
- // Generate fast case for calls of an argument function.
- Property* property = rewrite->AsProperty();
- if (property != NULL) {
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- Literal* key_literal = property->key()->AsLiteral();
- if (obj_proxy != NULL &&
- key_literal != NULL &&
- obj_proxy->IsArguments() &&
- key_literal->handle()->IsSmi()) {
- // Load arguments object if there are no eval-introduced
- // variables. Then load the argument from the arguments
- // object using keyed load.
- __ ldr(r1,
- ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
- slow));
- __ mov(r0, Operand(key_literal->handle()));
- Handle<Code> ic =
- isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
- __ jmp(done);
- }
- }
+ } else if (var->mode() == Variable::DYNAMIC_LOCAL) {
+ Variable* local = var->local_if_not_shadowed();
+ __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
+ if (local->mode() == Variable::CONST) {
+ __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
}
+ __ jmp(done);
}
}
@@ -1235,52 +1225,60 @@ void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
SetSourcePosition(proxy->position());
Variable* var = proxy->var();
- // Three cases: non-this global variables, lookup slots, and all other
- // types of slots.
- Slot* slot = var->AsSlot();
- ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
-
- if (slot == NULL) {
- Comment cmnt(masm_, "Global variable");
- // Use inline caching. Variable name is passed in r2 and the global
- // object (receiver) in r0.
- __ ldr(r0, GlobalObjectOperand());
- __ mov(r2, Operand(var->name()));
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
- context()->Plug(r0);
-
- } else if (slot->type() == Slot::LOOKUP) {
- Label done, slow;
-
- // Generate code for loading from variables potentially shadowed
- // by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
-
- __ bind(&slow);
- Comment cmnt(masm_, "Lookup slot");
- __ mov(r1, Operand(var->name()));
- __ Push(cp, r1); // Context and name.
- __ CallRuntime(Runtime::kLoadContextSlot, 2);
- __ bind(&done);
+ // Three cases: global variables, lookup variables, and all other types of
+ // variables.
+ switch (var->location()) {
+ case Variable::UNALLOCATED: {
+ Comment cmnt(masm_, "Global variable");
+ // Use inline caching. Variable name is passed in r2 and the global
+ // object (receiver) in r0.
+ __ ldr(r0, GlobalObjectOperand());
+ __ mov(r2, Operand(var->name()));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ context()->Plug(r0);
+ break;
+ }
- context()->Plug(r0);
+ case Variable::PARAMETER:
+ case Variable::LOCAL:
+ case Variable::CONTEXT: {
+ Comment cmnt(masm_, var->IsContextSlot()
+ ? "Context variable"
+ : "Stack variable");
+ if (var->mode() != Variable::LET && var->mode() != Variable::CONST) {
+ context()->Plug(var);
+ } else {
+ // Let and const need a read barrier.
+ GetVar(r0, var);
+ __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
+ if (var->mode() == Variable::LET) {
+ Label done;
+ __ b(ne, &done);
+ __ mov(r0, Operand(var->name()));
+ __ push(r0);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&done);
+ } else {
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+ }
+ context()->Plug(r0);
+ }
+ break;
+ }
- } else {
- Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
- ? "Context slot"
- : "Stack slot");
- if (var->mode() == Variable::CONST) {
- // Constants may be the hole value if they have not been initialized.
- // Unhole them.
- MemOperand slot_operand = EmitSlotSearch(slot, r0);
- __ ldr(r0, slot_operand);
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r0, ip);
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
+ case Variable::LOOKUP: {
+ Label done, slow;
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
+ __ bind(&slow);
+ Comment cmnt(masm_, "Lookup variable");
+ __ mov(r1, Operand(var->name()));
+ __ Push(cp, r1); // Context and name.
+ __ CallRuntime(Runtime::kLoadContextSlot, 2);
+ __ bind(&done);
context()->Plug(r0);
- } else {
- context()->Plug(slot);
}
}
}
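
EmitVariableLoad now switches on the variable's location, and only let and const loads pay for a read barrier: an uninitialized binding holds the hole sentinel, which a let read turns into a thrown ReferenceError while a (legacy) const read turns into undefined. A compact C++ model of that barrier, assuming std::nullopt as the hole and 0 as undefined (the names and types are invented for illustration, not V8 code):

    #include <iostream>
    #include <optional>
    #include <stdexcept>
    #include <string>

    enum class Mode { kVar, kLet, kConst };

    // Models the hole check emitted above: CompareRoot against the hole,
    // then either throw (let) or substitute undefined (const).
    int LoadVariable(std::optional<int> slot, Mode mode, const std::string& name) {
      if (!slot.has_value()) {
        if (mode == Mode::kLet) {
          throw std::runtime_error("ReferenceError: " + name);
        }
        if (mode == Mode::kConst) return 0;  // unhole legacy const to undefined
      }
      return slot.value_or(0);
    }

    int main() {
      std::optional<int> uninitialized;      // still holds the hole
      try {
        LoadVariable(uninitialized, Mode::kLet, "x");
      } catch (const std::runtime_error& e) {
        std::cout << e.what() << "\n";       // ReferenceError: x
      }
      return 0;
    }
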
@@ -1814,14 +1812,8 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
- ASSERT(var != NULL);
- ASSERT(var->is_global() || var->AsSlot() != NULL);
-
- if (var->is_global()) {
- ASSERT(!var->is_this());
- // Assignment to a global variable. Use inline caching for the
- // assignment. Right-hand-side value is passed in r0, variable name in
- // r2, and the global object in r1.
+ if (var->IsUnallocated()) {
+ // Global var, const, or let.
__ mov(r2, Operand(var->name()));
__ ldr(r1, GlobalObjectOperand());
Handle<Code> ic = is_strict_mode()
@@ -1830,67 +1822,83 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
} else if (op == Token::INIT_CONST) {
- // Like var declarations, const declarations are hoisted to function
- // scope. However, unlike var initializers, const initializers are able
- // to drill a hole to that function context, even from inside a 'with'
- // context. We thus bypass the normal static scope lookup.
- Slot* slot = var->AsSlot();
- Label skip;
- switch (slot->type()) {
- case Slot::PARAMETER:
- // No const parameters.
- UNREACHABLE();
- break;
- case Slot::LOCAL:
- // Detect const reinitialization by checking for the hole value.
- __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r1, ip);
- __ b(ne, &skip);
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
- break;
- case Slot::CONTEXT:
- case Slot::LOOKUP:
- __ push(r0);
- __ mov(r0, Operand(slot->var()->name()));
- __ Push(cp, r0); // Context and name.
- __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
- break;
+ // Const initializers need a write barrier.
+ ASSERT(!var->IsParameter()); // No const parameters.
+ if (var->IsStackLocal()) {
+ Label skip;
+ __ ldr(r1, StackOperand(var));
+ __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
+ __ b(ne, &skip);
+ __ str(result_register(), StackOperand(var));
+ __ bind(&skip);
+ } else {
+ ASSERT(var->IsContextSlot() || var->IsLookupSlot());
+ // Like var declarations, const declarations are hoisted to function
+ // scope. However, unlike var initializers, const initializers are
+ // able to drill a hole to that function context, even from inside a
+ // 'with' context. We thus bypass the normal static scope lookup for
+ // var->IsContextSlot().
+ __ push(r0);
+ __ mov(r0, Operand(var->name()));
+ __ Push(cp, r0); // Context and name.
+ __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
}
- __ bind(&skip);
- } else if (var->mode() != Variable::CONST) {
- // Perform the assignment for non-const variables. Const assignments
- // are simply skipped.
- Slot* slot = var->AsSlot();
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- // Perform the assignment.
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
- break;
-
- case Slot::CONTEXT: {
- MemOperand target = EmitSlotSearch(slot, r1);
- // Perform the assignment and issue the write barrier.
- __ str(result_register(), target);
+ } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+ // Non-initializing assignment to let variable needs a write barrier.
+ if (var->IsLookupSlot()) {
+ __ push(r0); // Value.
+ __ mov(r1, Operand(var->name()));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r1, r0); // Context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ } else {
+ ASSERT(var->IsStackAllocated() || var->IsContextSlot());
+ Label assign;
+ MemOperand location = VarOperand(var, r1);
+ __ ldr(r3, location);
+ __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
+ __ b(ne, &assign);
+ __ mov(r3, Operand(var->name()));
+ __ push(r3);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ str(result_register(), location);
+ if (var->IsContextSlot()) {
// RecordWrite may destroy all its register arguments.
__ mov(r3, result_register());
- int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
+ int offset = Context::SlotOffset(var->index());
__ RecordWrite(r1, Operand(offset), r2, r3);
- break;
}
+ }
- case Slot::LOOKUP:
- // Call the runtime for the assignment.
- __ push(r0); // Value.
- __ mov(r1, Operand(slot->var()->name()));
- __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
- __ Push(cp, r1, r0); // Context, name, strict mode.
- __ CallRuntime(Runtime::kStoreContextSlot, 4);
- break;
+ } else if (var->mode() != Variable::CONST) {
+ // Assignment to var or initializing assignment to let.
+ if (var->IsStackAllocated() || var->IsContextSlot()) {
+ MemOperand location = VarOperand(var, r1);
+ if (FLAG_debug_code && op == Token::INIT_LET) {
+ // Check for an uninitialized let binding.
+ __ ldr(r2, location);
+ __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
+ __ Check(eq, "Let binding re-initialization.");
+ }
+ // Perform the assignment.
+ __ str(r0, location);
+ if (var->IsContextSlot()) {
+ __ mov(r3, r0);
+ __ RecordWrite(r1, Operand(Context::SlotOffset(var->index())), r2, r3);
+ }
+ } else {
+ ASSERT(var->IsLookupSlot());
+ __ push(r0); // Value.
+ __ mov(r1, Operand(var->name()));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r1, r0); // Context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
}
}
+ // Non-initializing assignments to consts are ignored.
}
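
The assignment path makes the matching distinctions: INIT_CONST stores only into a slot that still holds the hole, a non-initializing assignment to a let throws if the binding is still the hole, a plain var store (or let initialization) writes unconditionally, and non-initializing const stores are dropped. A hedged C++ model of those rules, again with std::nullopt standing in for the hole (nothing here is V8 API):

    #include <cassert>
    #include <optional>
    #include <stdexcept>
    #include <string>

    enum class Mode { kVar, kLet, kConst };
    enum class Op { kAssign, kInitLet, kInitConst };

    void Assign(std::optional<int>& slot, Mode mode, Op op, int value,
                const std::string& name) {
      if (op == Op::kInitConst) {
        if (!slot.has_value()) slot = value;  // only ever store into a hole
        return;
      }
      if (mode == Mode::kConst) return;       // non-initializing const: ignored
      if (mode == Mode::kLet && op == Op::kAssign && !slot.has_value()) {
        throw std::runtime_error("ReferenceError: " + name);  // assigned before init
      }
      slot = value;                           // var store, or let (re)assignment
    }

    int main() {
      std::optional<int> c;                       // a const slot, still the hole
      Assign(c, Mode::kConst, Op::kInitConst, 1, "c");
      Assign(c, Mode::kConst, Op::kAssign, 2, "c");  // silently ignored
      assert(c == 1);
      return 0;
    }
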
@@ -2100,8 +2108,13 @@ void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
int receiver_offset = 2 + info_->scope()->num_parameters();
__ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
__ push(r1);
- // Push the strict mode flag.
- __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ // Push the strict mode flag. In harmony mode every eval call
+ // is a strict mode eval call.
+ StrictModeFlag strict_mode = strict_mode_flag();
+ if (FLAG_harmony_block_scoping) {
+ strict_mode = kStrictMode;
+ }
+ __ mov(r1, Operand(Smi::FromInt(strict_mode)));
__ push(r1);
__ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
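
The flag pushed for %ResolvePossiblyDirectEval is thus no longer always the caller's own strict mode: with --harmony-block-scoping enabled, every direct eval is resolved as a strict mode eval. The choice reduces to a one-liner (the helper below is illustrative, not a V8 function; the enumerator names match the ones used in this file):

    enum StrictModeFlag { kNonStrictMode, kStrictMode };

    // With harmony block scoping, direct eval is always strict.
    StrictModeFlag EvalStrictness(StrictModeFlag current,
                                  bool harmony_block_scoping) {
      return harmony_block_scoping ? kStrictMode : current;
    }
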
@@ -2118,10 +2131,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
#endif
Comment cmnt(masm_, "[ Call");
- Expression* fun = expr->expression();
- Variable* var = fun->AsVariableProxy()->AsVariable();
+ Expression* callee = expr->expression();
+ VariableProxy* proxy = callee->AsVariableProxy();
+ Property* property = callee->AsProperty();
- if (var != NULL && var->is_possibly_eval()) {
+ if (proxy != NULL && proxy->var()->is_possibly_eval()) {
// In a call to eval, we first call %ResolvePossiblyDirectEval to
// resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
@@ -2130,7 +2144,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
int arg_count = args->length();
{ PreservePositionScope pos_scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
__ push(r2); // Reserved receiver slot.
@@ -2144,11 +2158,10 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// in generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
Label done;
- if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ Variable* var = proxy->var();
+ if (!var->IsUnallocated() && var->mode() == Variable::DYNAMIC_GLOBAL) {
Label slow;
- EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow);
+ EmitLoadGlobalCheckExtensions(var, NOT_INSIDE_TYPEOF, &slow);
// Push the function and resolve eval.
__ push(r0);
EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
@@ -2156,14 +2169,12 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ bind(&slow);
}
- // Push copy of the function (found below the arguments) and
+ // Push a copy of the function (found below the arguments) and
// resolve eval.
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ push(r1);
EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
- if (done.is_linked()) {
- __ bind(&done);
- }
+ __ bind(&done);
// The runtime call returns a pair of values in r0 (function) and
// r1 (receiver). Touch up the stack with the right values.
@@ -2180,30 +2191,26 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->DropAndPlug(1, r0);
- } else if (var != NULL && !var->is_this() && var->is_global()) {
+ } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
// Push global object as receiver for the call IC.
__ ldr(r0, GlobalObjectOperand());
__ push(r0);
- EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
- } else if (var != NULL && var->AsSlot() != NULL &&
- var->AsSlot()->type() == Slot::LOOKUP) {
+ EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
+ } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
{ PreservePositionScope scope(masm()->positions_recorder());
// Generate code for loading from variables potentially shadowed
// by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow,
- &done);
+ EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
}
__ bind(&slow);
// Call the runtime to find the function to call (returned in r0)
// and the object holding it (returned in r1).
__ push(context_register());
- __ mov(r2, Operand(var->name()));
+ __ mov(r2, Operand(proxy->name()));
__ push(r2);
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ Push(r0, r1); // Function, receiver.
@@ -2228,26 +2235,21 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// by LoadContextSlot. That object could be the hole if the
// receiver is implicitly the global object.
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
- } else if (fun->AsProperty() != NULL) {
- // Call to an object property.
- Property* prop = fun->AsProperty();
- Literal* key = prop->key()->AsLiteral();
- if (key != NULL && key->handle()->IsSymbol()) {
- // Call to a named property, use call IC.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
+ } else if (property != NULL) {
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(property->obj());
+ }
+ if (property->key()->IsPropertyName()) {
+ EmitCallWithIC(expr,
+ property->key()->AsLiteral()->handle(),
+ RelocInfo::CODE_TARGET);
} else {
- // Call to a keyed property.
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ EmitKeyedCallWithIC(expr, property->key());
}
} else {
+ // Call to an arbitrary expression not handled specially above.
{ PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(fun);
+ VisitForStackValue(callee);
}
// Load global receiver object.
__ ldr(r1, GlobalObjectOperand());
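
Taken together, VisitCall now dispatches on the callee expression in a fixed order: possibly-eval calls first, then unallocated (global) variables, then lookup variables, then property calls (split on whether the key is a property name), and finally arbitrary expressions. The order matters because the tests run top to bottom, so an eval-like callee wins over a plain global. A C++ sketch of that dispatch (illustrative types only, not V8's):

    #include <cassert>

    enum class Callee { kPossiblyEval, kGlobalVariable, kLookupVariable,
                        kNamedProperty, kKeyedProperty, kArbitraryExpression };

    Callee Classify(bool is_var_proxy, bool possibly_eval, bool unallocated,
                    bool lookup_slot, bool is_property, bool key_is_name) {
      if (is_var_proxy && possibly_eval) return Callee::kPossiblyEval;
      if (is_var_proxy && unallocated)   return Callee::kGlobalVariable;
      if (is_var_proxy && lookup_slot)   return Callee::kLookupVariable;
      if (is_property) {
        return key_is_name ? Callee::kNamedProperty : Callee::kKeyedProperty;
      }
      return Callee::kArbitraryExpression;
    }

    int main() {
      assert(Classify(true, true, true, false, false, false) == Callee::kPossiblyEval);
      assert(Classify(false, false, false, false, true, true) == Callee::kNamedProperty);
      return 0;
    }
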
@@ -3194,7 +3196,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
Label done, not_found;
// tmp now holds finger offset as a smi.
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
// r2 now holds finger offset as a smi.
__ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
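
Both operands of the smi-tag check are compile-time constants, so the change promotes the runtime ASSERT to STATIC_ASSERT: the invariant is enforced by the compiler instead of costing a debug-only test per execution. A standard-C++ equivalent (V8's STATIC_ASSERT macro predates C++11 static_assert but has the same effect, which is an assumption about this era of the codebase):

    const int kSmiTag = 0;
    const int kSmiTagSize = 1;
    static_assert(kSmiTag == 0 && kSmiTagSize == 1,
                  "smi tagging scheme assumed by the cache lookup code");
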
@@ -3569,32 +3571,32 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
switch (expr->op()) {
case Token::DELETE: {
Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
- Property* prop = expr->expression()->AsProperty();
- Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
+ Property* property = expr->expression()->AsProperty();
+ VariableProxy* proxy = expr->expression()->AsVariableProxy();
- if (prop != NULL) {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
+ if (property != NULL) {
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
__ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
__ push(r1);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(r0);
- } else if (var != NULL) {
+ } else if (proxy != NULL) {
+ Variable* var = proxy->var();
// Delete of an unqualified identifier is disallowed in strict mode
- // but "delete this" is.
+ // but "delete this" is allowed.
ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
- if (var->is_global()) {
+ if (var->IsUnallocated()) {
__ ldr(r2, GlobalObjectOperand());
__ mov(r1, Operand(var->name()));
__ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
__ Push(r2, r1, r0);
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(r0);
- } else if (var->AsSlot() != NULL &&
- var->AsSlot()->type() != Slot::LOOKUP) {
+ } else if (var->IsStackAllocated() || var->IsContextSlot()) {
// Result of deleting non-global, non-dynamic variables is false.
// The subexpression does not have side effects.
- context()->Plug(false);
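
The DELETE branch distinguishes three targets: property deletes and deletes of unallocated (global) variables defer to Builtins::DELETE at runtime, while deletes of stack- or context-allocated bindings are statically known to fail and simply produce false. An illustrative C++ model of that split (not V8 code):

    #include <cassert>
    #include <optional>

    enum class Target { kProperty, kGlobalVariable, kStackOrContextVariable };

    // std::nullopt means "the DELETE builtin decides at runtime"; a value
    // means the result is known at compile time.
    std::optional<bool> StaticDeleteResult(Target target) {
      if (target == Target::kStackOrContextVariable) return false;
      return std::nullopt;
    }

    int main() {
      assert(StaticDeleteResult(Target::kStackOrContextVariable).value() == false);
      assert(!StaticDeleteResult(Target::kProperty).has_value());
      return 0;
    }
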