
/src/qt/qtwebkit/Source/JavaScriptCore/jit/JITOpcodes.cpp

https://gitlab.com/x33n/phantomjs
C++ | 1342 lines | 1017 code | 235 blank | 90 comment
/*
 * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
 * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "Arguments.h"
#include "CopiedSpaceInlines.h"
#include "Heap.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSCell.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "LinkBuffer.h"

namespace JSC {

#if USE(JSVALUE64)
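
// Note on the JSVALUE64 value representation used throughout this file: cell
// (heap) pointers have their top sixteen bits clear, integers are tagged by
// OR-ing in TagTypeNumber (0xFFFF000000000000, kept live in
// tagTypeNumberRegister), and the remaining immediates are small constants
// built from TagBitTypeOther (0x2), TagBitBool (0x4) and TagBitUndefined
// (0x8): null is 0x02, false 0x06, true 0x07, undefined 0x0A. The bit tricks
// in the emitters below all follow from this layout.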

JIT::CodeRef JIT::privateCompileCTINativeCall(VM* vm, NativeFunction)
{
    return vm->getCTIStub(nativeCallGenerator);
}

void JIT::emit_op_mov(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    if (canBeOptimizedOrInlined()) {
        // Use a simpler approach, since the DFG thinks that the last result register
        // is always set to the destination on every operation.
        emitGetVirtualRegister(src, regT0);
        emitPutVirtualRegister(dst);
    } else {
        if (m_codeBlock->isConstantRegisterIndex(src)) {
            if (!getConstantOperand(src).isNumber())
                store64(TrustedImm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            else
                store64(Imm64(JSValue::encode(getConstantOperand(src))), Address(callFrameRegister, dst * sizeof(Register)));
            if (dst == m_lastResultBytecodeRegister)
                killLastResultRegister();
        } else if ((src == m_lastResultBytecodeRegister) || (dst == m_lastResultBytecodeRegister)) {
            // If either the src or dst is the cached register, go through
            // get/put registers to make sure we track this correctly.
            emitGetVirtualRegister(src, regT0);
            emitPutVirtualRegister(dst);
        } else {
            // Perform the copy via regT1; do not disturb any mapping in regT0.
            load64(Address(callFrameRegister, src * sizeof(Register)), regT1);
            store64(regT1, Address(callFrameRegister, dst * sizeof(Register)));
        }
    }
}

void JIT::emit_op_end(Instruction* currentInstruction)
{
    RELEASE_ASSERT(returnValueRegister != callFrameRegister);
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    restoreReturnAddressBeforeReturn(Address(callFrameRegister, JSStack::ReturnPC * static_cast<int>(sizeof(Register))));
    ret();
}

void JIT::emit_op_jmp(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[1].u.operand;
    addJump(jump(), target);
}

void JIT::emit_op_new_object(Instruction* currentInstruction)
{
    Structure* structure = currentInstruction[3].u.objectAllocationProfile->structure();
    size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity());
    MarkedAllocator* allocator = &m_vm->heap.allocatorForObjectWithoutDestructor(allocationSize);

    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID scratchReg = regT2;

    move(TrustedImmPtr(allocator), allocatorReg);
    emitAllocateJSObject(allocatorReg, TrustedImmPtr(structure), resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_new_object);
    stubCall.addArgument(TrustedImmPtr(currentInstruction[3].u.objectAllocationProfile->structure()));
    stubCall.call(currentInstruction[1].u.operand);
}
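
// op_check_has_instance runs before op_instanceof: it bails to the slow path
// unless baseVal is a cell whose structure carries ImplementsDefaultHasInstance,
// which is what licenses the inline prototype-chain walk in emit_op_instanceof
// below.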

void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
{
    unsigned baseVal = currentInstruction[3].u.operand;

    emitGetVirtualRegister(baseVal, regT0);

    // Check that baseVal is a cell.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);

    // Check that baseVal 'ImplementsHasInstance'.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
}

void JIT::emit_op_instanceof(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    // Load the operands (baseVal, proto, and value respectively) into registers.
    // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
    emitGetVirtualRegister(value, regT2);
    emitGetVirtualRegister(proto, regT1);

    // Check that value and proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
    emitJumpSlowCaseIfNotJSCell(regT1, proto);

    // Check that proto is an object.
    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
    addSlowCase(emitJumpIfNotObject(regT3));

    // Optimistically load the result true, and start looping.
    // Initially, regT1 still contains proto and regT2 still contains value.
    // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
    Label loop(this);

    // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
    // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    Jump isInstance = branchPtr(Equal, regT2, regT1);
    emitJumpIfJSCell(regT2).linkTo(loop, this);

    // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
    move(TrustedImm64(JSValue::encode(jsBoolean(false))), regT0);

    // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
    isInstance.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_undefined(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isCell = emitJumpIfJSCell(regT0);

    compare64(Equal, regT0, TrustedImm32(ValueUndefined), regT0);
    Jump done = jump();

    isCell.link(this);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump notMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
    comparePtr(Equal, regT0, regT1, regT0);
    notMasqueradesAsUndefined.link(this);

    done.link(this);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
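
// XOR-ing with ValueFalse (0x06) maps false to 0 and true to 1, while any
// non-boolean leaves bits above the lowest one set; testing those bits (~1)
// for zero is therefore exactly the "is a boolean" predicate.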

void JIT::emit_op_is_boolean(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}
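
// Integers and doubles both set at least one of the top sixteen tag bits;
// cells and the other immediates set none of them, so a single test against
// tagTypeNumberRegister answers "is a number".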

void JIT::emit_op_is_number(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    test64(NonZero, regT0, tagTypeNumberRegister, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_is_string(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;

    emitGetVirtualRegister(value, regT0);
    Jump isNotCell = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
    emitTagAsBoolImmediate(regT0);
    Jump done = jump();

    isNotCell.link(this);
    move(TrustedImm32(ValueFalse), regT0);

    done.link(this);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}
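
// Activation and arguments objects are materialized lazily, so their locals
// hold the empty (all-zero) value until first created; the branchTest64(Zero)
// checks below skip the tear-off stub calls when there is nothing to tear off.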

void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
{
    int activation = currentInstruction[1].u.operand;

    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
    JITStubCall stubCall(this, cti_op_tear_off_activation);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    activationNotCreated.link(this);
}

void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int activation = currentInstruction[2].u.operand;

    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(arguments))));
    JITStubCall stubCall(this, cti_op_tear_off_arguments);
    stubCall.addArgument(unmodifiedArgumentsRegister(arguments), regT2);
    stubCall.addArgument(activation, regT2);
    stubCall.call();
    argsNotCreated.link(this);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    ASSERT(callFrameRegister != regT1);
    ASSERT(regT1 != returnValueRegister);
    ASSERT(returnValueRegister != callFrameRegister);

    // Return the result in %eax.
    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueRegister);
    Jump notJSCell = emitJumpIfNotJSCell(returnValueRegister);
    loadPtr(Address(returnValueRegister, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();

    // Return 'this' in %eax.
    notJSCell.link(this);
    notObject.link(this);
    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueRegister);

    // Grab the return address.
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT1);

    // Restore our caller's "r".
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    // Return.
    restoreReturnAddressBeforeReturn(regT1);
    ret();
}
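
// Strings are the only primitive values represented as cells, so to_primitive
// is a no-op for non-cells and for cells with the string structure; any other
// cell takes the slow path.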

void JIT::emit_op_to_primitive(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int src = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);

    Jump isImm = emitJumpIfNotJSCell(regT0);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
    isImm.link(this);

    if (dst != src)
        emitPutVirtualRegister(dst);
}

void JIT::emit_op_strcat(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_strcat);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_not(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);

    // Invert against JSValue(false); if the value was tagged as a boolean, then all bits will be
    // clear other than the low bit (which will be 0 or 1 for false or true inputs respectively).
    // Then invert against JSValue(true), which will add the tag back in, and flip the low bit.
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    addSlowCase(branchTestPtr(NonZero, regT0, TrustedImm32(static_cast<int32_t>(~1))));
    xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jfalse(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
    Jump isNonZero = emitJumpIfImmediateInteger(regT0);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));

    isNonZero.link(this);
}

void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump masqueradesGlobalObjectIsForeign = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    isNotMasqueradesAsUndefined.link(this);
    masqueradesGlobalObjectIsForeign.link(this);
}

void JIT::emit_op_jneq_null(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
    Jump wasNotImmediate = jump();

    // Now handle the immediate cases - undefined & null.
    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    addJump(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsNull()))), target);

    wasNotImmediate.link(this);
}

void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
{
    unsigned src = currentInstruction[1].u.operand;
    Special::Pointer ptr = currentInstruction[2].u.specialPointer;
    unsigned target = currentInstruction[3].u.operand;

    emitGetVirtualRegister(src, regT0);
    addJump(branchPtr(NotEqual, regT0, TrustedImmPtr(actualPointerFor(m_codeBlock, ptr))), target);
}

void JIT::emit_op_eq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(Equal, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_jtrue(Instruction* currentInstruction)
{
    unsigned target = currentInstruction[2].u.operand;

    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
    addJump(emitJumpIfImmediateInteger(regT0), target);

    addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
    addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));

    isZero.link(this);
}

void JIT::emit_op_neq(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    compare32(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitxor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    xor64(regT1, regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_bitor(Instruction* currentInstruction)
{
    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
    or64(regT1, regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_throw(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
    ASSERT(regT0 == returnValueRegister);
#ifndef NDEBUG
    // cti_op_throw always changes its return address,
    // so this point in the code should never be reached.
    breakpoint();
#endif
}
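
// The next two emitters implement for-in. op_get_pnames obtains a
// JSPropertyNameIterator for the base (converting it to an object first if
// necessary), and op_next_pname walks the cached name list, revalidating the
// base's structure and prototype chain on every step so that a shape change
// falls back to a genuine hasProperty query via cti_has_property.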

void JIT::emit_op_get_pnames(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int breakTarget = currentInstruction[5].u.operand;

    JumpList isNotObject;

    emitGetVirtualRegister(base, regT0);
    if (!m_codeBlock->isKnownNotImmediate(base))
        isNotObject.append(emitJumpIfNotJSCell(regT0));
    if (base != m_codeBlock->thisRegister() || m_codeBlock->isStrictMode()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        isNotObject.append(emitJumpIfNotObject(regT2));
    }

    // We could inline the case where you have a valid cache, but
    // this call doesn't seem to be hot.
    Label isObject(this);
    JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
    getPnamesStubCall.addArgument(regT0);
    getPnamesStubCall.call(dst);
    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
    store64(tagTypeNumberRegister, addressFor(i));
    store32(TrustedImm32(Int32Tag), intTagFor(size));
    store32(regT3, intPayloadFor(size));
    Jump end = jump();

    isNotObject.link(this);
    move(regT0, regT1);
    and32(TrustedImm32(~TagBitUndefined), regT1);
    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);

    JITStubCall toObjectStubCall(this, cti_to_object);
    toObjectStubCall.addArgument(regT0);
    toObjectStubCall.call(base);
    jump().linkTo(isObject, this);

    end.link(this);
}

void JIT::emit_op_next_pname(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int i = currentInstruction[3].u.operand;
    int size = currentInstruction[4].u.operand;
    int it = currentInstruction[5].u.operand;
    int target = currentInstruction[6].u.operand;

    JumpList callHasProperty;

    Label begin(this);
    load32(intPayloadFor(i), regT0);
    Jump end = branch32(Equal, regT0, intPayloadFor(size));

    // Grab key @ i
    loadPtr(addressFor(it), regT1);
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
    load64(BaseIndex(regT2, regT0, TimesEight), regT2);
    emitPutVirtualRegister(dst, regT2);

    // Increment i
    add32(TrustedImm32(1), regT0);
    store32(regT0, intPayloadFor(i));

    // Verify that i is valid:
    emitGetVirtualRegister(base, regT0);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));

    // Test base's prototype chain
    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain)), regT3);
    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
    addJump(branchTestPtr(Zero, Address(regT3)), target);

    Label checkPrototype(this);
    load64(Address(regT2, Structure::prototypeOffset()), regT2);
    callHasProperty.append(emitJumpIfNotJSCell(regT2));
    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);

    // Continue loop.
    addJump(jump(), target);

    // Slow case: Ask the object if i is valid.
    callHasProperty.link(this);
    emitGetVirtualRegister(dst, regT1);
    JITStubCall stubCall(this, cti_has_property);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();

    // Test for valid key.
    addJump(branchTest32(NonZero, regT0), target);
    jump().linkTo(begin, this);

    // End of loop.
    end.link(this);
}

void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_with_scope);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_pop_scope(Instruction*)
{
    JITStubCall(this, cti_op_pop_scope).call();
}

void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;
    unsigned src2 = currentInstruction[3].u.operand;

    emitGetVirtualRegisters(src1, regT0, src2, regT1);

    // Jump slow if both are cells (to cover strings).
    move(regT0, regT2);
    or64(regT1, regT2);
    addSlowCase(emitJumpIfJSCell(regT2));

    // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
    // if it's a double.
    Jump leftOK = emitJumpIfImmediateInteger(regT0);
    addSlowCase(emitJumpIfImmediateNumber(regT0));
    leftOK.link(this);
    Jump rightOK = emitJumpIfImmediateInteger(regT1);
    addSlowCase(emitJumpIfImmediateNumber(regT1));
    rightOK.link(this);

    if (type == OpStrictEq)
        compare64(Equal, regT1, regT0, regT0);
    else
        compare64(NotEqual, regT1, regT0, regT0);
    emitTagAsBoolImmediate(regT0);

    emitPutVirtualRegister(dst);
}

void JIT::emit_op_stricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpStrictEq);
}

void JIT::emit_op_nstricteq(Instruction* currentInstruction)
{
    compileOpStrictEq(currentInstruction, OpNStrictEq);
}

void JIT::emit_op_to_number(Instruction* currentInstruction)
{
    int srcVReg = currentInstruction[2].u.operand;
    emitGetVirtualRegister(srcVReg, regT0);

    addSlowCase(emitJumpIfNotImmediateNumber(regT0));

    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_push_name_scope);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[1].u.operand)));
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call();
}
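
// On entry to a catch handler, the throw machinery has left the unwound
// CallFrame* in regT0; we adopt it as the current call frame, then load the
// pending exception out of the VM and clear it so it cannot be observed twice.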

void JIT::emit_op_catch(Instruction* currentInstruction)
{
    killLastResultRegister(); // FIXME: Implicitly treat op_catch as a labeled statement, and remove this line of code.
    move(regT0, callFrameRegister);
    peek(regT3, OBJECT_OFFSETOF(struct JITStackFrame, vm) / sizeof(void*));
    load64(Address(regT3, OBJECT_OFFSETOF(VM, exception)), regT0);
    store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, OBJECT_OFFSETOF(VM, exception)));
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}
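
// The three switch emitters below share one shape: record the jump table in
// m_switches so that linking can fill in a machine-code offset per case, then
// call a stub that selects the destination at run time and returns it in regT0
// for an indirect jump.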

void JIT::emit_op_switch_imm(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_imm);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_char(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());

    JITStubCall stubCall(this, cti_op_switch_char);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_switch_string(Instruction* currentInstruction)
{
    unsigned tableIndex = currentInstruction[1].u.operand;
    unsigned defaultOffset = currentInstruction[2].u.operand;
    unsigned scrutinee = currentInstruction[3].u.operand;

    // create jump table for switch destinations, track this switch statement.
    StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
    m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset));

    JITStubCall stubCall(this, cti_op_switch_string);
    stubCall.addArgument(scrutinee, regT2);
    stubCall.addArgument(TrustedImm32(tableIndex));
    stubCall.call();
    jump(regT0);
}

void JIT::emit_op_throw_static_error(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_throw_static_error);
    if (!m_codeBlock->getConstant(currentInstruction[1].u.operand).isNumber())
        stubCall.addArgument(TrustedImm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    else
        stubCall.addArgument(Imm64(JSValue::encode(m_codeBlock->getConstant(currentInstruction[1].u.operand))));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.call();
}

void JIT::emit_op_debug(Instruction* currentInstruction)
{
#if ENABLE(DEBUG_WITH_BREAKPOINT)
    UNUSED_PARAM(currentInstruction);
    breakpoint();
#else
    JITStubCall stubCall(this, cti_op_debug);
    stubCall.addArgument(TrustedImm32(currentInstruction[1].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
    stubCall.call();
#endif
}
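
// eq_null and neq_null must honour MasqueradesAsUndefined: an object whose
// structure carries that flag compares equal to null and undefined, but only
// when it belongs to the same global object as the code performing the
// comparison.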

void JIT::emit_op_eq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(0), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(Equal, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(Equal, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_neq_null(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned src1 = currentInstruction[2].u.operand;

    emitGetVirtualRegister(src1, regT0);
    Jump isImmediate = emitJumpIfNotJSCell(regT0);

    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
    move(TrustedImm32(1), regT0);
    Jump wasNotMasqueradesAsUndefined = jump();

    isMasqueradesAsUndefined.link(this);
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
    loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
    comparePtr(NotEqual, regT0, regT2, regT0);
    Jump wasNotImmediate = jump();

    isImmediate.link(this);
    and64(TrustedImm32(~TagBitUndefined), regT0);
    compare64(NotEqual, regT0, TrustedImm32(ValueNull), regT0);

    wasNotImmediate.link(this);
    wasNotMasqueradesAsUndefined.link(this);

    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_enter(Instruction*)
{
    emitEnterOptimizationCheck();

    // Even though CTI doesn't use them, we initialize our constant
    // registers to zap stale pointers, to avoid unnecessarily prolonging
    // object lifetime and increasing GC pressure.
    size_t count = m_codeBlock->m_numVars;
    for (size_t j = 0; j < count; ++j)
        emitInitRegister(j);
}

void JIT::emit_op_create_activation(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
    emitPutVirtualRegister(dst);
    activationCreated.link(this);
}

void JIT::emit_op_create_arguments(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(dst);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(dst));
    argsCreated.link(this);
}

void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
}

void JIT::emit_op_convert_this(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);

    emitJumpSlowCaseIfNotJSCell(regT1);
    if (shouldEmitProfiling()) {
        loadPtr(Address(regT1, JSCell::structureOffset()), regT0);
        emitValueProfilingSite();
    }
    addSlowCase(branchPtr(Equal, Address(regT1, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
}

void JIT::emit_op_get_callee(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(result);
}
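
// op_create_this tries to allocate |this| inline using the callee's
// ObjectAllocationProfile. A null allocator (no profile gathered yet) or a
// failed bump allocation branches to the two slow cases linked in
// emitSlow_op_create_this.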

void JIT::emit_op_create_this(Instruction* currentInstruction)
{
    int callee = currentInstruction[2].u.operand;
    RegisterID calleeReg = regT0;
    RegisterID resultReg = regT0;
    RegisterID allocatorReg = regT1;
    RegisterID structureReg = regT2;
    RegisterID scratchReg = regT3;

    emitGetVirtualRegister(callee, calleeReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));

    emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter); // doesn't have an allocation profile
    linkSlowCase(iter); // allocation failed

    JITStubCall stubCall(this, cti_op_create_this);
    stubCall.addArgument(TrustedImm32(currentInstruction[3].u.operand));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_will_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_profile_did_call);
    stubCall.addArgument(currentInstruction[1].u.operand, regT1);
    stubCall.call();
}

// Slow cases

void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    void* globalThis = m_codeBlock->globalObject()->globalThis();

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImm64((JSValue::encode(jsUndefined()))), regT0);
    Jump isNotUndefined = branch64(NotEqual, regT1, TrustedImm64(JSValue::encode(jsUndefined())));
    emitValueProfilingSite();
    move(TrustedImm64(JSValue::encode(JSValue(static_cast<JSCell*>(globalThis)))), regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_convert_this));

    linkSlowCase(iter);
    if (shouldEmitProfiling())
        move(TrustedImm64(JSValue::encode(m_vm->stringStructure.get())), regT0);
    isNotUndefined.link(this);
    emitValueProfilingSite();
    JITStubCall stubCall(this, cti_op_convert_this);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_primitive);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
    JITStubCall stubCall(this, cti_op_not);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(Zero, regT0), currentInstruction[2].u.operand); // inverted!
}

void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_jtrue);
    stubCall.addArgument(regT0);
    stubCall.call();
    emitJumpSlowToHot(branchTest32(NonZero, regT0), currentInstruction[2].u.operand);
}

void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitxor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_bitor);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_eq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call();
    xor32(TrustedImm32(0x1), regT0);
    emitTagAsBoolImmediate(regT0);
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_stricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_nstricteq);
    stubCall.addArgument(regT0);
    stubCall.addArgument(regT1);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned baseVal = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVal);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_check_has_instance);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(baseVal, regT2);
    stubCall.call(dst);

    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
}

void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned value = currentInstruction[2].u.operand;
    unsigned proto = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, value);
    linkSlowCaseIfNotJSCell(iter, proto);
    linkSlowCase(iter);
    JITStubCall stubCall(this, cti_op_instanceof);
    stubCall.addArgument(value, regT2);
    stubCall.addArgument(proto, regT2);
    stubCall.call(dst);
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_to_number);
    stubCall.addArgument(regT0);
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
    sub32(TrustedImm32(1), regT0);
    emitFastArithReTagImmediate(regT0, regT0);
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(base, regT0);
    JITStubCall stubCall(this, cti_op_get_by_id_generic);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.call(dst);
}

void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int argumentsRegister = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
    emitGetVirtualRegister(property, regT1);
    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
    add32(TrustedImm32(1), regT1);
    // regT1 now contains the integer index of the argument we want, including this
    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
    addSlowCase(branch32(AboveOrEqual, regT1, regT2));

    neg32(regT1);
    signExtend32ToPtr(regT1, regT1);
    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned arguments = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    Jump skipArgumentsCreation = jump();

    linkSlowCase(iter);
    linkSlowCase(iter);
    JITStubCall(this, cti_op_create_arguments).call();
    emitPutVirtualRegister(arguments);
    emitPutVirtualRegister(unmodifiedArgumentsRegister(arguments));

    skipArgumentsCreation.link(this);
    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(arguments, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.callWithValueProfiling(dst);
}
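
// op_put_to_base dispatches on the statically resolved kind of the store:
// global variables are written through a known address (optionally re-checking
// that the base really is the global object), scoped variables through the
// variable object's register array, and global properties through a
// structure-checked butterfly store; unresolved or exotic cases go to the
// generic stub.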

void JIT::emit_op_put_to_base(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int id = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    PutToBaseOperation* operation = currentInstruction[4].u.putToBaseOperation;
    switch (operation->m_kind) {
    case PutToBaseOperation::GlobalVariablePutChecked:
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(operation->m_predicatePointer)));
        // Fall through to the unchecked store.
    case PutToBaseOperation::GlobalVariablePut: {
        JSGlobalObject* globalObject = m_codeBlock->globalObject();
        if (operation->m_isDynamic) {
            emitGetVirtualRegister(base, regT0);
            addSlowCase(branchPtr(NotEqual, regT0, TrustedImmPtr(globalObject)));
        }
        emitGetVirtualRegister(value, regT0);
        store64(regT0, operation->m_registerAddress);
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }
    case PutToBaseOperation::VariablePut: {
        emitGetVirtualRegisters(base, regT0, value, regT1);
        loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT2);
        store64(regT1, Address(regT2, operation->m_offset * sizeof(Register)));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }
    case PutToBaseOperation::GlobalPropertyPut: {
        emitGetVirtualRegisters(base, regT0, value, regT1);
        loadPtr(&operation->m_structure, regT2);
        addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), regT2));
        ASSERT(!operation->m_structure || !operation->m_structure->inlineCapacity());
        loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
        load32(&operation->m_offsetInButterfly, regT3);
        signExtend32ToPtr(regT3, regT3);
        store64(regT1, BaseIndex(regT2, regT3, TimesEight));
        if (Heap::isWriteBarrierEnabled())
            emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
        return;
    }
    case PutToBaseOperation::Uninitialised:
    case PutToBaseOperation::Readonly:
    case PutToBaseOperation::Generic:
        JITStubCall stubCall(this, cti_op_put_to_base);

        stubCall.addArgument(TrustedImm32(base));
        stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(id)));
        stubCall.addArgument(TrustedImm32(value));
        stubCall.addArgument(TrustedImmPtr(operation));
        stubCall.call();
        return;
    }
}

#endif // USE(JSVALUE64)
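
// op_loop_hint is where tiering and the watchdog hook into loops: the add to
// the execution counter trips (PositiveOrZero) once the code has run hot
// enough for cti_optimize to consider an OSR entry into the optimizing JIT,
// and the watchdog's timer-fired byte is polled so runaway scripts can be
// interrupted.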

void JIT::emit_op_loop_hint(Instruction*)
{
    // Emit the JIT optimization check:
    if (canBeOptimized())
        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));

    // Emit the watchdog timer check:
    if (m_vm->watchdog.isEnabled())
        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress())));
}

void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
{
#if ENABLE(DFG_JIT)
    // Emit the slow path for the JIT optimization check:
    if (canBeOptimized()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_optimize);
        stubCall.addArgument(TrustedImm32(m_bytecodeOffset));
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
#endif

    // Emit the slow path of the watchdog timer check:
    if (m_vm->watchdog.isEnabled()) {
        linkSlowCase(iter);

        JITStubCall stubCall(this, cti_handle_watchdog_timer);
        stubCall.call();

        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
    }
}

void JIT::emit_resolve_operations(ResolveOperations* resolveOperations, const int* baseVR, const int* valueVR)
{
#if USE(JSVALUE32_64)
    unmap();
#else
    killLastResultRegister();
#endif

    if (resolveOperations->isEmpty()) {
        addSlowCase(jump());
        return;
    }

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    const RegisterID scope = regT2;
    const RegisterID scratch = regT3;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    ResolveOperation* pc = resolveOperations->data();
    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, scope);
    bool setBase = false;
    bool resolvingBase = true;
    while (resolvingBase) {
        switch (pc->m_operation) {
        case ResolveOperation::ReturnGlobalObjectAsBase:
            move(TrustedImmPtr(globalObject), value);
#if USE(JSVALUE32_64)
            move(TrustedImm32(JSValue::CellTag), valueTag);
#endif
            emitValueProfilingSite();
            emitStoreCell(*baseVR, value);
            return;
        case ResolveOperation::SetBaseToGlobal:
            RELEASE_ASSERT(baseVR);
            setBase