
/hphp/runtime/vm/jit/irgen-call.cpp

https://gitlab.com/Blueprint-Marketing/hhvm
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com)     |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/

#include "hphp/runtime/vm/jit/irgen-call.h"

#include "hphp/runtime/vm/jit/mc-generator.h"
#include "hphp/runtime/vm/jit/normalized-instruction.h"
#include "hphp/runtime/vm/jit/target-profile.h"
#include "hphp/runtime/vm/jit/type-constraint.h"
#include "hphp/runtime/vm/jit/type.h"
#include "hphp/runtime/vm/jit/irgen-exit.h"
#include "hphp/runtime/vm/jit/irgen-create.h"
#include "hphp/runtime/vm/jit/irgen-internal.h"

namespace HPHP { namespace jit { namespace irgen {

namespace {

//////////////////////////////////////////////////////////////////////

const StaticString s_self("self");
const StaticString s_parent("parent");
const StaticString s_static("static");

//////////////////////////////////////////////////////////////////////
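
/*
 * Try to resolve a "call user func" style callable to a known Func* at
 * JIT time. On success, the out-parameters are filled in for the caller:
 * cls is the target class (if any), invName is set when the call will be
 * dispatched through a magic __call method, and forward indicates that the
 * late-bound class of the current context should be forwarded. Returns
 * nullptr when the callable cannot be resolved statically.
 */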
const Func* findCuf(Op op,
                    SSATmp* callable,
                    const Class* ctx,
                    const Class*& cls,
                    StringData*& invName,
                    bool& forward) {
  cls = nullptr;
  invName = nullptr;

  const StringData* str =
    callable->hasConstVal(TStr) ? callable->strVal() : nullptr;
  const ArrayData* arr =
    callable->hasConstVal(TArr) ? callable->arrVal() : nullptr;

  StringData* sclass = nullptr;
  StringData* sname = nullptr;
  if (str) {
    Func* f = Unit::lookupFunc(str);
    if (f) return f;
    String name(const_cast<StringData*>(str));
    int pos = name.find("::");
    if (pos <= 0 || pos + 2 >= name.size() ||
        name.find("::", pos + 2) != String::npos) {
      return nullptr;
    }
    sclass = makeStaticString(name.substr(0, pos).get());
    sname = makeStaticString(name.substr(pos + 2).get());
  } else if (arr) {
    if (arr->size() != 2) return nullptr;
    const Variant& e0 = arr->get(int64_t(0), false);
    const Variant& e1 = arr->get(int64_t(1), false);
    if (!e0.isString() || !e1.isString()) return nullptr;
    sclass = e0.getStringData();
    sname = e1.getStringData();
    String name(sname);
    if (name.find("::") != String::npos) return nullptr;
  } else {
    return nullptr;
  }

  if (sclass->isame(s_self.get())) {
    if (!ctx) return nullptr;
    cls = ctx;
    forward = true;
  } else if (sclass->isame(s_parent.get())) {
    if (!ctx || !ctx->parent()) return nullptr;
    cls = ctx->parent();
    forward = true;
  } else if (sclass->isame(s_static.get())) {
    return nullptr;
  } else {
    cls = Unit::lookupClassOrUniqueClass(sclass);
    if (!cls) return nullptr;
  }

  bool magicCall = false;
  const Func* f = lookupImmutableMethod(cls, sname, magicCall,
                                        /* staticLookup = */ true, ctx);
  if (!f || (forward && !ctx->classof(f->cls()))) {
    /*
     * To preserve the invariant that the lsb class
     * is an instance of the context class, we require
     * that f's class is an instance of the context class.
     * This is conservative, but without it, we would need
     * a runtime check to decide whether or not to forward
     * the lsb class
     */
    return nullptr;
  }
  if (magicCall) invName = sname;
  return f;
}

bool canInstantiateClass(const Class* cls) {
  return cls && isNormalClass(cls) && !isAbstract(cls);
}

//////////////////////////////////////////////////////////////////////

// Pushing for object method when we don't know the Func* statically.
void fpushObjMethodUnknown(IRGS& env,
                           SSATmp* obj,
                           const StringData* methodName,
                           int32_t numParams,
                           bool shouldFatal) {
  emitIncStat(env, Stats::ObjMethod_cached, 1);
  spillStack(env);
  fpushActRec(env,
              cns(env, TNullptr), // Will be set by LdObjMethod
              obj,
              numParams,
              nullptr,
              false);
  spillStack(env);

  auto const objCls = gen(env, LdObjClass, obj);

  // This is special. We need to move the stackpointer in case LdObjMethod
  // calls a destructor. Otherwise it would clobber the ActRec we just pushed.
  updateMarker(env);
  env.irb->exceptionStackBoundary();

  gen(env,
      LdObjMethod,
      LdObjMethodData {
        offsetFromIRSP(env, BCSPOffset{0}), methodName, shouldFatal
      },
      objCls,
      sp(env));
}

/*
 * Returns true iff a method named methodName appears in iface or any of its
 * implemented (parent) interfaces. vtableSlot and func will be initialized to
 * the appropriate vtable slot and interface Func when true is returned;
 * otherwise their contents are undefined.
 */
bool findInterfaceVtableSlot(IRGS& env,
                             const Class* iface,
                             const StringData* methodName,
                             Slot& vtableSlot,
                             const Func*& func) {
  vtableSlot = iface->preClass()->ifaceVtableSlot();
  if (vtableSlot != kInvalidSlot) {
    auto res = g_context->lookupObjMethod(func, iface, methodName,
                                          curClass(env), false);
    if (res == LookupResult::MethodFoundWithThis ||
        res == LookupResult::MethodFoundNoThis) {
      return true;
    }
  }
  for (auto pface : iface->allInterfaces().range()) {
    if (findInterfaceVtableSlot(env, pface, methodName, vtableSlot, func)) {
      return true;
    }
  }
  return false;
}
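
/*
 * Push an ActRec for an object method call using whatever static knowledge
 * we have of the base class: an immutable Func*, a method slot that derived
 * classes cannot override with different staticness or visibility, or an
 * interface vtable slot. Falls back to fpushObjMethodUnknown when none of
 * these apply.
 */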
void fpushObjMethodWithBaseClass(IRGS& env,
                                 SSATmp* obj,
                                 const Class* baseClass,
                                 const StringData* methodName,
                                 int32_t numParams,
                                 bool shouldFatal,
                                 bool isMonomorphic) {
  SSATmp* objOrCls = obj;
  bool magicCall = false;
  const Func* func = lookupImmutableMethod(baseClass,
                                           methodName,
                                           magicCall,
                                           /* staticLookup: */ false,
                                           curClass(env));
  if (!func) {
    if (baseClass && !(baseClass->attrs() & AttrInterface)) {
      auto const res =
        g_context->lookupObjMethod(func, baseClass, methodName, curClass(env),
                                   false);
      if (res == LookupResult::MethodFoundWithThis ||
          res == LookupResult::MethodFoundNoThis) {
        /*
         * If we found the func in baseClass, then either:
         *  a) it's private, and this is always going to be the
         *     called function. This case is handled further down.
         * OR
         *  b) any derived class must have a func that matches in staticness
         *     and is at least as accessible (and in particular, you can't
         *     override a public/protected method with a private method). In
         *     this case, we emit code to dynamically lookup the method given
         *     the Object and the method slot, which is the same as func's.
         */
        if (!(func->attrs() & AttrPrivate)) {
          emitIncStat(env, Stats::ObjMethod_methodslot, 1);
          auto const clsTmp = gen(env, LdObjClass, obj);
          auto const funcTmp = gen(
            env,
            LdClsMethod,
            clsTmp,
            cns(env, -(func->methodSlot() + 1))
          );
          if (res == LookupResult::MethodFoundNoThis) {
            gen(env, DecRef, obj);
            objOrCls = clsTmp;
          }
          fpushActRec(env,
                      funcTmp,
                      objOrCls,
                      numParams,
                      magicCall ? methodName : nullptr,
                      false);
          return;
        }
      } else {
        // method lookup did not find anything
        func = nullptr; // force lookup
      }
    }
  }

  if (func != nullptr) {
    /*
     * static function: store base class into this slot instead of obj
     * and decref the obj that was pushed as the this pointer since
     * the obj won't be in the actrec and thus MethodCache::lookup won't
     * decref it
     *
     * static closure body: we still need to pass the object instance
     * for the closure prologue to properly do its dispatch (and
     * extract use vars). It will decref it and put the class on the
     * actrec before entering the "real" cloned closure body.
     */
    emitIncStat(env, Stats::ObjMethod_known, 1);
    if (func->isStatic() && !func->isClosureBody()) {
      assertx(baseClass);
      gen(env, DecRef, obj);
      objOrCls = cns(env, baseClass);
    }
    fpushActRec(env,
                cns(env, func),
                objOrCls,
                numParams,
                magicCall ? methodName : nullptr,
                false);
    return;
  }

  if (!isMonomorphic && classIsUniqueInterface(baseClass)) {
    Slot vtableSlot;
    const Func* ifaceFunc;
    if (findInterfaceVtableSlot(env, baseClass, methodName,
                                vtableSlot, ifaceFunc)) {
      emitIncStat(env, Stats::ObjMethod_ifaceslot, 1);
      auto cls = gen(env, LdObjClass, obj);
      auto func = gen(env, LdIfaceMethod,
                      IfaceMethodData{vtableSlot, ifaceFunc->methodSlot()},
                      cls);
      if (ifaceFunc->attrs() & AttrStatic) {
        gen(env, DecRef, obj);
        objOrCls = cls;
      }
      fpushActRec(env, func, objOrCls, numParams, nullptr, false);
      return;
    }
  }

  fpushObjMethodUnknown(env, obj, methodName, numParams, shouldFatal);
}

static const StringData* classProfileKey = makeStaticString(
  "ClassProfile-FPushObjMethod"
);
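
/*
 * Entry point for pushing an ActRec for an FPushObjMethod* on a known
 * object. Uses the specialized class type of obj when it is available
 * without further guards; otherwise consults the ClassProfile and, when the
 * profile is monomorphic on a non-overridden class, checks for that exact
 * class and side-exits on a mismatch.
 */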
void fpushObjMethod(IRGS& env,
                    SSATmp* obj,
                    const StringData* methodName,
                    int32_t numParams,
                    bool shouldFatal,
                    Block* sideExit) {
  emitIncStat(env, Stats::ObjMethod_total, 1);

  if (auto cls = obj->type().clsSpec().cls()) {
    if (!env.irb->constrainValue(obj, TypeConstraint(cls).setWeak())) {
      // If we know the class without having to specialize a guard any further,
      // use it.
      fpushObjMethodWithBaseClass(env, obj, cls, methodName,
                                  numParams, shouldFatal, false);
      return;
    }
  }

  TargetProfile<ClassProfile> profile(env.context, env.irb->curMarker(),
                                      classProfileKey);
  if (profile.profiling()) {
    gen(env, ProfileObjClass, RDSHandleData { profile.handle() }, obj);
  }

  const bool shouldTryToOptimize = !env.transFlags.noProfiledFPush
                                || !env.firstBcInst;

  auto isMonomorphic = false;
  if (profile.optimizing() && shouldTryToOptimize) {
    ClassProfile data = profile.data(ClassProfile::reduce);
    if (data.isMonomorphic()) {
      isMonomorphic = true;
      auto baseClass = data.getClass(0);
      if (baseClass->attrs() & AttrNoOverride) {
        auto refinedObj = gen(env, CheckType, Type::ExactObj(baseClass),
                              sideExit, obj);
        env.irb->constrainValue(refinedObj, TypeConstraint(baseClass));
        fpushObjMethodWithBaseClass(env, refinedObj, baseClass, methodName,
                                    numParams, shouldFatal, true);
        return;
      }
    }
  }

  fpushObjMethodWithBaseClass(env, obj, nullptr, methodName, numParams,
                              shouldFatal, isMonomorphic);
}
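
// Push an ActRec for invoking an object as a function: load its class's
// __invoke Func, side-exiting to the slow path when there is none.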
void fpushFuncObj(IRGS& env, int32_t numParams) {
  auto const slowExit = makeExitSlow(env);
  auto const obj = popC(env);
  auto const cls = gen(env, LdObjClass, obj);
  auto const func = gen(env, LdObjInvoke, slowExit, cls);
  fpushActRec(env, func, obj, numParams, nullptr, false);
}

void fpushFuncArr(IRGS& env, int32_t numParams) {
  auto const thisAR = fp(env);
  auto const arr = popC(env);
  fpushActRec(
    env,
    cns(env, TNullptr),
    cns(env, TNullptr),
    numParams,
    nullptr,
    false
  );
  spillStack(env);

  // This is special. We need to move the stackpointer in case LdArrFuncCtx
  // calls a destructor. Otherwise it would clobber the ActRec we just
  // pushed.
  updateMarker(env);
  env.irb->exceptionStackBoundary();

  gen(env, LdArrFuncCtx, IRSPOffsetData { offsetFromIRSP(env, BCSPOffset{0}) },
      arr, sp(env), thisAR);
  gen(env, DecRef, arr);
}

// FPushCuf when the callee is not known at compile time.
void fpushCufUnknown(IRGS& env, Op op, int32_t numParams) {
  if (op != Op::FPushCuf) {
    PUNT(fpushCufUnknown-nonFPushCuf);
  }
  if (topC(env)->isA(TObj)) return fpushFuncObj(env, numParams);

  if (!topC(env)->type().subtypeOfAny(TArr, TStr)) {
    PUNT(fpushCufUnknown);
  }

  auto const callable = popC(env);
  fpushActRec(
    env,
    cns(env, TNullptr),
    cns(env, TNullptr),
    numParams,
    nullptr,
    false
  );
  spillStack(env);

  /*
   * This is a similar case to lookup for functions in FPushFunc or
   * FPushObjMethod. We can throw in a weird situation where the
   * ActRec is already on the stack, but this bytecode isn't done
   * executing yet. See arPreliveOverwriteCells for details about why
   * we need this marker.
   */
  updateMarker(env);
  env.irb->exceptionStackBoundary();

  auto const opcode = callable->isA(TArr) ? LdArrFPushCuf
                                          : LdStrFPushCuf;
  gen(env, opcode, IRSPOffsetData { offsetFromIRSP(env, BCSPOffset{0}) },
      callable, sp(env), fp(env));
  gen(env, DecRef, callable);
}
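
/*
 * Compute the context (a Class or a $this) to store in the ActRec for a
 * statically known class-method callee. Punts when we cannot tell at JIT
 * time whether $this should be forwarded.
 */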
SSATmp* clsMethodCtx(IRGS& env, const Func* callee, const Class* cls) {
  bool mustBeStatic = true;

  if (!(callee->attrs() & AttrStatic) &&
      !(curFunc(env)->attrs() & AttrStatic) &&
      curClass(env)) {
    if (curClass(env)->classof(cls)) {
      // In this case, it might not be static, but we can be sure
      // we're going to forward $this if thisAvailable.
      mustBeStatic = false;
    } else if (cls->classof(curClass(env))) {
      // Unlike the above, we might be calling down to a subclass that
      // is not related to the current instance. To know whether this
      // call forwards $this requires a runtime type check, so we have
      // to punt instead of trying the thisAvailable path below.
      PUNT(getClsMethodCtx-PossibleStaticRelatedCall);
    }
  }

  if (mustBeStatic) {
    return ldCls(env, cns(env, cls->name()));
  }
  if (env.irb->thisAvailable()) {
    // might not be a static call and $this is available, so we know it's
    // definitely not static
    assertx(curClass(env));
    auto this_ = ldThis(env);
    gen(env, IncRef, this_);
    return this_;
  }
  // might be a non-static call. we have to inspect the func at runtime
  PUNT(getClsMethodCtx-MightNotBeStatic);
}
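
// Shared implementation for FPushCuf, FPushCufF, and FPushCufSafe.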
void implFPushCufOp(IRGS& env, Op op, int32_t numArgs) {
  const bool safe = op == OpFPushCufSafe;
  bool forward = op == OpFPushCufF;

  SSATmp* callable = topC(env, BCSPOffset{safe ? 1 : 0});
  const Class* cls = nullptr;
  StringData* invName = nullptr;
  auto const callee = findCuf(op, callable, curClass(env), cls, invName,
                              forward);
  if (!callee) return fpushCufUnknown(env, op, numArgs);

  SSATmp* ctx;
  auto const safeFlag = cns(env, true); // This is always true until the slow
                                        // exits below are implemented
  auto func = cns(env, callee);
  if (cls) {
    auto const exitSlow = makeExitSlow(env);
    if (!rds::isPersistentHandle(cls->classHandle())) {
      // The miss path is complicated and rare. Punt for now. This must be
      // checked before we IncRef the context below, because the slow exit will
      // want to do that same IncRef via InterpOne.
      auto const clsOrNull = gen(env, LdClsCachedSafe, cns(env, cls->name()));
      gen(env, CheckNonNull, exitSlow, clsOrNull);
    }

    if (forward) {
      ctx = ldCtx(env);
      ctx = gen(env, GetCtxFwdCall, ctx, cns(env, callee));
    } else {
      ctx = clsMethodCtx(env, callee, cls);
    }
  } else {
    ctx = cns(env, TNullptr);
    if (!rds::isPersistentHandle(callee->funcHandle())) {
      // The miss path is complicated and rare. Punt for now.
      func = gen(env, LdFuncCachedSafe, LdFuncCachedData(callee->name()));
      func = gen(env, CheckNonNull, makeExitSlow(env), func);
    }
  }

  auto const defaultVal = safe ? popC(env) : nullptr;
  popDecRef(env); // callable
  if (safe) {
    push(env, defaultVal);
    push(env, safeFlag);
  }

  fpushActRec(env, func, ctx, numArgs, invName, false);
}
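
/*
 * Shared helper for FPushFuncD/FPushFuncU: push the ActRec directly when
 * the named function's binding is immutable in this unit, otherwise load it
 * from the function cache (with an optional fallback name).
 */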
void fpushFuncCommon(IRGS& env,
                     int32_t numParams,
                     const StringData* name,
                     const StringData* fallback) {
  if (auto const func = Unit::lookupFunc(name)) {
    if (func->isNameBindingImmutable(curUnit(env))) {
      fpushActRec(env,
                  cns(env, func),
                  cns(env, TNullptr),
                  numParams,
                  nullptr,
                  false);
      return;
    }
  }

  auto const ssaFunc = fallback
    ? gen(env, LdFuncCachedU, LdFuncCachedUData { name, fallback })
    : gen(env, LdFuncCached, LdFuncCachedData { name });
  fpushActRec(env,
              ssaFunc,
              cns(env, TNullptr),
              numParams,
              nullptr,
              false);
}

void implUnboxR(IRGS& env) {
  auto const exit = makeExit(env);
  auto const srcBox = popR(env);
  auto const unboxed = unbox(env, srcBox, exit);
  if (unboxed == srcBox) {
    // If the Unbox ended up being a noop, don't bother refcounting
    push(env, unboxed);
  } else {
    pushIncRef(env, unboxed);
    gen(env, DecRef, srcBox);
  }
}

//////////////////////////////////////////////////////////////////////

}

//////////////////////////////////////////////////////////////////////
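
/*
 * Push a pre-live ActRec: spill the eval stack, emit a SpillFrame, and
 * record the frame on env.fpiStack so the matching FCall can find it.
 */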
void fpushActRec(IRGS& env,
                 SSATmp* func,
                 SSATmp* objOrClass,
                 int32_t numArgs,
                 const StringData* invName,
                 bool fromFPushCtor) {
  spillStack(env);
  auto const returnSPOff = env.irb->syncedSpLevel();

  ActRecInfo info;
  info.spOffset = offsetFromIRSP(env, BCSPOffset{-int32_t{kNumActRecCells}});
  info.numArgs = numArgs;
  info.invName = invName;
  info.fromFPushCtor = fromFPushCtor;

  gen(
    env,
    SpillFrame,
    info,
    sp(env),
    func,
    objOrClass
  );
  auto const sframe = &env.irb->curBlock()->back();
  assertx(sframe->is(SpillFrame));
  env.fpiStack.push(FPIInfo { sp(env), returnSPOff, sframe });
  assertx(env.irb->stackDeficit() == 0);
}

//////////////////////////////////////////////////////////////////////
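
// FPushCufIter: spill a pre-live ActRec whose callee is taken from the
// CufIter in iterator slot itId.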
void emitFPushCufIter(IRGS& env, int32_t numParams, int32_t itId) {
  spillStack(env);
  env.fpiStack.push(FPIInfo { sp(env), env.irb->spOffset(), nullptr });
  gen(
    env,
    CufIterSpillFrame,
    FPushCufData {
      offsetFromIRSP(env, BCSPOffset{-int32_t{kNumActRecCells}}),
      static_cast<uint32_t>(numParams),
      itId
    },
    sp(env),
    fp(env)
  );
}

void emitFPushCuf(IRGS& env, int32_t numArgs) {
  implFPushCufOp(env, Op::FPushCuf, numArgs);
}
void emitFPushCufF(IRGS& env, int32_t numArgs) {
  implFPushCufOp(env, Op::FPushCufF, numArgs);
}
void emitFPushCufSafe(IRGS& env, int32_t numArgs) {
  implFPushCufOp(env, Op::FPushCufSafe, numArgs);
}

void emitFPushCtor(IRGS& env, int32_t numParams) {
  auto const cls = popA(env);
  auto const func = gen(env, LdClsCtor, cls);
  auto const obj = gen(env, AllocObj, cls);
  pushIncRef(env, obj);
  fpushActRec(env, func, obj, numParams, nullptr, true /* fromFPushCtor */);
}
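
/*
 * FPushCtorD: the class name is statically known. When the class is
 * persistent, instantiable, and needs no custom instance init or native
 * property handler, allocate the object with the fast path; otherwise fall
 * back to a generic AllocObj.
 */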
void emitFPushCtorD(IRGS& env,
                    int32_t numParams,
                    const StringData* className) {
  auto const cls = Unit::lookupClassOrUniqueClass(className);
  bool const uniqueCls = classIsUnique(cls);
  bool const persistentCls = classHasPersistentRDS(cls);
  bool const canInstantiate = canInstantiateClass(cls);
  bool const fastAlloc =
    persistentCls &&
    canInstantiate &&
    !cls->callsCustomInstanceInit() &&
    !cls->hasNativePropHandler();

  auto const func = lookupImmutableCtor(cls, curClass(env));

  auto ssaCls = persistentCls
    ? cns(env, cls)
    : gen(env, LdClsCached, cns(env, className));
  if (!ssaCls->hasConstVal() && uniqueCls) {
    // If the Class is unique but not persistent, it's safe to use it as a
    // const after the LdClsCached, which will throw if the class can't be
    // defined.
    ssaCls = cns(env, cls);
  }

  auto const ssaFunc = func ? cns(env, func)
                            : gen(env, LdClsCtor, ssaCls);
  auto const obj = fastAlloc ? allocObjFast(env, cls)
                             : gen(env, AllocObj, ssaCls);
  pushIncRef(env, obj);
  fpushActRec(env, ssaFunc, obj, numParams, nullptr, true /* FromFPushCtor */);
}

void emitFPushFuncD(IRGS& env, int32_t nargs, const StringData* name) {
  fpushFuncCommon(env, nargs, name, nullptr);
}

void emitFPushFuncU(IRGS& env,
                    int32_t nargs,
                    const StringData* name,
                    const StringData* fallback) {
  fpushFuncCommon(env, nargs, name, fallback);
}

void emitFPushFunc(IRGS& env, int32_t numParams) {
  if (topC(env)->isA(TObj)) return fpushFuncObj(env, numParams);
  if (topC(env)->isA(TArr)) return fpushFuncArr(env, numParams);

  if (!topC(env)->isA(TStr)) {
    PUNT(FPushFunc_not_Str);
  }

  auto const funcName = popC(env);
  fpushActRec(env,
              gen(env, LdFunc, funcName),
              cns(env, TNullptr),
              numParams,
              nullptr,
              false);
}

void emitFPushObjMethodD(IRGS& env,
                         int32_t numParams,
                         const StringData* methodName,
                         ObjMethodOp subop) {
  TransFlags trFlags;
  trFlags.noProfiledFPush = true;
  auto sideExit = makeExit(env, trFlags);

  auto const obj = popC(env);

  if (obj->type() <= TObj) {
    fpushObjMethod(env, obj, methodName, numParams,
                   true /* shouldFatal */, sideExit);
    return;
  }

  if (obj->type() <= TInitNull && subop == ObjMethodOp::NullSafe) {
    fpushActRec(
      env,
      cns(env, SystemLib::s_nullFunc),
      cns(env, TNullptr),
      numParams,
      nullptr,
      false);
    return;
  }

  PUNT(FPushObjMethodD-nonObj);
}
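
/*
 * FPushClsMethodD: both the class and method names are known. Use an
 * immutable Func* when the lookup succeeds at JIT time; otherwise go through
 * the class-method cache, taking a slow exit if that lookup misses too.
 */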
void emitFPushClsMethodD(IRGS& env,
                         int32_t numParams,
                         const StringData* methodName,
                         const StringData* className) {
  auto const baseClass = Unit::lookupClassOrUniqueClass(className);
  bool magicCall = false;

  if (auto const func = lookupImmutableMethod(baseClass,
                                              methodName,
                                              magicCall,
                                              true /* staticLookup */,
                                              curClass(env))) {
    auto const objOrCls = clsMethodCtx(env, func, baseClass);
    fpushActRec(env,
                cns(env, func),
                objOrCls,
                numParams,
                func && magicCall ? methodName : nullptr,
                false);
    return;
  }

  auto const slowExit = makeExitSlow(env);
  auto const ne = NamedEntity::get(className);
  auto const data = ClsMethodData { className, methodName, ne };

  // Look up the Func* in the targetcache. If it's not there, try the slow
  // path. If that fails, slow exit.
  auto const func = cond(
    env,
    [&] (Block* taken) {
      auto const mcFunc = gen(env, LdClsMethodCacheFunc, data);
      return gen(env, CheckNonNull, taken, mcFunc);
    },
    [&] (SSATmp* func) { // next
      return func;
    },
    [&] { // taken
      hint(env, Block::Hint::Unlikely);
      auto const result = gen(env, LookupClsMethodCache, data, fp(env));
      return gen(env, CheckNonNull, slowExit, result);
    }
  );
  auto const clsCtx = gen(env, LdClsMethodCacheCls, data);

  fpushActRec(env,
              func,
              clsCtx,
              numParams,
              nullptr,
              false);
}
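
/*
 * FPushClsMethod: the class and method come from the stack. Try to
 * devirtualize when the method name is a constant string and the class is
 * either a constant or the current context; otherwise emit a generic
 * LookupClsMethod with an incomplete ActRec on the stack.
 */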
void emitFPushClsMethod(IRGS& env, int32_t numParams) {
  auto const clsVal = popA(env);
  auto const methVal = popC(env);

  if (!methVal->isA(TStr) || !clsVal->isA(TCls)) {
    PUNT(FPushClsMethod-unknownType);
  }

  if (methVal->hasConstVal()) {
    const Class* cls = nullptr;
    if (clsVal->hasConstVal()) {
      cls = clsVal->clsVal();
    } else if (clsVal->inst()->is(LdClsCtx, LdClsCctx)) {
      /*
       * Optimize FPushClsMethod when the method is a known static
       * string and the input class is the context. The common bytecode
       * pattern here is LateBoundCls ; FPushClsMethod.
       *
       * This logic feels like it belongs in the simplifier, but the
       * generated code for this case is pretty different, since we
       * don't need the pre-live ActRec trick.
       */
      cls = curClass(env);
    }

    if (cls) {
      const Func* func;
      auto res =
        g_context->lookupClsMethod(func,
                                   cls,
                                   methVal->strVal(),
                                   nullptr,
                                   cls,
                                   false);
      if (res == LookupResult::MethodFoundNoThis && func->isStatic()) {
        auto funcTmp = clsVal->hasConstVal()
          ? cns(env, func)
          : gen(env, LdClsMethod, clsVal, cns(env, -(func->methodSlot() + 1)));
        fpushActRec(env, funcTmp, clsVal, numParams, nullptr, false);
        return;
      }
    }
  }

  fpushActRec(env,
              cns(env, TNullptr),
              cns(env, TNullptr),
              numParams,
              nullptr,
              false);
  spillStack(env);

  /*
   * Similar to FPushFunc/FPushObjMethod, we have an incomplete ActRec on the
   * stack and must handle that properly if we throw or re-enter.
   */
  updateMarker(env);
  env.irb->exceptionStackBoundary();

  gen(env, LookupClsMethod,
      IRSPOffsetData { offsetFromIRSP(env, BCSPOffset{0}) },
      clsVal, methVal, sp(env), fp(env));
  gen(env, DecRef, methVal);
}
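
/*
 * FPushClsMethodF: a forwarding class-method call. Requires constant class
 * and method operands; the current context is forwarded to the callee via
 * GetCtxFwdCall (known Func*) or GetCtxFwdCallDyn (cached lookup).
 */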
void emitFPushClsMethodF(IRGS& env, int32_t numParams) {
  auto const exitBlock = makeExitSlow(env);

  auto classTmp = top(env);
  auto methodTmp = topC(env, BCSPOffset{1}, DataTypeGeneric);
  assertx(classTmp->isA(TCls));
  if (!classTmp->hasConstVal() || !methodTmp->hasConstVal(TStr)) {
    PUNT(FPushClsMethodF-unknownClassOrMethod);
  }
  env.irb->constrainValue(methodTmp, DataTypeSpecific);

  auto const cls = classTmp->clsVal();
  auto const methName = methodTmp->strVal();

  bool magicCall = false;
  auto const vmfunc = lookupImmutableMethod(cls,
                                            methName,
                                            magicCall,
                                            true /* staticLookup */,
                                            curClass(env));
  discard(env, 2);

  auto const curCtxTmp = ldCtx(env);
  if (vmfunc) {
    auto const funcTmp = cns(env, vmfunc);
    auto const newCtxTmp = gen(env, GetCtxFwdCall, curCtxTmp, funcTmp);
    fpushActRec(env, funcTmp, newCtxTmp, numParams,
                magicCall ? methName : nullptr, false);
    return;
  }

  auto const data = ClsMethodData{cls->name(), methName};
  auto const funcTmp = cond(
    env,
    [&](Block* taken) {
      auto const fcacheFunc = gen(env, LdClsMethodFCacheFunc, data);
      return gen(env, CheckNonNull, taken, fcacheFunc);
    },
    [&](SSATmp* func) { // next
      return func;
    },
    [&] { // taken
      hint(env, Block::Hint::Unlikely);
      auto const result = gen(
        env,
        LookupClsMethodFCache,
        data,
        cns(env, cls),
        fp(env)
      );
      return gen(env, CheckNonNull, exitBlock, result);
    }
  );

  auto const ctx = gen(env, GetCtxFwdCallDyn, data, curCtxTmp);
  fpushActRec(env,
              funcTmp,
              ctx,
              numParams,
              magicCall ? methName : nullptr,
              false);
}

//////////////////////////////////////////////////////////////////////

/*
 * All fpass instructions spill the stack after they execute, because we are
 * sure to need that value in memory, regardless of whether we side-exit or
 * throw. At the level of HHBC semantics, it's illegal to pop them from the
 * stack until we've left the FPI region, and we will be spilling the whole
 * stack when we get to the FCall{D,} at the end of the region. This should
 * also potentially reduce the number of live registers during call sequences.
 *
 * Note: there is a general problem with the spillStack mechanism, in that it
 * may sink stores that are not profitable to sink, but in this case we can
 * work around it easily.
 */

void emitFPassL(IRGS& env, int32_t argNum, int32_t id) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    emitVGetL(env, id);
  } else {
    emitCGetL(env, id);
  }
  spillStack(env);
}

void emitFPassS(IRGS& env, int32_t argNum) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    emitVGetS(env);
  } else {
    emitCGetS(env);
  }
  spillStack(env);
}

void emitFPassG(IRGS& env, int32_t argNum) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    emitVGetG(env);
  } else {
    emitCGetG(env);
  }
  spillStack(env);
}

void emitFPassR(IRGS& env, int32_t argNum) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    PUNT(FPassR-byRef);
  }
  implUnboxR(env);
  spillStack(env);
}

void emitFPassM(IRGS& env, int32_t, int x) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    emitVGetM(env, x);
  } else {
    emitCGetM(env, x);
  }
  spillStack(env);
}

void emitUnboxR(IRGS& env) { implUnboxR(env); }

void emitFPassV(IRGS& env, int32_t argNum) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    // FPassV is a no-op when the callee expects by ref.
    return;
  }

  auto const tmp = popV(env);
  pushIncRef(env, gen(env, LdRef, TInitCell, tmp));
  gen(env, DecRef, tmp);
  spillStack(env);
}

void emitFPassCE(IRGS& env, int32_t argNum) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    // Need to raise an error
    PUNT(FPassCE-byRef);
  }
  spillStack(env);
}

void emitFPassCW(IRGS& env, int32_t argNum) {
  if (env.currentNormalizedInstruction->preppedByRef) {
    // Need to raise a warning
    PUNT(FPassCW-byRef);
  }
  spillStack(env);
}

//////////////////////////////////////////////////////////////////////

void emitFCallArray(IRGS& env) {
  spillStack(env);

  auto const data = CallArrayData {
    offsetFromIRSP(env, BCSPOffset{0}),
    bcOff(env),
    nextBcOff(env),
    callDestroysLocals(*env.currentNormalizedInstruction, curFunc(env))
  };
  env.irb->exceptionStackBoundary();
  gen(env, CallArray, data, sp(env), fp(env));
}

void emitFCallD(IRGS& env,
                int32_t numParams,
                const StringData*,
                const StringData*) {
  emitFCall(env, numParams);
}
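
/*
 * FCall: spill the stack, emit the Call instruction for numParams arguments,
 * and pop the matching entry from env.fpiStack.
 */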
void emitFCall(IRGS& env, int32_t numParams) {
  auto const returnBcOffset = nextBcOff(env) - curFunc(env)->base();
  auto const callee = env.currentNormalizedInstruction->funcd;
  auto const destroyLocals = callDestroysLocals(
    *env.currentNormalizedInstruction,
    curFunc(env)
  );

  spillStack(env);
  env.irb->exceptionStackBoundary();

  gen(
    env,
    Call,
    CallData {
      offsetFromIRSP(env, BCSPOffset{0}),
      static_cast<uint32_t>(numParams),
      returnBcOffset,
      callee,
      destroyLocals
    },
    sp(env),
    fp(env)
  );

  if (!env.fpiStack.empty()) {
    env.fpiStack.pop();
  }
}

//////////////////////////////////////////////////////////////////////

}}}