
/hphp/runtime/base/array-iterator.cpp

https://gitlab.com/Blueprint-Marketing/hhvm
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com)     |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,     |
   | that is bundled with this package in the file LICENSE, and is       |
   | available through the world-wide-web at the following url:          |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to  |
   | obtain it through the world-wide-web, please send a note to         |
   | license@php.net so we can mail you a copy immediately.              |
   +----------------------------------------------------------------------+
*/

#include "hphp/runtime/base/array-iterator.h"

#include <algorithm>

#include <folly/Likely.h>

#include "hphp/runtime/base/array-data.h"
#include "hphp/runtime/base/collections.h"
#include "hphp/runtime/base/mixed-array.h"
#include "hphp/runtime/base/packed-array.h"
#include "hphp/runtime/base/struct-array.h"
#include "hphp/runtime/base/struct-array-defs.h"
#include "hphp/runtime/base/shape.h"
#include "hphp/runtime/base/apc-local-array.h"
#include "hphp/runtime/base/builtin-functions.h"
#include "hphp/runtime/ext/ext_collections.h"
#include "hphp/runtime/base/mixed-array-defs.h"
#include "hphp/runtime/base/packed-array-defs.h"
#include "hphp/runtime/base/array-iterator-defs.h"
#include "hphp/runtime/base/apc-local-array-defs.h"
#include "hphp/runtime/vm/vm-regs.h"

namespace HPHP {

TRACE_SET_MOD(runtime);
//////////////////////////////////////////////////////////////////////

const StaticString
  s_rewind("rewind"),
  s_valid("valid"),
  s_next("next"),
  s_key("key"),
  s_current("current");

__thread MIterTable tl_miter_table;

//////////////////////////////////////////////////////////////////////

ArrayIter::ArrayIter(const ArrayData* data) {
  arrInit(data);
}

ArrayIter::ArrayIter(const Array& array) {
  arrInit(array.get());
}

ArrayIter::ArrayIter(ObjectData* obj) {
  objInit<true>(obj);
}

ArrayIter::ArrayIter(ObjectData* obj, NoInc) {
  objInit<false>(obj);
}

ArrayIter::ArrayIter(const Object& obj) {
  objInit<true>(obj.get());
}

ArrayIter::ArrayIter(const Cell c) {
  cellInit(c);
}

ArrayIter::ArrayIter(const Variant& v) {
  cellInit(*v.asCell());
}

ArrayIter::ArrayIter(const ArrayIter& iter) {
  m_data = iter.m_data;
  m_pos = iter.m_pos;
  m_version = iter.m_version;
  m_itype = iter.m_itype;
  m_nextHelperIdx = iter.m_nextHelperIdx;
  if (hasArrayData()) {
    const ArrayData* ad = getArrayData();
    if (ad) const_cast<ArrayData*>(ad)->incRefCount();
  } else {
    ObjectData* obj = getObject();
    assert(obj);
    obj->incRefCount();
  }
}

void ArrayIter::arrInit(const ArrayData* arr) {
  setArrayData(arr);
  if (arr) {
    arr->incRefCount();
    m_pos = arr->iter_begin();
  }
}

void ArrayIter::VectorInit(ArrayIter* iter, ObjectData* obj) {
  auto vec = static_cast<c_Vector*>(obj);
  iter->m_version = vec->getVersion();
  iter->m_pos = 0;
}

void ArrayIter::MapInit(ArrayIter* iter, ObjectData* obj) {
  auto mp = static_cast<c_Map*>(obj);
  iter->m_version = mp->getVersion();
  iter->m_pos = mp->iter_begin();
}

void ArrayIter::ImmMapInit(ArrayIter* iter, ObjectData* obj) {
  auto smp = static_cast<c_ImmMap*>(obj);
  iter->m_version = smp->getVersion();
  iter->m_pos = smp->iter_begin();
}

void ArrayIter::SetInit(ArrayIter* iter, ObjectData* obj) {
  auto st = static_cast<c_Set*>(obj);
  iter->m_version = st->getVersion();
  iter->m_pos = st->iter_begin();
}

void ArrayIter::PairInit(ArrayIter* iter, ObjectData* obj) {
  iter->m_pos = 0;
}

void ArrayIter::ImmVectorInit(ArrayIter* iter, ObjectData* obj) {
  auto vec = static_cast<c_ImmVector*>(obj);
  iter->m_version = vec->getVersion();
  iter->m_pos = 0;
}

void ArrayIter::ImmSetInit(ArrayIter* iter, ObjectData* obj) {
  auto st = static_cast<c_ImmSet*>(obj);
  iter->m_version = st->getVersion();
  iter->m_pos = st->iter_begin();
}

IterNextIndex ArrayIter::getNextHelperIdx(ObjectData* obj) {
  if (obj->isCollection()) {
    switch (obj->collectionType()) {
#define X(type) case CollectionType::type: return IterNextIndex::type;
      COLLECTIONS_ALL_TYPES(X)
#undef X
    }
    not_reached();
  } else {
    return IterNextIndex::Object;
  }
}

void ArrayIter::IteratorObjInit(ArrayIter* iter, ObjectData* obj) {
  assert(obj->instanceof(SystemLib::s_IteratorClass));
  try {
    obj->o_invoke_few_args(s_rewind, 0);
  } catch (...) {
    // Regardless of whether the incRef template parameter is true or
    // false, at this point this ArrayIter "owns" a reference to the
    // object and is responsible for decreffing the object when the
    // ArrayIter's lifetime ends. Normally ArrayIter's destructor takes
    // care of this, but the destructor will not get invoked if an
    // exception is thrown before the constructor finishes so we have
    // to manually handle decreffing the object here.
    iter->m_data = nullptr;
    if (debug) iter->m_itype = TypeUndefined;
    decRefObj(obj);
    throw;
  }
}

constexpr unsigned ctype_index(CollectionType t) {
  return unsigned(t) - unsigned(CollectionType::Vector);
}

static_assert(ctype_index(CollectionType::Vector) == 0, "");
static_assert(ctype_index(CollectionType::Map) == 1, "");
static_assert(ctype_index(CollectionType::Set) == 2, "");
static_assert(ctype_index(CollectionType::Pair) == 3, "");
static_assert(ctype_index(CollectionType::ImmVector) == 4, "");
static_assert(ctype_index(CollectionType::ImmMap) == 5, "");
static_assert(ctype_index(CollectionType::ImmSet) == 6, "");

const unsigned MaxCollectionTypes = 7;

const ArrayIter::InitFuncPtr
ArrayIter::initFuncTable[MaxCollectionTypes + 1] = {
  &ArrayIter::VectorInit,
  &ArrayIter::MapInit,
  &ArrayIter::SetInit,
  &ArrayIter::PairInit,
  &ArrayIter::ImmVectorInit,
  &ArrayIter::ImmMapInit,
  &ArrayIter::ImmSetInit,
  &ArrayIter::IteratorObjInit,
};
template <bool incRef>
void ArrayIter::objInit(ObjectData* obj) {
  assert(obj);
  setObject(obj);
  if (incRef) obj->incRefCount();
  auto i = obj->isCollection() ? ctype_index(obj->collectionType())
                               : MaxCollectionTypes;
  initFuncTable[i](this, obj);
}

void ArrayIter::cellInit(const Cell c) {
  assert(cellIsPlausible(c));
  if (LIKELY(c.m_type == KindOfArray)) {
    arrInit(c.m_data.parr);
  } else if (LIKELY(c.m_type == KindOfObject)) {
    objInit<true>(c.m_data.pobj);
  } else {
    arrInit(nullptr);
  }
}

void ArrayIter::destruct() {
  if (hasArrayData()) {
    const ArrayData* ad = getArrayData();
    if (debug) m_itype = TypeUndefined;
    if (ad) decRefArr(const_cast<ArrayData*>(ad));
    return;
  }
  ObjectData* obj = getObject();
  if (debug) m_itype = TypeUndefined;
  assert(obj);
  decRefObj(obj);
}

ArrayIter& ArrayIter::operator=(const ArrayIter& iter) {
  reset();
  m_data = iter.m_data;
  m_pos = iter.m_pos;
  m_version = iter.m_version;
  m_itype = iter.m_itype;
  m_nextHelperIdx = iter.m_nextHelperIdx;
  if (hasArrayData()) {
    const ArrayData* ad = getArrayData();
    if (ad) const_cast<ArrayData*>(ad)->incRefCount();
  } else {
    ObjectData* obj = getObject();
    assert(obj);
    obj->incRefCount();
  }
  return *this;
}

ArrayIter& ArrayIter::operator=(ArrayIter&& iter) {
  reset();
  m_data = iter.m_data;
  m_pos = iter.m_pos;
  m_version = iter.m_version;
  m_itype = iter.m_itype;
  m_nextHelperIdx = iter.m_nextHelperIdx;
  iter.m_data = nullptr;
  return *this;
}

bool ArrayIter::endHelper() const {
  auto obj = getObject();
  if (obj->isCollection()) {
    switch (obj->collectionType()) {
      case CollectionType::Vector:
        return m_pos >= static_cast<BaseVector*>(obj)->size();
      case CollectionType::Map:
      case CollectionType::ImmMap:
        return !static_cast<BaseMap*>(obj)->iter_valid(m_pos);
      case CollectionType::Set:
      case CollectionType::ImmSet:
        return !static_cast<BaseSet*>(obj)->iter_valid(m_pos);
      case CollectionType::Pair:
        return m_pos >= static_cast<c_Pair*>(obj)->size();
      case CollectionType::ImmVector:
        return m_pos >= static_cast<c_ImmVector*>(obj)->size();
    }
  } else {
    return !obj->o_invoke_few_args(s_valid, 0).toBoolean();
  }
  not_reached();
}
void ArrayIter::nextHelper() {
  auto obj = getObject();
  if (obj->isCollection()) {
    switch (obj->collectionType()) {
      case CollectionType::Pair:
      case CollectionType::ImmVector:
      case CollectionType::Vector:
        m_pos++;
        return;
      case CollectionType::Map:
      case CollectionType::ImmMap: {
        auto map = static_cast<BaseMap*>(obj);
        if (UNLIKELY(m_version != map->getVersion())) {
          throw_collection_modified();
        }
        m_pos = map->iter_next(m_pos);
        return;
      }
      case CollectionType::Set: {
        auto set = static_cast<BaseSet*>(obj);
        if (UNLIKELY(m_version != set->getVersion())) {
          throw_collection_modified();
        }
        m_pos = set->iter_next(m_pos);
        return;
      }
      case CollectionType::ImmSet: {
        auto set = static_cast<c_ImmSet*>(obj);
        assert(m_version == set->getVersion());
        m_pos = set->iter_next(m_pos);
        return;
      }
    }
  } else {
    obj->o_invoke_few_args(s_next, 0);
  }
}

Variant ArrayIter::firstHelper() {
  auto obj = getObject();
  if (obj->isCollection()) {
    switch (obj->collectionType()) {
      case CollectionType::Vector:
      case CollectionType::Pair:
      case CollectionType::ImmVector:
        return m_pos;
      case CollectionType::Map:
      case CollectionType::ImmMap: {
        auto map = static_cast<BaseMap*>(obj);
        if (UNLIKELY(m_version != map->getVersion())) {
          throw_collection_modified();
        }
        return map->iter_key(m_pos);
      }
      case CollectionType::Set: {
        auto set = static_cast<BaseSet*>(obj);
        if (UNLIKELY(m_version != set->getVersion())) {
          throw_collection_modified();
        }
        return set->iter_key(m_pos);
      }
      case CollectionType::ImmSet: {
        auto set = static_cast<c_ImmSet*>(obj);
        if (UNLIKELY(m_version != set->getVersion())) {
          throw_collection_modified();
        }
        return set->iter_key(m_pos);
      }
    }
  }
  return obj->o_invoke_few_args(s_key, 0);
}
Variant ArrayIter::second() {
  if (LIKELY(hasArrayData())) {
    const ArrayData* ad = getArrayData();
    assert(ad);
    assert(m_pos != ad->iter_end());
    return ad->getValue(m_pos);
  }
  auto obj = getObject();
  if (obj->isCollection()) {
    switch (obj->collectionType()) {
      case CollectionType::Vector: {
        auto vec = static_cast<BaseVector*>(obj);
        if (UNLIKELY(m_version != vec->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(vec->at(m_pos));
      }
      case CollectionType::Map:
      case CollectionType::ImmMap: {
        auto map = static_cast<BaseMap*>(obj);
        if (UNLIKELY(m_version != map->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(map->iter_value(m_pos));
      }
      case CollectionType::Set: {
        auto set = static_cast<BaseSet*>(obj);
        if (UNLIKELY(m_version != set->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(set->iter_value(m_pos));
      }
      case CollectionType::Pair: {
        auto pair = static_cast<c_Pair*>(obj);
        return tvAsCVarRef(pair->at(m_pos));
      }
      case CollectionType::ImmVector: {
        auto fvec = static_cast<c_ImmVector*>(obj);
        if (UNLIKELY(m_version != fvec->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(fvec->at(m_pos));
      }
      case CollectionType::ImmSet: {
        auto set = static_cast<c_ImmSet*>(obj);
        assert(m_version == set->getVersion());
        return tvAsCVarRef(set->iter_value(m_pos));
      }
    }
  }
  return obj->o_invoke_few_args(s_current, 0);
}

const Variant& ArrayIter::secondRef() {
  if (!hasArrayData()) {
    throw FatalErrorException("taking reference on iterator objects");
  }
  assert(hasArrayData());
  const ArrayData* ad = getArrayData();
  assert(ad);
  assert(m_pos != ad->iter_end());
  return ad->getValueRef(m_pos);
}

const Variant& ArrayIter::secondRefPlus() {
  if (LIKELY(hasArrayData())) {
    const ArrayData* ad = getArrayData();
    assert(ad);
    assert(m_pos != ad->iter_end());
    return ad->getValueRef(m_pos);
  }
  auto obj = getObject();
  if (obj->isCollection()) {
    switch (obj->collectionType()) {
      case CollectionType::Vector: {
        auto vec = static_cast<BaseVector*>(obj);
        if (UNLIKELY(m_version != vec->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(vec->at(m_pos));
      }
      case CollectionType::Map:
      case CollectionType::ImmMap: {
        auto map = static_cast<BaseMap*>(obj);
        if (UNLIKELY(m_version != map->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(map->iter_value(m_pos));
      }
      case CollectionType::Set: {
        auto set = static_cast<BaseSet*>(obj);
        if (UNLIKELY(m_version != set->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(set->iter_value(m_pos));
      }
      case CollectionType::Pair: {
        auto pair = static_cast<c_Pair*>(obj);
        return tvAsCVarRef(pair->at(m_pos));
      }
      case CollectionType::ImmVector: {
        auto fvec = static_cast<c_ImmVector*>(obj);
        if (UNLIKELY(m_version != fvec->getVersion())) {
          throw_collection_modified();
        }
        return tvAsCVarRef(fvec->at(m_pos));
      }
      case CollectionType::ImmSet: {
        auto set = static_cast<c_ImmSet*>(obj);
        assert(m_version == set->getVersion());
        return tvAsCVarRef(set->iter_value(m_pos));
      }
    }
  }
  throw_param_is_not_container();
  not_reached();
}
//
// Collection iterator specialized functions.
//

template<class Tuplish>
ArrayIter::ArrayIter(Tuplish* coll, Fixed)
  : m_pos(0), m_itype(ArrayIter::TypeIterator) {
  assert(coll);
  setObject(coll);
  // TODO Task #4204598: In theory, we might be able to squeeze out a win
  // here by not checking the version for immutable collections, but we'd
  // need to make sure all iteration implementations are consistent about
  // this.
  m_version = coll->getVersion();
}

template<class Vectorish>
ArrayIter::ArrayIter(Vectorish* coll, Versionable)
  : m_pos(0), m_itype(ArrayIter::TypeIterator) {
  assert(coll && coll->size() > 0);
  setObject(coll);
  m_version = coll->getVersion();
}

template<class Mappish>
ArrayIter::ArrayIter(Mappish* coll, VersionableSparse)
  : m_itype(ArrayIter::TypeIterator) {
  assert(coll && coll->size() > 0);
  setObject(coll);
  m_version = coll->getVersion();
  m_pos = coll->iter_begin();
}

template<class Tuplish>
ALWAYS_INLINE
bool ArrayIter::iterNext(Fixed) {
  return ++m_pos < static_cast<Tuplish*>(getObject())->size();
}

template<class Vectorish>
ALWAYS_INLINE
bool ArrayIter::iterNext(Versionable) {
  Vectorish* vec = static_cast<Vectorish*>(getObject());
  if (UNLIKELY(m_version != vec->getVersion())) {
    throw_collection_modified();
  }
  return ++m_pos < vec->size();
}

template<class Mappish>
ALWAYS_INLINE
bool ArrayIter::iterNext(VersionableSparse) {
  Mappish* coll = static_cast<Mappish*>(getObject());
  if (UNLIKELY(m_version != coll->getVersion())) {
    throw_collection_modified();
  }
  m_pos = coll->iter_next(m_pos);
  return coll->iter_valid(m_pos);
}

template<class Tuplish>
ALWAYS_INLINE
Variant ArrayIter::iterKey(Fixed) {
  return m_pos;
}

template<class Vectorish>
ALWAYS_INLINE
Variant ArrayIter::iterKey(Versionable) {
  return m_pos;
}

template<class Mappish>
ALWAYS_INLINE
Variant ArrayIter::iterKey(VersionableSparse) {
  return static_cast<Mappish*>(getObject())->iter_key(m_pos);
}

template<class Tuplish>
ALWAYS_INLINE
Variant ArrayIter::iterValue(Fixed) {
  return tvAsCVarRef(static_cast<Tuplish*>(getObject())->get(m_pos));
}

template<class Vectorish>
ALWAYS_INLINE
Variant ArrayIter::iterValue(Versionable) {
  return tvAsCVarRef(static_cast<Vectorish*>(getObject())->get(m_pos));
}

template<class Mappish>
ALWAYS_INLINE
Variant ArrayIter::iterValue(VersionableSparse) {
  return tvAsCVarRef(static_cast<Mappish*>(getObject())->iter_value(m_pos));
}
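
// As a rough guide to how these styles are used elsewhere in this file (see
// new_iter_object and iter_next_collection below): Pair and ImmVector are
// iterated with Fixed (a plain integer cursor, no version check on next),
// Vector with Versionable (integer cursor plus a version check), and
// Map/ImmMap/Set/ImmSet with VersionableSparse (cursor driven by
// iter_next/iter_valid plus a version check).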

//////////////////////////////////////////////////////////////////////

namespace {

// Handle the cases where we didn't have enough preallocated Ents in
// tl_miter_table, and we need to allocate from `extras'.
NEVER_INLINE
MIterTable::Ent* find_empty_strong_iter_slower() {
  return tl_miter_table.extras.find_unpopulated();
}

// Handle finding an empty strong iterator slot when the first slot
// was already in use.
NEVER_INLINE
MIterTable::Ent* find_empty_strong_iter_slow() {
#define X(i) \
  if (LIKELY(!tl_miter_table.ents[i].array)) return &tl_miter_table.ents[i];
  X(1);
  X(2);
  X(3);
  X(4);
  X(5);
  X(6);
  static_assert(tl_miter_table.ents.size() == 7, "");
#undef X
  return find_empty_strong_iter_slower();
}

// Find a strong iterator slot that is empty. Almost always the first
// one will be empty, so that path is inlined---everything else
// delegates to slow.
ALWAYS_INLINE
MIterTable::Ent* find_empty_strong_iter() {
  if (LIKELY(!tl_miter_table.ents[0].array)) {
    return &tl_miter_table.ents[0];
  }
  return find_empty_strong_iter_slow();
}

void newMArrayIter(MArrayIter* marr, ArrayData* ad) {
  assert(!marr->getContainer());
  auto const slot = find_empty_strong_iter();
  assert(!slot->array);
  slot->iter = marr;
  slot->array = ad;
  marr->setContainer(ad);
  marr->m_pos = ad->getPosition();
  assert(strong_iterators_exist());
}

template<class Cond>
void free_strong_iterator_impl(Cond cond) {
  assert(strong_iterators_exist());

  // We need to maintain the invariant that if there are any strong
  // iterators bound to arrays, one of the bindings is in slot zero.
  // This pvalid will point to something we can move into the first
  // slot if alreadyValid is false. If when we're done alreadyValid
  // is false, and pvalid is also nullptr, it means this function
  // freed the last strong iterator.
  MIterTable::Ent* pvalid = nullptr;
  bool alreadyValid = true;  // because strong_iterators_exist()

  auto rm = [&] (MIterTable::Ent& ent) {
    if (cond(ent)) {
      ent.iter->setContainer(nullptr);
      ent.array = nullptr;
      ent.iter = nullptr;
    } else if (!alreadyValid && ent.array) {
      pvalid = &ent;
    }
  };

  if (cond(tl_miter_table.ents[0])) {
    tl_miter_table.ents[0].iter->setContainer(nullptr);
    tl_miter_table.ents[0].array = nullptr;
    tl_miter_table.ents[0].iter = nullptr;
    alreadyValid = false;
  }
  rm(tl_miter_table.ents[1]);
  rm(tl_miter_table.ents[2]);
  rm(tl_miter_table.ents[3]);
  rm(tl_miter_table.ents[4]);
  rm(tl_miter_table.ents[5]);
  rm(tl_miter_table.ents[6]);
  static_assert(tl_miter_table.ents.size() == 7, "");

  if (UNLIKELY(pvalid != nullptr)) {
    std::swap(*pvalid, tl_miter_table.ents[0]);
    alreadyValid = true;
  }
  if (LIKELY(tl_miter_table.extras.empty())) return;

  tl_miter_table.extras.release_if([&] (const MIterTable::Ent& e) {
    if (cond(e)) {
      e.iter->setContainer(nullptr);
      return true;
    }
    return false;
  });

  // If we didn't manage to keep something in the first non-extra
  // slot, scan extras again to swap something over.
  if (LIKELY(alreadyValid)) return;
  if (!tl_miter_table.extras.empty()) {
    tl_miter_table.extras.visit_to_remove(
      [&] (const MIterTable::Ent& ent) {
        tl_miter_table.ents[0] = ent;
      }
    );
  }
}

void freeMArrayIter(MArrayIter* marr) {
  assert(strong_iterators_exist());
  free_strong_iterator_impl(
    [marr] (const MIterTable::Ent& e) {
      return e.iter == marr;
    }
  );
}

}

void free_strong_iterators(ArrayData* ad) {
  free_strong_iterator_impl([ad] (const MIterTable::Ent& e) {
    return e.array == ad;
  });
}

/*
 * This function returns its first argument so that in some cases we
 * can do tail calls (or maybe avoid spills).
 *
 * Note that in some cases reusing the return value can be (very
 * slightly) worse. The compiler won't know that the return value is
 * going to be the same as the argument, so if it didn't already have
 * to spill to make the call, or it can't tail call for some other
 * reason, you can cause an extra move after the return.
 */
ArrayData* move_strong_iterators(ArrayData* dst, ArrayData* src) {
  for_each_strong_iterator([&] (MIterTable::Ent& ent) {
    if (ent.array == src) {
      ent.array = dst;
      ent.iter->setContainer(dst);
    }
  });
  return dst;
}
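
// A minimal usage sketch (hypothetical caller, not part of this file): code
// that replaces one array with another while strong iterators may be bound
// to it can write `return move_strong_iterators(newArr, oldArr);`, letting
// the rebinding and the return share a tail call as described above.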

//////////////////////////////////////////////////////////////////////

MArrayIter::MArrayIter(RefData* ref)
  : m_pos(0)
  , m_container(nullptr)
  , m_resetFlag(false)
{
  ref->incRefCount();
  setRef(ref);
  assert(hasRef());
  escalateCheck();
  auto const data = cowCheck();
  if (!data) return;
  data->reset();
  newMArrayIter(this, data);
  setResetFlag(true);
  data->next();
  assert(getContainer() == data);
}

MArrayIter::MArrayIter(ArrayData* data)
  : m_ref(nullptr)
  , m_pos(0)
  , m_container(nullptr)
  , m_resetFlag(false)
{
  if (!data) return;
  assert(!data->isStatic());
  setAd(data);
  escalateCheck();
  data = cowCheck();
  data->reset();
  newMArrayIter(this, data);
  setResetFlag(true);
  data->next();
  assert(getContainer() == data);
}

MArrayIter::~MArrayIter() {
  auto const container = getContainer();
  if (container) {
    freeMArrayIter(this);
    assert(getContainer() == nullptr);
  }
  if (hasRef()) {
    decRefRef(getRef());
  } else if (hasAd()) {
    decRefArr(getAd());
  }
}

bool MArrayIter::end() const {
  return !const_cast<MArrayIter*>(this)->prepare();
}

bool MArrayIter::advance() {
  ArrayData* data = getArray();
  ArrayData* container = getContainer();
  if (!data) {
    if (container) {
      freeMArrayIter(this);
    }
    setResetFlag(false);
    return false;
  }
  if (container == data) {
    return cowCheck()->advanceMArrayIter(*this);
  }
  data = reregister();
  assert(data && data == getContainer());
  assert(!getResetFlag());
  if (!data->validMArrayIter(*this)) return false;
  // To conform to PHP behavior, we need to set the internal
  // cursor to point to the next element.
  data->next();
  return true;
}

bool MArrayIter::prepare() {
  ArrayData* data = getArray();
  ArrayData* container = getContainer();
  if (!data) {
    if (container) {
      freeMArrayIter(this);
    }
    setResetFlag(false);
    return false;
  }
  if (container != data) {
    data = reregister();
  }
  return data->validMArrayIter(*this);
}

void MArrayIter::escalateCheck() {
  if (hasRef()) {
    auto const data = getData();
    if (!data) return;
    auto const esc = data->escalate();
    if (data != esc) {
      cellSet(make_tv<KindOfArray>(esc), *getRef()->tv());
    }
    return;
  }

  assert(hasAd());
  auto const data = getAd();
  auto const esc = data->escalate();
  if (data != esc) {
    esc->incRefCount();
    decRefArr(data);
    setAd(esc);
  }
}

ArrayData* MArrayIter::cowCheck() {
  if (hasRef()) {
    auto data = getData();
    if (!data) return nullptr;
    if (data->hasMultipleRefs() && !data->noCopyOnWrite()) {
      data = data->copyWithStrongIterators();
      cellSet(make_tv<KindOfArray>(data), *getRef()->tv());
    }
    return data;
  }

  assert(hasAd());
  auto const data = getAd();
  if (data->hasMultipleRefs() && !data->noCopyOnWrite()) {
    ArrayData* copied = data->copyWithStrongIterators();
    copied->incRefCount();
    decRefArr(data);
    setAd(copied);
    return copied;
  }
  return data;
}

ArrayData* MArrayIter::reregister() {
  ArrayData* container = getContainer();
  assert(getArray() != nullptr && container != getArray());
  if (container != nullptr) {
    freeMArrayIter(this);
  }
  setResetFlag(false);
  assert(getContainer() == nullptr);
  escalateCheck();
  ArrayData* data = cowCheck();
  newMArrayIter(this, data);
  return data;
}

//////////////////////////////////////////////////////////////////////

CufIter::~CufIter() {
  if (m_ctx && !(uintptr_t(m_ctx) & 1)) {
    decRefObj((ObjectData*)m_ctx);
  }
  if (m_name) decRefStr(m_name);
}
bool Iter::init(TypedValue* c1) {
  assert(c1->m_type != KindOfRef);
  bool hasElems = true;
  if (c1->m_type == KindOfArray) {
    if (!c1->m_data.parr->empty()) {
      (void) new (&arr()) ArrayIter(c1->m_data.parr);
      arr().setIterType(ArrayIter::TypeArray);
    } else {
      hasElems = false;
    }
  } else if (c1->m_type == KindOfObject) {
    bool isIterator;
    if (c1->m_data.pobj->isCollection()) {
      isIterator = true;
      (void) new (&arr()) ArrayIter(c1->m_data.pobj);
    } else {
      Object obj = c1->m_data.pobj->iterableObject(isIterator);
      if (isIterator) {
        (void) new (&arr()) ArrayIter(obj.detach(), ArrayIter::noInc);
      } else {
        Class* ctx = arGetContextClass(vmfp());
        auto ctxStr = ctx ? ctx->nameStr() : StrNR();
        Array iterArray(obj->o_toIterArray(ctxStr, ObjectData::EraseRefs));
        ArrayData* ad = iterArray.get();
        (void) new (&arr()) ArrayIter(ad);
      }
    }
    try {
      if (arr().end()) {
        // Iterator was empty; call the destructor on the iterator we
        // just constructed and branch to done case
        arr().~ArrayIter();
        hasElems = false;
      } else {
        arr().setIterType(
          isIterator ? ArrayIter::TypeIterator : ArrayIter::TypeArray);
      }
    } catch (...) {
      arr().~ArrayIter();
      throw;
    }
  } else {
    raise_warning("Invalid argument supplied for foreach()");
    hasElems = false;
  }
  return hasElems;
}

bool Iter::next() {
  assert(arr().getIterType() == ArrayIter::TypeArray ||
         arr().getIterType() == ArrayIter::TypeIterator);
  // The emitter should never generate bytecode where the iterator
  // is at the end before IterNext is executed. However, even if
  // the iterator is at the end, it is safe to call next().
  ArrayIter* ai = &arr();
  ai->next();
  if (ai->end()) {
    // If after advancing the iterator we have reached the end, free
    // the iterator and fall through to the next instruction.
    // The ArrayIter destructor will decRef the array.
    ai->~ArrayIter();
    return false;
  }
  // If after advancing the iterator we have not reached the end,
  // jump to the location specified by the second immediate argument.
  return true;
}

void Iter::free() {
  assert(arr().getIterType() == ArrayIter::TypeArray ||
         arr().getIterType() == ArrayIter::TypeIterator);
  arr().~ArrayIter();
}

void Iter::mfree() {
  marr().~MArrayIter();
}

void Iter::cfree() {
  cuf().~CufIter();
}

/**
 * Helper functions for collection style iterators.
 * Iterators over collections are never by-ref so there is no reason to
 * unbox any value.
 * Templates are instantiated over the collection class and the iterator
 * style. See the definition of Fixed, Versionable and VersionableSparse
 * in the header for details.
 * IterInit and IterNext can be called directly from the JIT for specialized
 * iterators.
 */
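// For instance, the collection dispatch later in this file instantiates
// these helpers as iterInit<c_Vector, ArrayIter::Versionable>(dest, vec,
// valOut, keyOut) and iterNext<BaseMap, ArrayIter::VersionableSparse>(ai,
// valOut, keyOut); see new_iter_object and iter_next_collection below.
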
template<class Coll, class Style>
static void iterValue(ArrayIter* iter, TypedValue* out) {
  Variant val = iter->iterValue<Coll>(Style());
  assert(val.getRawType() != KindOfRef);
  cellDup(*val.asTypedValue(), *out);
}

template<class Coll, class Style>
static void iterKey(ArrayIter* iter, TypedValue* out) {
  Variant key = iter->iterKey<Coll>(Style());
  cellDup(*key.asTypedValue(), *out);
}

template<class Coll, class Style>
static int64_t iterInit(Iter* dest, Coll* coll,
                        TypedValue* valOut, TypedValue* keyOut) {
  int64_t size = coll->size();
  if (UNLIKELY(size == 0)) {
    decRefObj(coll);
    return 0LL;
  }
  (void) new (&dest->arr()) ArrayIter(coll, Style());

  DataType vType = valOut->m_type;
  assert(vType != KindOfRef);
  uint64_t vDatum = valOut->m_data.num;
  iterValue<Coll, Style>(&dest->arr(), valOut);
  tvRefcountedDecRefHelper(vType, vDatum);

  if (keyOut) {
    DataType kType = keyOut->m_type;
    uint64_t kDatum = keyOut->m_data.num;
    iterKey<Coll, Style>(&dest->arr(), keyOut);
    tvRefcountedDecRefHelper(kType, kDatum);
  }
  return 1LL;
}

template<class Coll, class Style>
static
int64_t iterNext(ArrayIter* iter, TypedValue* valOut, TypedValue* keyOut) {
  if (!iter->iterNext<Coll>(Style())) {
    iter->~ArrayIter();
    return 0LL;
  }

  DataType vType = valOut->m_type;
  assert(vType != KindOfRef);
  uint64_t vDatum = valOut->m_data.num;
  iterValue<Coll, Style>(iter, valOut);
  tvRefcountedDecRefHelper(vType, vDatum);

  if (keyOut) {
    DataType kType = keyOut->m_type;
    uint64_t kDatum = keyOut->m_data.num;
    iterKey<Coll, Style>(iter, keyOut);
    tvRefcountedDecRefHelper(kType, kDatum);
  }
  return 1LL;
}

/*
 * iter_value_cell* will store a copy of the current value at the address
 * given by 'out'. iter_value_cell* will increment the refcount of the current
 * value if appropriate.
 */
template <bool typeArray, bool withRef>
static inline void iter_value_cell_local_impl(Iter* iter, TypedValue* out) {
  DataType oldType = out->m_type;
  assert(withRef || oldType != KindOfRef);
  uint64_t oldDatum = out->m_data.num;
  TRACE(2, "%s: typeArray: %s, I %p, out %p\n",
        __func__, typeArray ? "true" : "false", iter, out);
  assert((typeArray && iter->arr().getIterType() == ArrayIter::TypeArray) ||
         (!typeArray && iter->arr().getIterType() == ArrayIter::TypeIterator));
  ArrayIter& arrIter = iter->arr();
  if (typeArray) {
    auto const cur = arrIter.nvSecond();
    if (cur->m_type == KindOfRef) {
      if (!withRef || !cur->m_data.pref->isReferenced()) {
        cellDup(*(cur->m_data.pref->tv()), *out);
      } else {
        refDup(*cur, *out);
      }
    } else {
      cellDup(*cur, *out);
    }
  } else {
    Variant val = arrIter.second();
    assert(val.getRawType() != KindOfRef);
    cellDup(*val.asTypedValue(), *out);
  }
  tvRefcountedDecRefHelper(oldType, oldDatum);
}

template <bool typeArray, bool withRef>
static inline void iter_key_cell_local_impl(Iter* iter, TypedValue* out) {
  DataType oldType = out->m_type;
  assert(withRef || oldType != KindOfRef);
  uint64_t oldDatum = out->m_data.num;
  TRACE(2, "%s: I %p, out %p\n", __func__, iter, out);
  assert((typeArray && iter->arr().getIterType() == ArrayIter::TypeArray) ||
         (!typeArray && iter->arr().getIterType() == ArrayIter::TypeIterator));
  ArrayIter& arr = iter->arr();
  if (typeArray) {
    arr.nvFirst(out);
  } else {
    Variant key = arr.first();
    cellDup(*key.asTypedValue(), *out);
  }
  tvRefcountedDecRefHelper(oldType, oldDatum);
}

static NEVER_INLINE
int64_t iter_next_free_packed(Iter* iter, ArrayData* arr) {
  assert(arr->hasExactlyOneRef());
  assert(arr->isPacked());
  // Use non-specialized release call so ArrayTracer can track its destruction
  arr->release();
  if (debug) {
    iter->arr().setIterType(ArrayIter::TypeUndefined);
  }
  return 0;
}

static NEVER_INLINE
int64_t iter_next_free_struct(Iter* iter, ArrayData* arr) {
  assert(arr->hasExactlyOneRef());
  assert(arr->isStruct());
  // Use non-specialized release call so ArrayTracer can track its destruction
  arr->release();
  if (debug) {
    iter->arr().setIterType(ArrayIter::TypeUndefined);
  }
  return 0;
}

static NEVER_INLINE
int64_t iter_next_free_mixed(Iter* iter, ArrayData* arr) {
  assert(arr->isMixed());
  assert(arr->hasExactlyOneRef());
  // Use non-specialized release call so ArrayTracer can track its destruction
  arr->release();
  if (debug) {
    iter->arr().setIterType(ArrayIter::TypeUndefined);
  }
  return 0;
}

NEVER_INLINE
static int64_t iter_next_free_apc(Iter* iter, APCLocalArray* arr) {
  assert(arr->hasExactlyOneRef());
  APCLocalArray::Release(arr->asArrayData());
  if (debug) {
    iter->arr().setIterType(ArrayIter::TypeUndefined);
  }
  return 0;
}

/*
 * new_iter_array creates an iterator for the specified array iff the
 * array is not empty. If new_iter_array creates an iterator, it does
 * not increment the refcount of the specified array. If
 * new_iter_array does not create an iterator, it decRefs the array.
 */
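// Put differently, the caller's reference on the array is always consumed:
// on the non-empty path the new iterator takes the reference over without
// an incRef, and on the empty path the array is decRef'd (or released when
// it had exactly one reference) before 0 is returned.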
template <bool withRef>
NEVER_INLINE
int64_t new_iter_array_cold(Iter* dest, ArrayData* arr, TypedValue* valOut,
                            TypedValue* keyOut) {
  TRACE(2, "%s: I %p, arr %p\n", __func__, dest, arr);
  if (!withRef) {
    valOut = tvToCell(valOut);
    if (keyOut) keyOut = tvToCell(keyOut);
  }
  if (!arr->empty()) {
    // We are transferring ownership of the array to the iterator, therefore
    // we do not need to adjust the refcount.
    (void) new (&dest->arr()) ArrayIter(arr, ArrayIter::noInc);
    dest->arr().setIterType(ArrayIter::TypeArray);
    iter_value_cell_local_impl<true, withRef>(dest, valOut);
    if (keyOut) {
      iter_key_cell_local_impl<true, withRef>(dest, keyOut);
    }
    return 1LL;
  }
  // We did not transfer ownership of the array to an iterator, so we need
  // to decRef the array.
  decRefArr(arr);
  return 0LL;
}

int64_t new_iter_array(Iter* dest, ArrayData* ad, TypedValue* valOut) {
  TRACE(2, "%s: I %p, ad %p\n", __func__, dest, ad);
  if (UNLIKELY(ad->getSize() == 0)) {
    if (UNLIKELY(ad->hasExactlyOneRef())) {
      if (ad->isPacked()) return iter_next_free_packed(dest, ad);
      if (ad->isMixed()) return iter_next_free_mixed(dest, ad);
      if (ad->isStruct()) return iter_next_free_struct(dest, ad);
    }
    ad->decRefCount();
    return 0;
  }
  if (UNLIKELY(IS_REFCOUNTED_TYPE(valOut->m_type))) {
    return new_iter_array_cold<false>(dest, ad, valOut, nullptr);
  }

  // We are transferring ownership of the array to the iterator, therefore
  // we do not need to adjust the refcount.
  auto& aiter = dest->arr();
  aiter.m_data = ad;
  auto const itypeU32 = static_cast<uint32_t>(ArrayIter::TypeArray);

  if (LIKELY(ad->isPacked())) {
    aiter.m_pos = 0;
    aiter.m_itypeAndNextHelperIdx =
      static_cast<uint32_t>(IterNextIndex::ArrayPacked) << 16 | itypeU32;
    assert(aiter.m_itype == ArrayIter::TypeArray);
    assert(aiter.m_nextHelperIdx == IterNextIndex::ArrayPacked);
    cellDup(*tvToCell(packedData(ad)), *valOut);
    return 1;
  }

  if (LIKELY(ad->isMixed())) {
    auto const mixed = MixedArray::asMixed(ad);
    aiter.m_pos = mixed->getIterBegin();
    aiter.m_itypeAndNextHelperIdx =
      static_cast<uint32_t>(IterNextIndex::ArrayMixed) << 16 | itypeU32;
    assert(aiter.m_itype == ArrayIter::TypeArray);
    assert(aiter.m_nextHelperIdx == IterNextIndex::ArrayMixed);
    mixed->getArrayElm(aiter.m_pos, valOut);
    return 1;
  }

  if (ad->isStruct()) {
    aiter.m_pos = 0;
    aiter.m_itypeAndNextHelperIdx =
      static_cast<uint32_t>(IterNextIndex::ArrayStruct) << 16 | itypeU32;
    assert(aiter.m_itype == ArrayIter::TypeArray);
    assert(aiter.m_nextHelperIdx == IterNextIndex::ArrayStruct);
    cellDup(*tvToCell(StructArray::asStructArray(ad)->data()), *valOut);
    return 1;
  }

  return new_iter_array_cold<false>(dest, ad, valOut, nullptr);
}

template<bool WithRef>
int64_t new_iter_array_key(Iter* dest,
                           ArrayData* ad,
                           TypedValue* valOut,
                           TypedValue* keyOut) {
  if (UNLIKELY(ad->getSize() == 0)) {
    if (UNLIKELY(ad->hasExactlyOneRef())) {
      if (ad->isPacked()) return iter_next_free_packed(dest, ad);
      if (ad->isMixed()) return iter_next_free_mixed(dest, ad);
      if (ad->isStruct()) return iter_next_free_struct(dest, ad);
    }
    ad->decRefCount();
    return 0;
  }
  if (UNLIKELY(IS_REFCOUNTED_TYPE(valOut->m_type))) {
    return new_iter_array_cold<WithRef>(dest, ad, valOut, keyOut);
  }
  if (UNLIKELY(IS_REFCOUNTED_TYPE(keyOut->m_type))) {
    return new_iter_array_cold<WithRef>(dest, ad, valOut, keyOut);
  }

  // We are transferring ownership of the array to the iterator, therefore
  // we do not need to adjust the refcount.
  auto& aiter = dest->arr();
  aiter.m_data = ad;
  auto const itypeU32 = static_cast<uint32_t>(ArrayIter::TypeArray);

  if (ad->isPacked()) {
    aiter.m_pos = 0;
    aiter.m_itypeAndNextHelperIdx =
      static_cast<uint32_t>(IterNextIndex::ArrayPacked) << 16 | itypeU32;
    assert(aiter.m_itype == ArrayIter::TypeArray);
    assert(aiter.m_nextHelperIdx == IterNextIndex::ArrayPacked);
    if (WithRef) {
      tvDupWithRef(*packedData(ad), *valOut);
    } else {
      cellDup(*tvToCell(packedData(ad)), *valOut);
    }
    keyOut->m_type = KindOfInt64;
    keyOut->m_data.num = 0;
    return 1;
  }

  if (ad->isMixed()) {
    auto const mixed = MixedArray::asMixed(ad);
    aiter.m_pos = mixed->getIterBegin();
    aiter.m_itypeAndNextHelperIdx =
      static_cast<uint32_t>(IterNextIndex::ArrayMixed) << 16 | itypeU32;
    assert(aiter.m_itype == ArrayIter::TypeArray);
    assert(aiter.m_nextHelperIdx == IterNextIndex::ArrayMixed);
    if (WithRef) {
      mixed->dupArrayElmWithRef(aiter.m_pos, valOut, keyOut);
    } else {
      mixed->getArrayElm(aiter.m_pos, valOut, keyOut);
    }
    return 1;
  }

  if (ad->isStruct()) {
    aiter.m_pos = 0;
    aiter.m_itypeAndNextHelperIdx =
      static_cast<uint32_t>(IterNextIndex::ArrayStruct) << 16 | itypeU32;
    assert(aiter.m_itype == ArrayIter::TypeArray);
    assert(aiter.m_nextHelperIdx == IterNextIndex::ArrayStruct);
    auto structArray = StructArray::asStructArray(ad);
    if (WithRef) {
      tvDupWithRef(*structArray->data(), *valOut);
    } else {
      cellDup(*tvToCell(structArray->data()), *valOut);
    }
    keyOut->m_type = KindOfStaticString;
    keyOut->m_data.pstr = const_cast<StringData*>(
      structArray->shape()->keyForOffset(0));
    return 1;
  }

  return new_iter_array_cold<WithRef>(dest, ad, valOut, keyOut);
}

template int64_t new_iter_array_key<false>(Iter* dest, ArrayData* ad,
                                           TypedValue* valOut,
                                           TypedValue* keyOut);
template int64_t new_iter_array_key<true>(Iter* dest, ArrayData* ad,
                                          TypedValue* valOut,
                                          TypedValue* keyOut);

class FreeObj {
 public:
  FreeObj() : m_obj(0) {}
  void operator=(ObjectData* obj) { m_obj = obj; }
  ~FreeObj() { if (UNLIKELY(m_obj != nullptr)) decRefObj(m_obj); }
 private:
  ObjectData* m_obj;
};
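
// FreeObj is a small RAII guard: assigning an ObjectData* to it schedules a
// decRefObj for when the enclosing scope ends. new_iter_object_any below
// relies on this to defer releasing `obj` until after the call to
// arr().end(), as explained in the comment inside that function.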

/**
 * new_iter_object_any creates an iterator for the specified object if the
 * object is iterable and it is non-empty (has properties). If
 * new_iter_object_any creates an iterator, it does not increment the refcount
 * of the specified object. If new_iter_object does not create an iterator,
 * it decRefs the object.
 *
 * If exceptions are thrown, new_iter_object_any takes care of decRefing the
 * object.
 */
static int64_t new_iter_object_any(Iter* dest, ObjectData* obj, Class* ctx,
                                   TypedValue* valOut, TypedValue* keyOut) {
  ArrayIter::Type itType;
  {
    FreeObj fo;
    if (obj->isIterator()) {
      TRACE(2, "%s: I %p, obj %p, ctx %p, collection or Iterator\n",
            __func__, dest, obj, ctx);
      (void) new (&dest->arr()) ArrayIter(obj, ArrayIter::noInc);
      itType = ArrayIter::TypeIterator;
    } else {
      bool isIteratorAggregate;
      /*
       * We are not going to transfer ownership of obj to the iterator,
       * so arrange to decRef it later. The actual decRef has to happen
       * after the call to arr().end() below, because both can have visible
       * side effects (calls to __destruct() and valid()). Similarly it has
       * to happen before the iter_*_cell_local_impl calls below, because
       * they call current() and key() (hence the explicit scope around
       * FreeObj fo;)
       */
      fo = obj;
      Object itObj = obj->iterableObject(isIteratorAggregate, false);
      if (isIteratorAggregate) {
        TRACE(2, "%s: I %p, obj %p, ctx %p, IteratorAggregate\n",
              __func__, dest, obj, ctx);
        (void) new (&dest->arr()) ArrayIter(itObj.detach(), ArrayIter::noInc);
        itType = ArrayIter::TypeIterator;
      } else {
        TRACE(2, "%s: I %p, obj %p, ctx %p, iterate as array\n",
              __func__, dest, obj, ctx);
        auto ctxStr = ctx ? ctx->nameStr() : StrNR();
        Array iterArray(itObj->o_toIterArray(ctxStr, ObjectData::EraseRefs));
        ArrayData* ad = iterArray.get();
        (void) new (&dest->arr()) ArrayIter(ad);
        itType = ArrayIter::TypeArray;
      }
    }
    try {
      if (dest->arr().end()) {
        // Iterator was empty; call the destructor on the iterator we just
        // constructed.
        dest->arr().~ArrayIter();
        return 0LL;
      }
    } catch (...) {
      dest->arr().~ArrayIter();
      throw;
    }
  }

  dest->arr().setIterType(itType);
  if (itType == ArrayIter::TypeIterator) {
    iter_value_cell_local_impl<false, false>(dest, valOut);
    if (keyOut) {
      iter_key_cell_local_impl<false, false>(dest, keyOut);
    }
  } else {
    iter_value_cell_local_impl<true, false>(dest, valOut);
    if (keyOut) {
      iter_key_cell_local_impl<true, false>(dest, keyOut);
    }
  }
  return 1LL;
}

int64_t new_iter_object(Iter* dest, ObjectData* obj, Class* ctx,
                        TypedValue* valOut, TypedValue* keyOut) {
  TRACE(2, "%s: I %p, obj %p, ctx %p, collection or Iterator or Object\n",
        __func__, dest, obj, ctx);
  valOut = tvToCell(valOut);
  if (keyOut) {
    keyOut = tvToCell(keyOut);
  }
  if (obj->isCollection()) {
    auto type = obj->collectionType();
    switch (type) {
      case CollectionType::Vector:
        return iterInit<c_Vector, ArrayIter::Versionable>(
          dest, static_cast<c_Vector*>(obj),
          valOut, keyOut);
      case CollectionType::Map:
      case CollectionType::ImmMap:
        return iterInit<BaseMap, ArrayIter::VersionableSparse>(
          dest,
          static_cast<BaseMap*>(obj),
          valOut, keyOut);
      case CollectionType::Set:
        return iterInit<c_Set, ArrayIter::VersionableSparse>(
          dest,
          static_cast<c_Set*>(obj),
          valOut, keyOut);
      case CollectionType::Pair:
        return iterInit<c_Pair, ArrayIter::Fixed>(
          dest,
          static_cast<c_Pair*>(obj),
          valOut, keyOut);
      case CollectionType::ImmVector:
        return iterInit<c_ImmVector, ArrayIter::Fixed>(
          dest, static_cast<c_ImmVector*>(obj),
          valOut, keyOut);
      case CollectionType::ImmSet:
        return iterInit<c_ImmSet, ArrayIter::VersionableSparse>(
          dest,
          static_cast<c_ImmSet*>(obj),
          valOut, keyOut);
    }
  }
  return new_iter_object_any(dest, obj, ctx, valOut, keyOut);
  not_reached();
}
template <bool withRef>
NEVER_INLINE
static int64_t iter_next_collection(ArrayIter* ai,
                                    TypedValue* valOut,
                                    TypedValue* keyOut,
                                    CollectionType type) {
  assert(!ai->hasArrayData());
  assert(isValidCollection(type));
  switch (type) {
    case CollectionType::Vector:
      return iterNext<c_Vector, ArrayIter::Versionable>(
        ai, valOut, keyOut);
    case CollectionType::Map:
    case CollectionType::ImmMap:
      return iterNext<BaseMap, ArrayIter::VersionableSparse>(
        ai, valOut, keyOut);
    case CollectionType::Set:
      return iterNext<c_Set, ArrayIter::VersionableSparse>(
        ai, valOut, keyOut);
    case CollectionType::Pair:
      return iterNext<c_Pair, ArrayIter::Fixed>(
        ai, valOut, keyOut);
    case CollectionType::ImmVector:
      return iterNext<c_ImmVector, ArrayIter::Fixed>(
        ai, valOut, keyOut);
    case CollectionType::ImmSet:
      return iterNext<c_ImmSet, ArrayIter::VersionableSparse>(
        ai, valOut, keyOut);
  }
  not_reached();
}

template <bool withRef>
NEVER_INLINE
int64_t iter_next_cold(Iter* iter, TypedValue* valOut, TypedValue* keyOut) {
  auto const ai = &iter->arr();
  assert(ai->getIterType() == ArrayIter::TypeArray ||
         ai->getIterType() == ArrayIter::TypeIterator);
  if (UNLIKELY(!ai->hasArrayData())) {
    auto obj = ai->getObject();
    if (UNLIKELY(obj->isCollection())) {
      auto const coll = obj->collectionType();
      return iter_next_collection<withRef>(ai, valOut, keyOut, coll);
    }
  }
  ai->next();
  if (ai->end()) {
    // The ArrayIter destructor will decRef the array
    ai->~ArrayIter();
    return 0;
  }
  if (iter->arr().getIterType() == ArrayIter::TypeArray) {
    iter_value_cell_local_impl<true, withRef>(iter, valOut);
    if (keyOut) {
      iter_key_cell_local_impl<true, withRef>(iter, keyOut);
    }
  } else {
    iter_value_cell_local_impl<false, withRef>(iter, valOut);
    if (keyOut) {
      iter_key_cell_local_impl<false, withRef>(iter, keyOut);
    }
  }
  return 1;
}

NEVER_INLINE
static int64_t iter_next_apc_array(Iter* iter,
                                   TypedValue* valOut,
                                   TypedValue* keyOut,
                                   ArrayData* ad) {
  assert(ad->kind() == ArrayData::kApcKind);

  auto const arrIter = &iter->arr();
  auto const arr = APCLocalArray::asApcArray(ad);
  ssize_t const pos = arr->iterAdvanceImpl(arrIter->getPos());
  if (UNLIKELY(pos == ad->getSize())) {
    if (UNLIKELY(arr->hasExactlyOneRef())) {
      return iter_next_free_apc(iter, arr);
    }
    arr->decRefCount();
    if (debug) {
      iter->arr().setIterType(ArrayIter::TypeUndefined);
    }
    return 0;
  }
  arrIter->setPos(pos);

  // Note that APCLocalArray can never return KindOfRefs.
  const Variant& var = APCLocalArray::GetValueRef(arr->asArrayData(), pos);
  assert(var.asTypedValue()->m_type != KindOfRef);
  cellSet(*var.asTypedValue(), *valOut);
  if (LIKELY(!keyOut)) return 1;

  Cell key;
  APCLocalArray::NvGetKey(ad, &key, pos);
  auto const keyType = keyOut->m_type;
  auto const keyDatum = keyOut->m_data.num;
  cellCopy(key, *keyOut);
  tvRefcountedDecRefHelper(keyType, keyDatum);
  return 1;
}

int64_t witer_next_key(Iter* iter, TypedValue* valOut, TypedValue* keyOut) {
  TRACE(2, "iter_next_key: I %p\n", iter);
  assert(iter->arr().getIterType() == ArrayIter::TypeArray ||
         iter->arr().getIterType() == ArrayIter::TypeIterator);
  auto const arrIter = &iter->arr();
  if (UNLIKELY(!arrIter->hasArrayData())) {
    goto cold;
  }
  {
    auto const ad = const_cast<ArrayData*>(arrIter->getArrayData());
    auto const isPacked = ad->isPacked();
    auto const isMixed = ad->isMixed();
    auto const isStruct = ad->isStruct();
    if (UNLIKELY(!isMixed && !isStruct && !isPacked)) {
      if (ad->isApcArray()) {
        // TODO(#4055855): what if a local value in an apc array has
        // been turned into a ref? Is this actually ok to do?
        return iter_next_apc_array(iter, valOut, keyOut, ad);
      }
      goto cold;
    }

    if (LIKELY(isPacked)) {
      ssize_t pos = arrIter->getPos() + 1;
      if (size_t(pos) >= size_t(ad->getSize())) {
        if (UNLIKELY(ad->hasExactlyOneRef())) {
          return iter_next_free_packed(iter, ad);
        }
        ad->decRefCount();
        if (debug) {
          iter->arr().setIterType(ArrayIter::TypeUndefined);
        }
        return 0;
      }
      if (UNLIKELY(tvDecRefWillCallHelper(valOut)) ||
          UNLIKELY(tvDecRefWillCallHelper(keyOut))) {
        goto cold;
      }
      tvDecRefOnly(valOut);
      tvDecRefOnly(keyOut);

      arrIter->setPos(pos);
      tvDupWithRef(packedData(ad)[pos], *valOut);
      keyOut->m_type = KindOfInt64;
      keyOut->m_data.num = pos;
      return 1;
    }

    if (isStruct) {
      ssize_t pos = arrIter->getPos() + 1;
      if (size_t(pos) >= size_t(ad->getSize())) {
        if (UNLIKELY(ad->hasExactlyOneRef())) {
          return iter_next_free_struct(iter, ad);
        }
        ad->decRefCount();
        if (debug) {
          iter->arr().setIterType(ArrayIter::TypeUndefined);
        }
        return 0;
      }
      if (UNLIKELY(tvDecRefWillCallHelper(valOut)) ||
          UNLIKELY(tvDecRefWillCallHelper(keyOut))) {
        goto cold;
      }
      tvDecRefOnly(valOut);
      tvDecRefOnly(keyOut);

      auto structArray = StructArray::asStructArray(ad);
      arrIter->setPos(pos);
      tvDupWithRef(structArray->data()[pos], *valOut);
      keyOut->m_type = KindOfStaticString;
      keyOut->m_data.pstr = const_cast<StringData*>(
        structArray->shape()->keyForOffset(pos));
      return 1;
    }

    auto const mixed = MixedArray::asMixed(ad);
    ssize_t pos = arrIter->getPos();
    do {
      ++pos;
      if (size_t(pos) >= size_t(mixed->iterLimit())) {
        if (UNLIKELY(mixed->hasExactlyOneRef())) {
          return iter_next_free_mixed(iter, mixed->asArrayData());
        }
        mixed->decRefCount();
        if (debug) {
          iter->arr().setIterType(ArrayIter::TypeUndefined);
        }
        return 0;
      }
    } while (UNLIKELY(mixed->isTombstone(pos)));
    if (UNLIKELY(tvDecRefWillCallHelper(valOut)) ||
        UNLIKELY(tvDecRefWillCallHelper(keyOut))) {
      goto cold;
    }