PageRenderTime 79ms CodeModel.GetById 22ms RepoModel.GetById 1ms app.codeStats 1ms

/erts/emulator/beam/beam_emu.c

https://github.com/erlang/otp
C | 7154 lines | 5270 code | 818 blank | 1066 comment | 944 complexity | 8fafc27c9f0239013dc4323c88a73dcb MD5 | raw file
Possible License(s): BSD-3-Clause, Apache-2.0, Unlicense, LGPL-2.1

Large files are truncated, but you can click here to view the full file

  1. /*
  2. * %CopyrightBegin%
  3. *
  4. * Copyright Ericsson AB 1996-2014. All Rights Reserved.
  5. *
  6. * Licensed under the Apache License, Version 2.0 (the "License");
  7. * you may not use this file except in compliance with the License.
  8. * You may obtain a copy of the License at
  9. *
  10. * http://www.apache.org/licenses/LICENSE-2.0
  11. *
  12. * Unless required by applicable law or agreed to in writing, software
  13. * distributed under the License is distributed on an "AS IS" BASIS,
  14. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. * See the License for the specific language governing permissions and
  16. * limitations under the License.
  17. *
  18. * %CopyrightEnd%
  19. */
  20. #ifdef HAVE_CONFIG_H
  21. # include "config.h"
  22. #endif
  23. #include <stddef.h> /* offsetof() */
  24. #include "sys.h"
  25. #include "erl_vm.h"
  26. #include "global.h"
  27. #include "erl_process.h"
  28. #include "error.h"
  29. #include "bif.h"
  30. #include "big.h"
  31. #include "beam_load.h"
  32. #include "erl_binary.h"
  33. #include "erl_map.h"
  34. #include "erl_bits.h"
  35. #include "dist.h"
  36. #include "beam_bp.h"
  37. #include "beam_catches.h"
  38. #include "erl_thr_progress.h"
  39. #ifdef HIPE
  40. #include "hipe_mode_switch.h"
  41. #include "hipe_bif1.h"
  42. #endif
  43. #include "dtrace-wrapper.h"
  44. /* #define HARDDEBUG 1 */
  45. #if defined(NO_JUMP_TABLE)
  46. # define OpCase(OpCode) case op_##OpCode
  47. # define CountCase(OpCode) case op_count_##OpCode
  48. # define OpCode(OpCode) ((Uint*)op_##OpCode)
  49. # define Goto(Rel) {Go = (int)(UWord)(Rel); goto emulator_loop;}
  50. # define LabelAddr(Addr) &&##Addr
  51. #else
  52. # define OpCase(OpCode) lb_##OpCode
  53. # define CountCase(OpCode) lb_count_##OpCode
  54. # define Goto(Rel) goto *((void *)Rel)
  55. # define LabelAddr(Label) &&Label
  56. # define OpCode(OpCode) (&&lb_##OpCode)
  57. #endif
  58. #ifdef ERTS_ENABLE_LOCK_CHECK
  59. # ifdef ERTS_SMP
  60. # define PROCESS_MAIN_CHK_LOCKS(P) \
  61. do { \
  62. if ((P)) { \
  63. erts_proc_lc_chk_only_proc_main((P)); \
  64. } \
  65. else \
  66. erts_lc_check_exact(NULL, 0); \
  67. ERTS_SMP_LC_ASSERT(!erts_thr_progress_is_blocking()); \
  68. } while (0)
  69. # define ERTS_SMP_REQ_PROC_MAIN_LOCK(P) \
  70. if ((P)) erts_proc_lc_require_lock((P), ERTS_PROC_LOCK_MAIN,\
  71. __FILE__, __LINE__)
  72. # define ERTS_SMP_UNREQ_PROC_MAIN_LOCK(P) \
  73. if ((P)) erts_proc_lc_unrequire_lock((P), ERTS_PROC_LOCK_MAIN)
  74. # else
  75. # define ERTS_SMP_REQ_PROC_MAIN_LOCK(P)
  76. # define ERTS_SMP_UNREQ_PROC_MAIN_LOCK(P)
  77. # define PROCESS_MAIN_CHK_LOCKS(P) erts_lc_check_exact(NULL, 0)
  78. # endif
  79. #else
  80. # define PROCESS_MAIN_CHK_LOCKS(P)
  81. # define ERTS_SMP_REQ_PROC_MAIN_LOCK(P)
  82. # define ERTS_SMP_UNREQ_PROC_MAIN_LOCK(P)
  83. #endif
  84. /*
  85. * Define macros for deep checking of terms.
  86. */
  87. #if defined(HARDDEBUG)
  88. # define CHECK_TERM(T) size_object(T)
  89. # define CHECK_ARGS(PC) \
  90. do { \
  91. int i_; \
  92. int Arity_ = PC[-1]; \
  93. if (Arity_ > 0) { \
  94. CHECK_TERM(r(0)); \
  95. } \
  96. for (i_ = 1; i_ < Arity_; i_++) { \
  97. CHECK_TERM(x(i_)); \
  98. } \
  99. } while (0)
  100. #else
  101. # define CHECK_TERM(T) ASSERT(!is_CP(T))
  102. # define CHECK_ARGS(T)
  103. #endif
  104. #ifndef MAX
  105. #define MAX(x, y) (((x) > (y)) ? (x) : (y))
  106. #endif
  107. #define GET_BIF_ADDRESS(p) ((BifFunction) (((Export *) p)->code[4]))
  108. #define TermWords(t) (((t) / (sizeof(BeamInstr)/sizeof(Eterm))) + !!((t) % (sizeof(BeamInstr)/sizeof(Eterm))))
  109. /*
  110. * We reuse some of the fields in the save area in the process structure.
  111. * This is safe to do, since this space is only actively used when
  112. * the process is switched out.
  113. */
  114. #define REDS_IN(p) ((p)->def_arg_reg[5])
  115. /*
  116. * Add a byte offset to a pointer to Eterm. This is useful when the
  117. * loader has precalculated a byte offset.
  118. */
  119. #define ADD_BYTE_OFFSET(ptr, offset) \
  120. ((Eterm *) (((unsigned char *)ptr) + (offset)))
  121. /* We don't check the range if an ordinary switch is used */
  122. #ifdef NO_JUMP_TABLE
  123. #define VALID_INSTR(IP) ((UWord)(IP) < (NUMBER_OF_OPCODES*2+10))
  124. #else
  125. #define VALID_INSTR(IP) \
  126. ((SWord)LabelAddr(emulator_loop) <= (SWord)(IP) && \
  127. (SWord)(IP) < (SWord)LabelAddr(end_emulator_loop))
  128. #endif /* NO_JUMP_TABLE */
  129. #define SET_CP(p, ip) \
  130. ASSERT(VALID_INSTR(*(ip))); \
  131. (p)->cp = (ip)
  132. #define SET_I(ip) \
  133. ASSERT(VALID_INSTR(* (Eterm *)(ip))); \
  134. I = (ip)
  135. #define FetchArgs(S1, S2) tmp_arg1 = (S1); tmp_arg2 = (S2)
  136. /*
  137. * Store a result into a register given a destination descriptor.
  138. */
  139. #define StoreResult(Result, DestDesc) \
  140. do { \
  141. Eterm stb_reg; \
  142. stb_reg = (DestDesc); \
  143. CHECK_TERM(Result); \
  144. switch (beam_reg_tag(stb_reg)) { \
  145. case R_REG_DEF: \
  146. r(0) = (Result); break; \
  147. case X_REG_DEF: \
  148. xb(x_reg_offset(stb_reg)) = (Result); break; \
  149. default: \
  150. yb(y_reg_offset(stb_reg)) = (Result); break; \
  151. } \
  152. } while (0)
  153. #define StoreSimpleDest(Src, Dest) Dest = (Src)
  154. /*
  155. * Store a result into a register and execute the next instruction.
  156. * Dst points to the word with a destination descriptor, which MUST
  157. * be just before the next instruction.
  158. */
  159. #define StoreBifResult(Dst, Result) \
  160. do { \
  161. BeamInstr* stb_next; \
  162. Eterm stb_reg; \
  163. stb_reg = Arg(Dst); \
  164. I += (Dst) + 2; \
  165. stb_next = (BeamInstr *) *I; \
  166. CHECK_TERM(Result); \
  167. switch (beam_reg_tag(stb_reg)) { \
  168. case R_REG_DEF: \
  169. r(0) = (Result); Goto(stb_next); \
  170. case X_REG_DEF: \
  171. xb(x_reg_offset(stb_reg)) = (Result); Goto(stb_next); \
  172. default: \
  173. yb(y_reg_offset(stb_reg)) = (Result); Goto(stb_next); \
  174. } \
  175. } while (0)
  176. #define ClauseFail() goto jump_f
  177. #define SAVE_CP(X) \
  178. do { \
  179. *(X) = make_cp(c_p->cp); \
  180. c_p->cp = 0; \
  181. } while(0)
  182. #define RESTORE_CP(X) SET_CP(c_p, (BeamInstr *) cp_val(*(X)))
  183. #define ISCATCHEND(instr) ((Eterm *) *(instr) == OpCode(catch_end_y))
  184. /*
  185. * Special Beam instructions.
  186. */
  187. BeamInstr beam_apply[2];
  188. BeamInstr beam_exit[1];
  189. BeamInstr beam_continue_exit[1];
/* Pointers to well-known emulator instructions; set elsewhere (the
 * initialization code is outside this chunk) and compared against by
 * the dispatch/trace machinery. */
  190. BeamInstr* em_call_error_handler;
  191. BeamInstr* em_apply_bif;
  192. BeamInstr* em_call_nif;
  193. /* NOTE These should be the only variables containing trace instructions.
  194. ** Sometimes tests are for the instruction value, and sometimes
  195. ** for the referring variable (one of these), and rogue references
  196. ** will most likely cause chaos.
  197. */
  198. BeamInstr beam_return_to_trace[1]; /* OpCode(i_return_to_trace) */
  199. BeamInstr beam_return_trace[1]; /* OpCode(i_return_trace) */
  200. BeamInstr beam_exception_trace[1]; /* UGLY also OpCode(i_return_trace) */
  201. BeamInstr beam_return_time_trace[1]; /* OpCode(i_return_time_trace) */
  202. /*
  203. * All Beam instructions in numerical order.
  204. */
  205. #ifndef NO_JUMP_TABLE
  206. void** beam_ops;
  207. #endif
  208. #define SWAPIN \
  209. HTOP = HEAP_TOP(c_p); \
  210. E = c_p->stop
  211. #define SWAPOUT \
  212. HEAP_TOP(c_p) = HTOP; \
  213. c_p->stop = E
  214. /*
  215. * Use LIGHT_SWAPOUT when the called function
  216. * will call HeapOnlyAlloc() (and never HAlloc()).
  217. */
  218. #ifdef DEBUG
  219. # /* The stack pointer is used in an assertion. */
  220. # define LIGHT_SWAPOUT SWAPOUT
  221. #else
  222. # define LIGHT_SWAPOUT HEAP_TOP(c_p) = HTOP
  223. #endif
  224. /*
  225. * Use LIGHT_SWAPIN when we know that c_p->stop cannot
  226. * have been updated (i.e. if there cannot have been
  227. * a garbage-collection).
  228. */
  229. #define LIGHT_SWAPIN HTOP = HEAP_TOP(c_p)
  230. #ifdef FORCE_HEAP_FRAGS
  231. # define HEAP_SPACE_VERIFIED(Words) do { \
  232. c_p->space_verified = (Words); \
  233. c_p->space_verified_from = HTOP; \
  234. }while(0)
  235. #else
  236. # define HEAP_SPACE_VERIFIED(Words) ((void)0)
  237. #endif
  238. #define PRE_BIF_SWAPOUT(P) \
  239. HEAP_TOP((P)) = HTOP; \
  240. (P)->stop = E; \
  241. PROCESS_MAIN_CHK_LOCKS((P)); \
  242. ERTS_SMP_UNREQ_PROC_MAIN_LOCK((P))
  243. #define db(N) (N)
  244. #define tb(N) (N)
  245. #define xb(N) (*(Eterm *) (((unsigned char *)reg) + (N)))
  246. #define yb(N) (*(Eterm *) (((unsigned char *)E) + (N)))
  247. #define fb(N) (*(double *) (((unsigned char *)&(freg[0].fd)) + (N)))
  248. #define Qb(N) (N)
  249. #define Ib(N) (N)
  250. #define x(N) reg[N]
  251. #define y(N) E[N]
  252. #define r(N) x##N
  253. /*
  254. * Makes sure that there are StackNeed + HeapNeed + 1 words available
  255. * on the combined heap/stack segment, then allocates StackNeed + 1
  256. * words on the stack and saves CP.
  257. *
  258. * M is number of live registers to preserve during garbage collection
  259. */
  260. #define AH(StackNeed, HeapNeed, M) \
  261. do { \
  262. int needed; \
  263. needed = (StackNeed) + 1; \
  264. if (E - HTOP < (needed + (HeapNeed))) { \
  265. SWAPOUT; \
  266. reg[0] = r(0); \
  267. PROCESS_MAIN_CHK_LOCKS(c_p); \
  268. FCALLS -= erts_garbage_collect(c_p, needed + (HeapNeed), reg, (M)); \
  269. ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); \
  270. PROCESS_MAIN_CHK_LOCKS(c_p); \
  271. r(0) = reg[0]; \
  272. SWAPIN; \
  273. } \
  274. E -= needed; \
  275. SAVE_CP(E); \
  276. } while (0)
  277. #define Allocate(Ns, Live) AH(Ns, 0, Live)
  278. #define AllocateZero(Ns, Live) \
  279. do { Eterm* ptr; \
  280. int i = (Ns); \
  281. AH(i, 0, Live); \
  282. for (ptr = E + i; ptr > E; ptr--) { \
  283. make_blank(*ptr); \
  284. } \
  285. } while (0)
  286. #define AllocateHeap(Ns, Nh, Live) AH(Ns, Nh, Live)
  287. #define AllocateHeapZero(Ns, Nh, Live) \
  288. do { Eterm* ptr; \
  289. int i = (Ns); \
  290. AH(i, Nh, Live); \
  291. for (ptr = E + i; ptr > E; ptr--) { \
  292. make_blank(*ptr); \
  293. } \
  294. } while (0)
  295. #define AllocateInit(Ns, Live, Y) \
  296. do { AH(Ns, 0, Live); make_blank(Y); } while (0)
  297. /*
  298. * Like the AH macro, but allocates no additional heap space.
  299. */
  300. #define A(StackNeed, M) AH(StackNeed, 0, M)
  301. #define D(N) \
  302. RESTORE_CP(E); \
  303. E += (N) + 1;
  304. #define TestBinVHeap(VNh, Nh, Live) \
  305. do { \
  306. unsigned need = (Nh); \
  307. if ((E - HTOP < need) || (MSO(c_p).overhead + (VNh) >= BIN_VHEAP_SZ(c_p))) {\
  308. SWAPOUT; \
  309. reg[0] = r(0); \
  310. PROCESS_MAIN_CHK_LOCKS(c_p); \
  311. FCALLS -= erts_garbage_collect(c_p, need, reg, (Live)); \
  312. ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); \
  313. PROCESS_MAIN_CHK_LOCKS(c_p); \
  314. r(0) = reg[0]; \
  315. SWAPIN; \
  316. } \
  317. HEAP_SPACE_VERIFIED(need); \
  318. } while (0)
  319. /*
  320. * Check if Nh words of heap are available; if not, do a garbage collection.
  321. * Live is number of active argument registers to be preserved.
  322. */
  323. #define TestHeap(Nh, Live) \
  324. do { \
  325. unsigned need = (Nh); \
  326. if (E - HTOP < need) { \
  327. SWAPOUT; \
  328. reg[0] = r(0); \
  329. PROCESS_MAIN_CHK_LOCKS(c_p); \
  330. FCALLS -= erts_garbage_collect(c_p, need, reg, (Live)); \
  331. ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); \
  332. PROCESS_MAIN_CHK_LOCKS(c_p); \
  333. r(0) = reg[0]; \
  334. SWAPIN; \
  335. } \
  336. HEAP_SPACE_VERIFIED(need); \
  337. } while (0)
  338. /*
  339. * Check if Nh words of heap are available; if not, do a garbage collection.
  340. * Live is number of active argument registers to be preserved.
  341. * Takes special care to preserve Extra if a garbage collection occurs.
  342. */
  343. #define TestHeapPreserve(Nh, Live, Extra) \
  344. do { \
  345. unsigned need = (Nh); \
  346. if (E - HTOP < need) { \
  347. SWAPOUT; \
  348. reg[0] = r(0); \
  349. reg[Live] = Extra; \
  350. PROCESS_MAIN_CHK_LOCKS(c_p); \
  351. FCALLS -= erts_garbage_collect(c_p, need, reg, (Live)+1); \
  352. ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); \
  353. PROCESS_MAIN_CHK_LOCKS(c_p); \
  354. if (Live > 0) { \
  355. r(0) = reg[0]; \
  356. } \
  357. Extra = reg[Live]; \
  358. SWAPIN; \
  359. } \
  360. HEAP_SPACE_VERIFIED(need); \
  361. } while (0)
  362. #define TestHeapPutList(Need, Reg) \
  363. do { \
  364. TestHeap((Need), 1); \
  365. PutList(Reg, r(0), r(0), StoreSimpleDest); \
  366. CHECK_TERM(r(0)); \
  367. } while (0)
  368. #define Init(N) make_blank(yb(N))
  369. #define Init2(Y1, Y2) do { make_blank(Y1); make_blank(Y2); } while (0)
  370. #define Init3(Y1, Y2, Y3) \
  371. do { make_blank(Y1); make_blank(Y2); make_blank(Y3); } while (0)
  372. #define MakeFun(FunP, NumFree) \
  373. do { \
  374. SWAPOUT; \
  375. reg[0] = r(0); \
  376. r(0) = new_fun(c_p, reg, (ErlFunEntry *) FunP, NumFree); \
  377. SWAPIN; \
  378. } while (0)
  379. #define PutTuple(Dst, Arity) \
  380. do { \
  381. Dst = make_tuple(HTOP); \
  382. pt_arity = (Arity); \
  383. } while (0)
  384. /*
  385. * Check that we haven't used the reductions and jump to function pointed to by
  386. * the I register. If we are out of reductions, do a context switch.
  387. */
/* Consume one reduction and jump to the next instruction (*I), or fall
 * into the context-switch path when the reduction budget is exhausted.
 * NOTE(review): the double test "FCALLS > 0 || FCALLS > neg_o_reds"
 * presumably covers the case where call-saving is active and FCALLS
 * counts in a negative range bounded by neg_o_reds — confirm against
 * the definitions of FCALLS/neg_o_reds in process_main(), which are
 * outside this chunk. */
  388. #define DispatchMacro() \
  389. do { \
  390. BeamInstr* dis_next; \
  391. dis_next = (BeamInstr *) *I; \
  392. CHECK_ARGS(I); \
  393. if (FCALLS > 0 || FCALLS > neg_o_reds) { \
  394. FCALLS--; \
  395. Goto(dis_next); \
  396. } else { \
  397. goto context_switch; \
  398. } \
  399. } while (0)
  400. #define DispatchMacroFun() \
  401. do { \
  402. BeamInstr* dis_next; \
  403. dis_next = (BeamInstr *) *I; \
  404. CHECK_ARGS(I); \
  405. if (FCALLS > 0 || FCALLS > neg_o_reds) { \
  406. FCALLS--; \
  407. Goto(dis_next); \
  408. } else { \
  409. goto context_switch_fun; \
  410. } \
  411. } while (0)
  412. #define DispatchMacrox() \
  413. do { \
  414. if (FCALLS > 0) { \
  415. Eterm* dis_next; \
  416. SET_I(((Export *) Arg(0))->addressv[erts_active_code_ix()]); \
  417. dis_next = (Eterm *) *I; \
  418. FCALLS--; \
  419. CHECK_ARGS(I); \
  420. Goto(dis_next); \
  421. } else if (ERTS_PROC_GET_SAVED_CALLS_BUF(c_p) \
  422. && FCALLS > neg_o_reds) { \
  423. goto save_calls1; \
  424. } else { \
  425. SET_I(((Export *) Arg(0))->addressv[erts_active_code_ix()]); \
  426. CHECK_ARGS(I); \
  427. goto context_switch; \
  428. } \
  429. } while (0)
  430. #ifdef DEBUG
  431. /*
  432. * To simplify breakpoint setting, put the code in one place only and jump to it.
  433. */
  434. # define Dispatch() goto do_dispatch
  435. # define Dispatchx() goto do_dispatchx
  436. # define Dispatchfun() goto do_dispatchfun
  437. #else
  438. /*
  439. * Inline for speed.
  440. */
  441. # define Dispatch() DispatchMacro()
  442. # define Dispatchx() DispatchMacrox()
  443. # define Dispatchfun() DispatchMacroFun()
  444. #endif
  445. #define Self(R) R = c_p->common.id
  446. #define Node(R) R = erts_this_node->sysname
  447. #define Arg(N) I[(N)+1]
  448. #define Next(N) \
  449. I += (N) + 1; \
  450. ASSERT(VALID_INSTR(*I)); \
  451. Goto(*I)
  452. #define PreFetch(N, Dst) do { Dst = (BeamInstr *) *(I + N + 1); } while (0)
  453. #define NextPF(N, Dst) \
  454. I += N + 1; \
  455. ASSERT(VALID_INSTR(Dst)); \
  456. Goto(Dst)
  457. #define GetR(pos, tr) \
  458. do { \
  459. tr = Arg(pos); \
  460. switch (beam_reg_tag(tr)) { \
  461. case R_REG_DEF: tr = r(0); break; \
  462. case X_REG_DEF: tr = xb(x_reg_offset(tr)); break; \
  463. case Y_REG_DEF: ASSERT(y_reg_offset(tr) >= 1); tr = yb(y_reg_offset(tr)); break; \
  464. } \
  465. CHECK_TERM(tr); \
  466. } while (0)
  467. #define GetArg1(N, Dst) GetR((N), Dst)
  468. #define GetArg2(N, Dst1, Dst2) \
  469. do { \
  470. GetR(N, Dst1); \
  471. GetR((N)+1, Dst2); \
  472. } while (0)
  473. #define PutList(H, T, Dst, Store) \
  474. do { \
  475. HTOP[0] = (H); HTOP[1] = (T); \
  476. Store(make_list(HTOP), Dst); \
  477. HTOP += 2; \
  478. } while (0)
  479. #define Move(Src, Dst, Store) \
  480. do { \
  481. Eterm term = (Src); \
  482. Store(term, Dst); \
  483. } while (0)
  484. #define Move2(S1, D1, S2, D2) D1 = (S1); D2 = (S2)
  485. #define Move3(S1, D1, S2, D2, S3, D3) D1 = (S1); D2 = (S2); D3 = (S3)
  486. #define MoveGenDest(src, dstp) \
  487. if ((dstp) == NULL) { r(0) = (src); } else { *(dstp) = src; }
  488. #define MoveReturn(Src, Dest) \
  489. (Dest) = (Src); \
  490. I = c_p->cp; \
  491. ASSERT(VALID_INSTR(*c_p->cp)); \
  492. c_p->cp = 0; \
  493. CHECK_TERM(r(0)); \
  494. Goto(*I)
  495. #define DeallocateReturn(Deallocate) \
  496. do { \
  497. int words_to_pop = (Deallocate); \
  498. SET_I((BeamInstr *) cp_val(*E)); \
  499. E = ADD_BYTE_OFFSET(E, words_to_pop); \
  500. CHECK_TERM(r(0)); \
  501. Goto(*I); \
  502. } while (0)
  503. #define MoveDeallocateReturn(Src, Dest, Deallocate) \
  504. (Dest) = (Src); \
  505. DeallocateReturn(Deallocate)
  506. #define MoveCall(Src, Dest, CallDest, Size) \
  507. (Dest) = (Src); \
  508. SET_CP(c_p, I+Size+1); \
  509. SET_I((BeamInstr *) CallDest); \
  510. Dispatch();
  511. #define MoveCallLast(Src, Dest, CallDest, Deallocate) \
  512. (Dest) = (Src); \
  513. RESTORE_CP(E); \
  514. E = ADD_BYTE_OFFSET(E, (Deallocate)); \
  515. SET_I((BeamInstr *) CallDest); \
  516. Dispatch();
  517. #define MoveCallOnly(Src, Dest, CallDest) \
  518. (Dest) = (Src); \
  519. SET_I((BeamInstr *) CallDest); \
  520. Dispatch();
  521. #define MoveJump(Src) \
  522. r(0) = (Src); \
  523. SET_I((BeamInstr *) Arg(0)); \
  524. Goto(*I);
  525. #define GetList(Src, H, T) do { \
  526. Eterm* tmp_ptr = list_val(Src); \
  527. H = CAR(tmp_ptr); \
  528. T = CDR(tmp_ptr); } while (0)
  529. #define GetTupleElement(Src, Element, Dest) \
  530. do { \
  531. tmp_arg1 = (Eterm) COMPRESS_POINTER(((unsigned char *) tuple_val(Src)) + \
  532. (Element)); \
  533. (Dest) = (*(Eterm *) EXPAND_POINTER(tmp_arg1)); \
  534. } while (0)
  535. #define ExtractNextElement(Dest) \
  536. tmp_arg1 += sizeof(Eterm); \
  537. (Dest) = (* (Eterm *) (((unsigned char *) EXPAND_POINTER(tmp_arg1))))
  538. #define ExtractNextElement2(Dest) \
  539. do { \
  540. Eterm* ene_dstp = &(Dest); \
  541. ene_dstp[0] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[1]; \
  542. ene_dstp[1] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[2]; \
  543. tmp_arg1 += sizeof(Eterm) + sizeof(Eterm); \
  544. } while (0)
  545. #define ExtractNextElement3(Dest) \
  546. do { \
  547. Eterm* ene_dstp = &(Dest); \
  548. ene_dstp[0] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[1]; \
  549. ene_dstp[1] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[2]; \
  550. ene_dstp[2] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[3]; \
  551. tmp_arg1 += 3*sizeof(Eterm); \
  552. } while (0)
  553. #define ExtractNextElement4(Dest) \
  554. do { \
  555. Eterm* ene_dstp = &(Dest); \
  556. ene_dstp[0] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[1]; \
  557. ene_dstp[1] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[2]; \
  558. ene_dstp[2] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[3]; \
  559. ene_dstp[3] = ((Eterm *) EXPAND_POINTER(tmp_arg1))[4]; \
  560. tmp_arg1 += 4*sizeof(Eterm); \
  561. } while (0)
  562. #define ExtractElement(Element, Dest) \
  563. do { \
  564. tmp_arg1 += (Element); \
  565. (Dest) = (* (Eterm *) EXPAND_POINTER(tmp_arg1)); \
  566. } while (0)
  567. #define EqualImmed(X, Y, Action) if (X != Y) { Action; }
  568. #define NotEqualImmed(X, Y, Action) if (X == Y) { Action; }
  569. #define EqualExact(X, Y, Action) if (!EQ(X,Y)) { Action; }
  570. #define IsLessThan(X, Y, Action) if (CMP_GE(X, Y)) { Action; }
  571. #define IsGreaterEqual(X, Y, Action) if (CMP_LT(X, Y)) { Action; }
  572. #define IsFloat(Src, Fail) if (is_not_float(Src)) { Fail; }
  573. #define IsInteger(Src, Fail) if (is_not_integer(Src)) { Fail; }
  574. #define IsNumber(X, Fail) if (is_not_integer(X) && is_not_float(X)) { Fail; }
  575. #define IsAtom(Src, Fail) if (is_not_atom(Src)) { Fail; }
  576. #define IsIntegerAllocate(Src, Need, Alive, Fail) \
  577. if (is_not_integer(Src)) { Fail; } \
  578. A(Need, Alive)
  579. #define IsNil(Src, Fail) if (is_not_nil(Src)) { Fail; }
  580. #define IsList(Src, Fail) if (is_not_list(Src) && is_not_nil(Src)) { Fail; }
  581. #define IsNonemptyList(Src, Fail) if (is_not_list(Src)) { Fail; }
  582. #define IsNonemptyListAllocate(Src, Need, Alive, Fail) \
  583. if (is_not_list(Src)) { Fail; } \
  584. A(Need, Alive)
  585. #define IsNonemptyListTestHeap(Src, Need, Alive, Fail) \
  586. if (is_not_list(Src)) { Fail; } \
  587. TestHeap(Need, Alive)
  588. #define IsTuple(X, Action) if (is_not_tuple(X)) Action
  589. #define IsArity(Pointer, Arity, Fail) \
  590. if (*(Eterm *) \
  591. EXPAND_POINTER(tmp_arg1 = (Eterm) \
  592. COMPRESS_POINTER(tuple_val(Pointer))) != (Arity)) \
  593. { \
  594. Fail; \
  595. }
  596. #define IsMap(Src, Fail) if (!is_map(Src)) { Fail; }
  597. #define GetMapElement(Src, Key, Dst, Fail) \
  598. do { \
  599. Eterm _res = get_map_element(Src, Key); \
  600. if (is_non_value(_res)) { \
  601. Fail; \
  602. } \
  603. Dst = _res; \
  604. } while (0)
  605. #define GetMapElementHash(Src, Key, Hx, Dst, Fail) \
  606. do { \
  607. Eterm _res = get_map_element_hash(Src, Key, Hx); \
  608. if (is_non_value(_res)) { \
  609. Fail; \
  610. } \
  611. Dst = _res; \
  612. } while (0)
  613. #define IsFunction(X, Action) \
  614. do { \
  615. if ( !(is_any_fun(X)) ) { \
  616. Action; \
  617. } \
  618. } while (0)
  619. #define IsFunction2(F, A, Action) \
  620. do { \
  621. if (erl_is_function(c_p, F, A) != am_true ) { \
  622. Action; \
  623. } \
  624. } while (0)
  625. #define IsTupleOfArity(Src, Arity, Fail) \
  626. do { \
  627. if (is_not_tuple(Src) || \
  628. *(Eterm *) \
  629. EXPAND_POINTER(tmp_arg1 = \
  630. (Eterm) COMPRESS_POINTER(tuple_val(Src))) != Arity) { \
  631. Fail; \
  632. } \
  633. } while (0)
  634. #define IsBoolean(X, Fail) if ((X) != am_true && (X) != am_false) { Fail; }
  635. #define IsBinary(Src, Fail) \
  636. if (is_not_binary(Src) || binary_bitsize(Src) != 0) { Fail; }
  637. #define IsBitstring(Src, Fail) \
  638. if (is_not_binary(Src)) { Fail; }
  639. #if defined(ARCH_64) && !HALFWORD_HEAP
  640. #define BsSafeMul(A, B, Fail, Target) \
  641. do { Uint64 _res = (A) * (B); \
  642. if (_res / B != A) { Fail; } \
  643. Target = _res; \
  644. } while (0)
  645. #else
  646. #define BsSafeMul(A, B, Fail, Target) \
  647. do { Uint64 _res = (Uint64)(A) * (Uint64)(B); \
  648. if ((_res >> (8*sizeof(Uint))) != 0) { Fail; } \
  649. Target = _res; \
  650. } while (0)
  651. #endif
  652. #define BsGetFieldSize(Bits, Unit, Fail, Target) \
  653. do { \
  654. Sint _signed_size; Uint _uint_size; \
  655. if (is_small(Bits)) { \
  656. _signed_size = signed_val(Bits); \
  657. if (_signed_size < 0) { Fail; } \
  658. _uint_size = (Uint) _signed_size; \
  659. } else { \
  660. if (!term_to_Uint(Bits, &temp_bits)) { Fail; } \
  661. _uint_size = temp_bits; \
  662. } \
  663. BsSafeMul(_uint_size, Unit, Fail, Target); \
  664. } while (0)
  665. #define BsGetUncheckedFieldSize(Bits, Unit, Fail, Target) \
  666. do { \
  667. Sint _signed_size; Uint _uint_size; \
  668. if (is_small(Bits)) { \
  669. _signed_size = signed_val(Bits); \
  670. if (_signed_size < 0) { Fail; } \
  671. _uint_size = (Uint) _signed_size; \
  672. } else { \
  673. if (!term_to_Uint(Bits, &temp_bits)) { Fail; } \
  674. _uint_size = (Uint) temp_bits; \
  675. } \
  676. Target = _uint_size * Unit; \
  677. } while (0)
  678. #define BsGetFloat2(Ms, Live, Sz, Flags, Dst, Store, Fail) \
  679. do { \
  680. ErlBinMatchBuffer *_mb; \
  681. Eterm _result; Sint _size; \
  682. if (!is_small(Sz) || (_size = unsigned_val(Sz)) > 64) { Fail; } \
  683. _size *= ((Flags) >> 3); \
  684. TestHeap(FLOAT_SIZE_OBJECT, Live); \
  685. _mb = ms_matchbuffer(Ms); \
  686. LIGHT_SWAPOUT; \
  687. _result = erts_bs_get_float_2(c_p, _size, (Flags), _mb); \
  688. LIGHT_SWAPIN; \
  689. HEAP_SPACE_VERIFIED(0); \
  690. if (is_non_value(_result)) { Fail; } \
  691. else { Store(_result, Dst); } \
  692. } while (0)
  693. #define BsGetBinaryImm_2(Ms, Live, Sz, Flags, Dst, Store, Fail) \
  694. do { \
  695. ErlBinMatchBuffer *_mb; \
  696. Eterm _result; \
  697. TestHeap(heap_bin_size(ERL_ONHEAP_BIN_LIMIT), Live); \
  698. _mb = ms_matchbuffer(Ms); \
  699. LIGHT_SWAPOUT; \
  700. _result = erts_bs_get_binary_2(c_p, (Sz), (Flags), _mb); \
  701. LIGHT_SWAPIN; \
  702. HEAP_SPACE_VERIFIED(0); \
  703. if (is_non_value(_result)) { Fail; } \
  704. else { Store(_result, Dst); } \
  705. } while (0)
  706. #define BsGetBinary_2(Ms, Live, Sz, Flags, Dst, Store, Fail) \
  707. do { \
  708. ErlBinMatchBuffer *_mb; \
  709. Eterm _result; Uint _size; \
  710. BsGetFieldSize(Sz, ((Flags) >> 3), Fail, _size); \
  711. TestHeap(ERL_SUB_BIN_SIZE, Live); \
  712. _mb = ms_matchbuffer(Ms); \
  713. LIGHT_SWAPOUT; \
  714. _result = erts_bs_get_binary_2(c_p, _size, (Flags), _mb); \
  715. LIGHT_SWAPIN; \
  716. HEAP_SPACE_VERIFIED(0); \
  717. if (is_non_value(_result)) { Fail; } \
  718. else { Store(_result, Dst); } \
  719. } while (0)
  720. #define BsGetBinaryAll_2(Ms, Live, Unit, Dst, Store, Fail) \
  721. do { \
  722. ErlBinMatchBuffer *_mb; \
  723. Eterm _result; \
  724. TestHeap(ERL_SUB_BIN_SIZE, Live); \
  725. _mb = ms_matchbuffer(Ms); \
  726. if (((_mb->size - _mb->offset) % Unit) == 0) { \
  727. LIGHT_SWAPOUT; \
  728. _result = erts_bs_get_binary_all_2(c_p, _mb); \
  729. LIGHT_SWAPIN; \
  730. HEAP_SPACE_VERIFIED(0); \
  731. ASSERT(is_value(_result)); \
  732. Store(_result, Dst); \
  733. } else { \
  734. HEAP_SPACE_VERIFIED(0); \
  735. Fail; } \
  736. } while (0)
  737. #define BsSkipBits2(Ms, Bits, Unit, Fail) \
  738. do { \
  739. ErlBinMatchBuffer *_mb; \
  740. size_t new_offset; \
  741. Uint _size; \
  742. _mb = ms_matchbuffer(Ms); \
  743. BsGetFieldSize(Bits, Unit, Fail, _size); \
  744. new_offset = _mb->offset + _size; \
  745. if (new_offset <= _mb->size) { _mb->offset = new_offset; } \
  746. else { Fail; } \
  747. } while (0)
  748. #define BsSkipBitsAll2(Ms, Unit, Fail) \
  749. do { \
  750. ErlBinMatchBuffer *_mb; \
  751. _mb = ms_matchbuffer(Ms); \
  752. if (((_mb->size - _mb->offset) % Unit) == 0) {_mb->offset = _mb->size; } \
  753. else { Fail; } \
  754. } while (0)
  755. #define BsSkipBitsImm2(Ms, Bits, Fail) \
  756. do { \
  757. ErlBinMatchBuffer *_mb; \
  758. size_t new_offset; \
  759. _mb = ms_matchbuffer(Ms); \
  760. new_offset = _mb->offset + (Bits); \
  761. if (new_offset <= _mb->size) { _mb->offset = new_offset; } \
  762. else { Fail; } \
  763. } while (0)
  764. #define NewBsPutIntegerImm(Sz, Flags, Src) \
  765. do { \
  766. if (!erts_new_bs_put_integer(ERL_BITS_ARGS_3((Src), (Sz), (Flags)))) { goto badarg; } \
  767. } while (0)
  768. #define NewBsPutInteger(Sz, Flags, Src) \
  769. do { \
  770. Sint _size; \
  771. BsGetUncheckedFieldSize(Sz, ((Flags) >> 3), goto badarg, _size); \
  772. if (!erts_new_bs_put_integer(ERL_BITS_ARGS_3((Src), _size, (Flags)))) \
  773. { goto badarg; } \
  774. } while (0)
  775. #define NewBsPutFloatImm(Sz, Flags, Src) \
  776. do { \
  777. if (!erts_new_bs_put_float(c_p, (Src), (Sz), (Flags))) { goto badarg; } \
  778. } while (0)
  779. #define NewBsPutFloat(Sz, Flags, Src) \
  780. do { \
  781. Sint _size; \
  782. BsGetUncheckedFieldSize(Sz, ((Flags) >> 3), goto badarg, _size); \
  783. if (!erts_new_bs_put_float(c_p, (Src), _size, (Flags))) { goto badarg; } \
  784. } while (0)
  785. #define NewBsPutBinary(Sz, Flags, Src) \
  786. do { \
  787. Sint _size; \
  788. BsGetUncheckedFieldSize(Sz, ((Flags) >> 3), goto badarg, _size); \
  789. if (!erts_new_bs_put_binary(ERL_BITS_ARGS_2((Src), _size))) { goto badarg; } \
  790. } while (0)
  791. #define NewBsPutBinaryImm(Sz, Src) \
  792. do { \
  793. if (!erts_new_bs_put_binary(ERL_BITS_ARGS_2((Src), (Sz)))) { goto badarg; } \
  794. } while (0)
  795. #define NewBsPutBinaryAll(Src, Unit) \
  796. do { \
  797. if (!erts_new_bs_put_binary_all(ERL_BITS_ARGS_2((Src), (Unit)))) { goto badarg; } \
  798. } while (0)
  799. #define IsPort(Src, Fail) if (is_not_port(Src)) { Fail; }
  800. #define IsPid(Src, Fail) if (is_not_pid(Src)) { Fail; }
  801. #define IsRef(Src, Fail) if (is_not_ref(Src)) { Fail; }
  802. /*
  803. * process_main() is already huge, so we want to avoid inlining
  804. * into it. Especially functions that are seldom used.
  805. */
  806. #ifdef __GNUC__
  807. # define NOINLINE __attribute__((__noinline__))
  808. #else
  809. # define NOINLINE
  810. #endif
  811. /*
  812. * The following functions are called directly by process_main().
  813. * Don't inline them.
  814. */
  815. static BifFunction translate_gc_bif(void* gcf) NOINLINE;
  816. static BeamInstr* handle_error(Process* c_p, BeamInstr* pc,
  817. Eterm* reg, BifFunction bf) NOINLINE;
  818. static BeamInstr* call_error_handler(Process* p, BeamInstr* ip,
  819. Eterm* reg, Eterm func) NOINLINE;
  820. static BeamInstr* fixed_apply(Process* p, Eterm* reg, Uint arity) NOINLINE;
  821. static BeamInstr* apply(Process* p, Eterm module, Eterm function,
  822. Eterm args, Eterm* reg) NOINLINE;
  823. static BeamInstr* call_fun(Process* p, int arity,
  824. Eterm* reg, Eterm args) NOINLINE;
  825. static BeamInstr* apply_fun(Process* p, Eterm fun,
  826. Eterm args, Eterm* reg) NOINLINE;
  827. static Eterm new_fun(Process* p, Eterm* reg,
  828. ErlFunEntry* fe, int num_free) NOINLINE;
  829. static Eterm new_map(Process* p, Eterm* reg, BeamInstr* I) NOINLINE;
  830. static Eterm update_map_assoc(Process* p, Eterm* reg,
  831. Eterm map, BeamInstr* I) NOINLINE;
  832. static Eterm update_map_exact(Process* p, Eterm* reg,
  833. Eterm map, BeamInstr* I) NOINLINE;
  834. static Eterm get_map_element(Eterm map, Eterm key);
  835. static Eterm get_map_element_hash(Eterm map, Eterm key, Uint32 hx);
  836. /*
  837. * Functions not directly called by process_main(). OK to inline.
  838. */
  839. static BeamInstr* next_catch(Process* c_p, Eterm *reg);
  840. static void terminate_proc(Process* c_p, Eterm Value);
  841. static Eterm add_stacktrace(Process* c_p, Eterm Value, Eterm exc);
  842. static void save_stacktrace(Process* c_p, BeamInstr* pc, Eterm* reg,
  843. BifFunction bf, Eterm args);
  844. static struct StackTrace * get_trace_from_exc(Eterm exc);
  845. static Eterm make_arglist(Process* c_p, Eterm* reg, int a);
/*
 * Emulator start-up hook: invoked once during VM initialization.
 * NOTE(review): calling process_main() here, before any process is
 * scheduled, presumably only makes the main dispatch loop export its
 * opcode/label table (beam_ops) and return immediately — confirm
 * against the start of process_main(), which is outside this chunk.
 */
  846. void
  847. init_emulator(void)
  848. {
  849. process_main();
  850. }
  851. /*
  852. * On certain platforms, make sure that the main variables really are placed
  853. * in registers.
  854. */
  855. #if defined(__GNUC__) && defined(sparc) && !defined(DEBUG)
  856. # define REG_x0 asm("%l0")
  857. # define REG_xregs asm("%l1")
  858. # define REG_htop asm("%l2")
  859. # define REG_stop asm("%l3")
  860. # define REG_I asm("%l4")
  861. # define REG_fcalls asm("%l5")
  862. # define REG_tmp_arg1 asm("%l6")
  863. # define REG_tmp_arg2 asm("%l7")
  864. #else
  865. # define REG_x0
  866. # define REG_xregs
  867. # define REG_htop
  868. # define REG_stop
  869. # define REG_I
  870. # define REG_fcalls
  871. # define REG_tmp_arg1
  872. # define REG_tmp_arg2
  873. #endif
  874. #ifdef USE_VM_PROBES
  875. # define USE_VM_CALL_PROBES
  876. #endif
  877. #ifdef USE_VM_CALL_PROBES
  878. #define DTRACE_LOCAL_CALL(p, m, f, a) \
  879. if (DTRACE_ENABLED(local_function_entry)) { \
  880. DTRACE_CHARBUF(process_name, DTRACE_TERM_BUF_SIZE); \
  881. DTRACE_CHARBUF(mfa, DTRACE_TERM_BUF_SIZE); \
  882. int depth = STACK_START(p) - STACK_TOP(p); \
  883. dtrace_fun_decode(p, m, f, a, \
  884. process_name, mfa); \
  885. DTRACE3(local_function_entry, process_name, mfa, depth); \
  886. }
  887. #define DTRACE_GLOBAL_CALL(p, m, f, a) \
  888. if (DTRACE_ENABLED(global_function_entry)) { \
  889. DTRACE_CHARBUF(process_name, DTRACE_TERM_BUF_SIZE); \
  890. DTRACE_CHARBUF(mfa, DTRACE_TERM_BUF_SIZE); \
  891. int depth = STACK_START(p) - STACK_TOP(p); \
  892. dtrace_fun_decode(p, m, f, a, \
  893. process_name, mfa); \
  894. DTRACE3(global_function_entry, process_name, mfa, depth); \
  895. }
  896. #define DTRACE_RETURN(p, m, f, a) \
  897. if (DTRACE_ENABLED(function_return)) { \
  898. DTRACE_CHARBUF(process_name, DTRACE_TERM_BUF_SIZE); \
  899. DTRACE_CHARBUF(mfa, DTRACE_TERM_BUF_SIZE); \
  900. int depth = STACK_START(p) - STACK_TOP(p); \
  901. dtrace_fun_decode(p, m, f, a, \
  902. process_name, mfa); \
  903. DTRACE3(function_return, process_name, mfa, depth); \
  904. }
  905. #define DTRACE_BIF_ENTRY(p, m, f, a) \
  906. if (DTRACE_ENABLED(bif_entry)) { \
  907. DTRACE_CHARBUF(process_name, DTRACE_TERM_BUF_SIZE); \
  908. DTRACE_CHARBUF(mfa, DTRACE_TERM_BUF_SIZE); \
  909. dtrace_fun_decode(p, m, f, a, \
  910. process_name, mfa); \
  911. DTRACE2(bif_entry, process_name, mfa); \
  912. }
  913. #define DTRACE_BIF_RETURN(p, m, f, a) \
  914. if (DTRACE_ENABLED(bif_return)) { \
  915. DTRACE_CHARBUF(process_name, DTRACE_TERM_BUF_SIZE); \
  916. DTRACE_CHARBUF(mfa, DTRACE_TERM_BUF_SIZE); \
  917. dtrace_fun_decode(p, m, f, a, \
  918. process_name, mfa); \
  919. DTRACE2(bif_return, process_name, mfa); \
  920. }
  921. #define DTRACE_NIF_ENTRY(p, m, f, a) \
  922. if (DTRACE_ENABLED(nif_entry)) { \
  923. DTRACE_CHARBUF(process_name, DTRACE_TERM_BUF_SIZE); \
  924. DTRACE_CHARBUF(mfa, DTRACE_TERM_BUF_SIZE); \
  925. dtrace_fun_decode(p, m, f, a, \
  926. process_name, mfa); \
  927. DTRACE2(nif_entry, process_name, mfa); \
  928. }
  929. #define DTRACE_NIF_RETURN(p, m, f, a) \
  930. if (DTRACE_ENABLED(nif_return)) { \
  931. DTRACE_CHARBUF(process_name, DTRACE_TERM_BUF_SIZE); \
  932. DTRACE_CHARBUF(mfa, DTRACE_TERM_BUF_SIZE); \
  933. dtrace_fun_decode(p, m, f, a, \
  934. process_name, mfa); \
  935. DTRACE2(nif_return, process_name, mfa); \
  936. }
  937. #define DTRACE_GLOBAL_CALL_FROM_EXPORT(p,e) \
  938. do { \
  939. if (DTRACE_ENABLED(global_function_entry)) { \
  940. BeamInstr* fp = (BeamInstr *) (((Export *) (e))->addressv[erts_active_code_ix()]); \
  941. DTRACE_GLOBAL_CALL((p), (Eterm)fp[-3], (Eterm)fp[-2], fp[-1]); \
  942. } \
  943. } while(0)
  944. #define DTRACE_RETURN_FROM_PC(p) \
  945. do { \
  946. BeamInstr* fp; \
  947. if (DTRACE_ENABLED(function_return) && (fp = find_function_from_pc((p)->cp))) { \
  948. DTRACE_RETURN((p), (Eterm)fp[0], (Eterm)fp[1], (Uint)fp[2]); \
  949. } \
  950. } while(0)
  951. #else /* USE_VM_PROBES */
  952. #define DTRACE_LOCAL_CALL(p, m, f, a) do {} while (0)
  953. #define DTRACE_GLOBAL_CALL(p, m, f, a) do {} while (0)
  954. #define DTRACE_GLOBAL_CALL_FROM_EXPORT(p, e) do {} while (0)
  955. #define DTRACE_RETURN(p, m, f, a) do {} while (0)
  956. #define DTRACE_RETURN_FROM_PC(p) do {} while (0)
  957. #define DTRACE_BIF_ENTRY(p, m, f, a) do {} while (0)
  958. #define DTRACE_BIF_RETURN(p, m, f, a) do {} while (0)
  959. #define DTRACE_NIF_ENTRY(p, m, f, a) do {} while (0)
  960. #define DTRACE_NIF_RETURN(p, m, f, a) do {} while (0)
  961. #endif /* USE_VM_PROBES */
  962. /*
  963. * process_main() is called twice:
  964. * The first call performs some initialisation, including exporting
  965. * the instructions' C labels to the loader.
  966. * The second call starts execution of BEAM code. This call never returns.
  967. */
  968. void process_main(void)
  969. {
  970. static int init_done = 0;
  971. Process* c_p = NULL;
  972. int reds_used;
  973. #ifdef DEBUG
  974. ERTS_DECLARE_DUMMY(Eterm pid);
  975. #endif
  976. /*
  977. * X register zero; also called r(0)
  978. */
  979. register Eterm x0 REG_x0 = NIL;
  980. /* Pointer to X registers: x(1)..x(N); reg[0] is used when doing GC,
  981. * in all other cases x0 is used.
  982. */
  983. register Eterm* reg REG_xregs = NULL;
  984. /*
  985. * Top of heap (next free location); grows upwards.
  986. */
  987. register Eterm* HTOP REG_htop = NULL;
  988. /* Stack pointer. Grows downwards; points
  989. * to last item pushed (normally a saved
  990. * continuation pointer).
  991. */
  992. register Eterm* E REG_stop = NULL;
  993. /*
  994. * Pointer to next threaded instruction.
  995. */
  996. register BeamInstr *I REG_I = NULL;
  997. /* Number of reductions left. This function
  998. * returns to the scheduler when FCALLS reaches zero.
  999. */
  1000. register Sint FCALLS REG_fcalls = 0;
  1001. /*
  1002. * Temporaries used for picking up arguments for instructions.
  1003. */
  1004. register Eterm tmp_arg1 REG_tmp_arg1 = NIL;
  1005. register Eterm tmp_arg2 REG_tmp_arg2 = NIL;
  1006. #if HEAP_ON_C_STACK
  1007. Eterm tmp_big[2]; /* Temporary buffer for small bignums if HEAP_ON_C_STACK. */
  1008. #else
  1009. Eterm *tmp_big; /* Temporary buffer for small bignums if !HEAP_ON_C_STACK. */
  1010. #endif
  1011. /*
  1012. * X registers and floating point registers are located in
  1013. * scheduler specific data.
  1014. */
  1015. register FloatDef *freg;
  1016. /*
  1017. * For keeping the negative old value of 'reds' when call saving is active.
  1018. */
  1019. int neg_o_reds = 0;
  1020. Eterm (*arith_func)(Process* p, Eterm* reg, Uint live);
  1021. #ifdef ERTS_OPCODE_COUNTER_SUPPORT
  1022. static void* counting_opcodes[] = { DEFINE_COUNTING_OPCODES };
  1023. #else
  1024. #ifndef NO_JUMP_TABLE
  1025. static void* opcodes[] = { DEFINE_OPCODES };
  1026. #else
  1027. int Go;
  1028. #endif
  1029. #endif
  1030. Uint temp_bits; /* Temporary used by BsSkipBits2 & BsGetInteger2 */
  1031. Eterm pt_arity; /* Used by do_put_tuple */
  1032. Uint64 start_time = 0; /* Monitor long schedule */
  1033. BeamInstr* start_time_i = NULL;
  1034. ERL_BITS_DECLARE_STATEP; /* Has to be last declaration */
  1035. /*
  1036. * Note: In this function, we attempt to place rarely executed code towards
  1037. * the end of the function, in the hope that the cache hit rate will be better.
  1038. * The initialization code is only run once, so it is at the very end.
  1039. *
  1040. * Note: c_p->arity must be set to reflect the number of useful terms in
  1041. * c_p->arg_reg before calling the scheduler.
  1042. */
  1043. if (!init_done) {
  1044. /* This should only be reached during the init phase when only the main
  1045. * process is running. I.e. there is no race for init_done.
  1046. */
  1047. init_done = 1;
  1048. goto init_emulator;
  1049. }
  1050. c_p = NULL;
  1051. reds_used = 0;
  1052. goto do_schedule1;
  1053. do_schedule:
  1054. reds_used = REDS_IN(c_p) - FCALLS;
  1055. do_schedule1:
  1056. if (start_time != 0) {
  1057. Sint64 diff = erts_timestamp_millis() - start_time;
  1058. if (diff > 0 && (Uint) diff > erts_system_monitor_long_schedule
  1059. #ifdef ERTS_DIRTY_SCHEDULERS
  1060. && !ERTS_SCHEDULER_IS_DIRTY(c_p->scheduler_data)
  1061. #endif
  1062. ) {
  1063. BeamInstr *inptr = find_function_from_pc(start_time_i);
  1064. BeamInstr *outptr = find_function_from_pc(c_p->i);
  1065. monitor_long_schedule_proc(c_p,inptr,outptr,(Uint) diff);
  1066. }
  1067. }
  1068. PROCESS_MAIN_CHK_LOCKS(c_p);
  1069. ERTS_SMP_UNREQ_PROC_MAIN_LOCK(c_p);
  1070. #if HALFWORD_HEAP
  1071. ASSERT(erts_get_scheduler_data()->num_tmp_heap_used == 0);
  1072. #endif
  1073. ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p);
  1074. c_p = schedule(c_p, reds_used);
  1075. ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p);
  1076. start_time = 0;
  1077. #ifdef DEBUG
  1078. pid = c_p->common.id; /* Save for debugging purpouses */
  1079. #endif
  1080. ERTS_SMP_REQ_PROC_MAIN_LOCK(c_p);
  1081. PROCESS_MAIN_CHK_LOCKS(c_p);
  1082. if (erts_system_monitor_long_schedule != 0) {
  1083. start_time = erts_timestamp_millis();
  1084. start_time_i = c_p->i;
  1085. }
  1086. reg = ERTS_PROC_GET_SCHDATA(c_p)->x_reg_array;
  1087. freg = ERTS_PROC_GET_SCHDATA(c_p)->f_reg_array;
  1088. #if !HEAP_ON_C_STACK
  1089. tmp_big = ERTS_PROC_GET_SCHDATA(c_p)->beam_emu_tmp_heap;
  1090. #endif
  1091. ERL_BITS_RELOAD_STATEP(c_p);
  1092. {
  1093. int reds;
  1094. Eterm* argp;
  1095. BeamInstr *next;
  1096. int i;
  1097. argp = c_p->arg_reg;
  1098. for (i = c_p->arity - 1; i > 0; i--) {
  1099. reg[i] = argp[i];
  1100. CHECK_TERM(reg[i]);
  1101. }
  1102. /*
  1103. * We put the original reduction count in the process structure, to reduce
  1104. * the code size (referencing a field in a struct through a pointer stored
  1105. * in a register gives smaller code than referencing a global variable).
  1106. */
  1107. SET_I(c_p->i);
  1108. reds = c_p->fcalls;
  1109. if (ERTS_PROC_GET_SAVED_CALLS_BUF(c_p)
  1110. && (ERTS_TRACE_FLAGS(c_p) & F_SENSITIVE) == 0) {
  1111. neg_o_reds = -reds;
  1112. FCALLS = REDS_IN(c_p) = 0;
  1113. } else {
  1114. neg_o_reds = 0;
  1115. FCALLS = REDS_IN(c_p) = reds;
  1116. }
  1117. next = (BeamInstr *) *I;
  1118. r(0) = c_p->arg_reg[0];
  1119. #ifdef HARDDEBUG
  1120. if (c_p->arity > 0) {
  1121. CHECK_TERM(r(0));
  1122. }
  1123. #endif
  1124. SWAPIN;
  1125. ASSERT(VALID_INSTR(next));
  1126. #ifdef USE_VM_PROBES
  1127. if (DTRACE_ENABLED(process_scheduled)) {
  1128. DTRACE_CHARBUF(process_buf, DTRACE_TERM_BUF_SIZE);
  1129. DTRACE_CHARBUF(fun_buf, DTRACE_TERM_BUF_SIZE);
  1130. dtrace_proc_str(c_p, process_buf);
  1131. if (ERTS_PROC_IS_EXITING(c_p)) {
  1132. strcpy(fun_buf, "<exiting>");
  1133. } else {
  1134. BeamInstr *fptr = find_function_from_pc(c_p->i);
  1135. if (fptr) {
  1136. dtrace_fun_decode(c_p, (Eterm)fptr[0],
  1137. (Eterm)fptr[1], (Uint)fptr[2],
  1138. NULL, fun_buf);
  1139. } else {
  1140. erts_snprintf(fun_buf, sizeof(DTRACE_CHARBUF_NAME(fun_buf)),
  1141. "<unknown/%p>", next);
  1142. }
  1143. }
  1144. DTRACE2(process_scheduled, process_buf, fun_buf);
  1145. }
  1146. #endif
  1147. Goto(next);
  1148. }
  1149. #if defined(DEBUG) || defined(NO_JUMP_TABLE)
  1150. emulator_loop:
  1151. #endif
  1152. #ifdef NO_JUMP_TABLE
  1153. switch (Go) {
  1154. #endif
  1155. #include "beam_hot.h"
  1156. #define STORE_ARITH_RESULT(res) StoreBifResult(2, (res));
  1157. #define ARITH_FUNC(name) erts_gc_##name
  1158. {
  1159. Eterm increment_reg_val;
  1160. Eterm increment_val;
  1161. Uint live;
  1162. Eterm result;
  1163. OpCase(i_increment_yIId):
  1164. increment_reg_val = yb(Arg(0));
  1165. goto do_increment;
  1166. OpCase(i_increment_xIId):
  1167. increment_reg_val = xb(Arg(0));
  1168. goto do_increment;
  1169. OpCase(i_increment_rIId):
  1170. increment_reg_val = r(0);
  1171. I--;
  1172. do_increment:
  1173. increment_val = Arg(1);
  1174. if (is_small(increment_reg_val)) {
  1175. Sint i = signed_val(increment_reg_val) + increment_val;
  1176. ASSERT(MY_IS_SSMALL(i) == IS_SSMALL(i));
  1177. if (MY_IS_SSMALL(i)) {
  1178. result = make_small(i);
  1179. store_result:
  1180. StoreBifResult(3, result);
  1181. }
  1182. }
  1183. live = Arg(2);
  1184. SWAPOUT;
  1185. reg[0] = r(0);
  1186. reg[live] = increment_reg_val;
  1187. reg[live+1] = make_small(increment_val);
  1188. result = erts_gc_mixed_plus(c_p, reg, live);
  1189. r(0) = reg[0];
  1190. SWAPIN;
  1191. ERTS_HOLE_CHECK(c_p);
  1192. if (is_value(result)) {
  1193. goto store_result;
  1194. }
  1195. ASSERT(c_p->freason != BADMATCH || is_value(c_p->fvalue));
  1196. goto find_func_info;
  1197. }
  1198. #define DO_BIG_ARITH(Func,Arg1,Arg2) \
  1199. do { \
  1200. Uint live = Arg(1); \
  1201. SWAPOUT; \
  1202. reg[0] = r(0); \
  1203. reg[live] = (Arg1); \
  1204. reg[live+1] = (Arg2); \
  1205. result = (Func)(c_p, reg, live); \
  1206. r(0) = reg[0]; \
  1207. SWAPIN; \
  1208. ERTS_HOLE_CHECK(c_p); \
  1209. if (is_value(result)) { \
  1210. StoreBifResult(4,result); \
  1211. } \
  1212. goto lb_Cl_error; \
  1213. } while(0)
  1214. OpCase(i_plus_jIxxd):
  1215. {
  1216. Eterm result;
  1217. if (is_both_small(xb(Arg(2)), xb(Arg(3)))) {
  1218. Sint i = signed_val(xb(Arg(2))) + signed_val(xb(Arg(3)));
  1219. ASSERT(MY_IS_SSMALL(i) == IS_SSMALL(i));
  1220. if (MY_IS_SSMALL(i)) {
  1221. result = make_small(i);
  1222. StoreBifResult(4, result);
  1223. }
  1224. }
  1225. DO_BIG_ARITH(ARITH_FUNC(mixed_plus), xb(Arg(2)), xb(Arg(3)));
  1226. }
  1227. OpCase(i_plus_jId):
  1228. {
  1229. Eterm result;
  1230. if (is_both_small(tmp_arg1, tmp_arg2)) {
  1231. Sint i = signed_val(tmp_arg1) + signed_val(tmp_arg2);
  1232. ASSERT(MY_IS_SSMALL(i) == IS_SSMALL(i));
  1233. if (MY_IS_SSMALL(i)) {
  1234. result = make_small(i);
  1235. STORE_ARITH_RESULT(result);
  1236. }
  1237. }
  1238. arith_func = ARITH_FUNC(mixed_plus);
  1239. goto do_big_arith2;
  1240. }
  1241. OpCase(i_minus_jIxxd):
  1242. {
  1243. Eterm result;
  1244. if (is_both_small(xb(Arg(2)), xb(Arg(3)))) {
  1245. Sint i = signed_val(xb(Arg(2))) - signed_val(xb(Arg(3)));
  1246. ASSERT(MY_IS_SSMALL(i) == IS_SSMALL(i));
  1247. if (MY_IS_SSMALL(i)) {
  1248. result = make_small(i);
  1249. StoreBifResult(4, result);
  1250. }
  1251. }
  1252. DO_BIG_ARITH(ARITH_FUNC(mixed_minus), xb(Arg(2)), xb(Arg(3)));
  1253. }
  1254. OpCase(i_minus_jId):
  1255. {
  1256. Eterm result;
  1257. if (is_both_small(tmp_arg1, tmp_arg2)) {
  1258. Sint i = signed_val(tmp_arg1) - signed_val(tmp_arg2);
  1259. ASSERT(MY_IS_SSMALL(i) == IS_SSMALL(i));
  1260. if (MY_IS_SSMALL(i)) {
  1261. result = make_small(i);
  1262. STORE_ARITH_RESULT(result);
  1263. }
  1264. }
  1265. arith_func = ARITH_FUNC(mixed_minus);
  1266. goto do_big_arith2;
  1267. }
  1268. OpCase(i_is_lt_f):
  1269. if (CMP_GE(tmp_arg1, tmp_arg2)) {
  1270. ClauseFail();
  1271. }
  1272. Next(1);
  1273. OpCase(i_is_ge_f):
  1274. if (CMP_LT(tmp_arg1, tmp_arg2)) {
  1275. ClauseFail();
  1276. }
  1277. Next(1);
  1278. OpCase(i_is_eq_f):
  1279. if (CMP_NE(tmp_arg1, tmp_arg2)) {
  1280. ClauseFail();
  1281. }
  1282. Next(1);
  1283. OpCase(i_is_ne_f):
  1284. if (CMP_EQ(tmp_arg1, tmp_arg2)) {
  1285. ClauseFail();
  1286. }
  1287. Next(1);
  1288. OpCase(i_is_eq_exact_f):
  1289. if (!EQ(tmp_arg1, tmp_arg2)) {
  1290. ClauseFail();
  1291. }
  1292. Next(1);
  1293. {
  1294. Eterm is_eq_exact_lit_val;
  1295. OpCase(i_is_eq_exact_literal_xfc):
  1296. is_eq_exact_lit_val = xb(Arg(0));
  1297. I++;
  1298. goto do_is_eq_exact_literal;
  1299. OpCase(i_is_eq_exact_literal_yfc):
  1300. is_eq_exact_lit_val = yb(Arg(0));
  1301. I++;
  1302. goto do_is_eq_exact_literal;
  1303. OpCase(i_is_eq_exact_literal_rfc):
  1304. is_eq_exact_lit_val = r(0);
  1305. do_is_eq_exact_literal:
  1306. if (!eq(Arg(1), is_eq_exact_lit_val)) {
  1307. ClauseFail();
  1308. }
  1309. Next(2);
  1310. }
  1311. {
  1312. Eterm is_ne_exact_lit_val;
  1313. OpCase(i_is_ne_exact_literal_xfc):
  1314. is_ne_exact_lit_val = xb(Arg(0));
  1315. I++;
  1316. goto do_is_ne_exact_literal;
  1317. OpCase(i_is_ne_exact_literal_yfc):
  1318. is_ne_exact_lit_val = yb(Arg(0));
  1319. I++;
  1320. goto do_is_ne_exact_literal;
  1321. OpCase(i_is_ne_exact_literal_rfc):
  1322. is_ne_exact_lit_val = r(0);
  1323. do_is_ne_exact_literal:
  1324. if (eq(Arg(1), is_ne_exact_lit_val)) {
  1325. ClauseFail();
  1326. }
  1327. Next(2);
  1328. }
  1329. OpCase(move_window3_xxxy): {
  1330. BeamInstr *next;
  1331. Eterm xt0, xt1, xt2;
  1332. Eterm *y = (Eterm *)(((unsigned char *)E) + (Arg(3)));
  1333. PreFetch(4, next);
  1334. xt0 = xb(Arg(0));
  1335. xt1 = xb(Arg(1));
  1336. xt2 = xb(Arg(2));
  1337. y[0] = xt0;
  1338. y[1] = xt1;
  1339. y[2] = xt2;
  1340. NextPF(4, next);
  1341. }
  1342. OpCase(move_window4_xxxxy): {
  1343. BeamInstr *next;
  1344. Eterm xt0, xt1, xt2, xt3;
  1345. Eterm *y = (Eterm *)(((unsigned char *)E) + (Arg(4)));
  1346. PreFetch(5, next);
  1347. xt0 = xb(Arg(0));
  1348. xt1 = xb(Arg(1));
  1349. xt2 = xb(Arg(2));
  1350. xt3 = xb(Arg(3));
  1351. y[0] = xt0;
  1352. y[1] = xt1;
  1353. y[2] = xt2;
  1354. y[3] = xt3;
  1355. NextPF(5, next);
  1356. }
  1357. OpCase(move_window5_xxxxxy): {
  1358. BeamInstr *next;
  1359. Eterm xt0, xt1, xt2, xt3, xt4;
  1360. Eterm *y = (Eterm *)(((unsigned char *)E) + (Arg(5)));
  1361. PreFetch(6, next);
  1362. xt0 = xb(Arg(0));
  1363. xt1 = xb(Arg(1));
  1364. xt2 = xb(Arg(2));
  1365. xt3 = xb(Arg(3));
  1366. xt4 = xb(Arg(4));
  1367. y[0] = xt0;
  1368. y[1] = xt1;
  1369. y[2] = xt2;
  1370. y[3] = xt3;
  1371. y[4] = xt4;
  1372. NextPF(6, next);
  1373. }
  1374. OpCase(i_move_call_only_fcr): {
  1375. r(0) = Arg(1);
  1376. }
  1377. /* FALL THROUGH */
  1378. OpCase(i_call_only_f): {
  1379. SET_I((BeamInstr *) Arg(0));
  1380. DTRACE_LOCAL_CALL(c_p, (Eterm)I[-3], (Eterm)I[-2], I[-1]);
  1381. Dispatch();
  1382. }
  1383. OpCase(i_move_call_last_fPcr): {
  1384. r(0) = Arg(2);
  1385. }
  1386. /* FALL THROUGH */
  1387. OpCase(i_call_last_fP): {
  1388. RESTORE_CP(E);
  1389. E = ADD_BYTE_OFFSET(E, Arg(1));
  1390. SET_I((BeamInstr *) Arg(0));
  1391. DTRACE_LOCAL_CALL(c_p, (Eterm)I[-3], (Eterm)I[-2], I[-1]);
  1392. Dispatch();
  1393. }
  1394. OpCase(i_move_call_crf): {
  1395. r(0) = Arg(0);
  1396. I++;
  1397. }
  1398. /* FALL THROUGH */
  1399. OpCase(i_call_f): {
  1400. SET_CP(c_p, I+2);
  1401. SET_I((BeamInstr *) Arg(0));

Large files are truncated, but you can click here to view the full file