PageRenderTime 60ms CodeModel.GetById 21ms RepoModel.GetById 1ms app.codeStats 0ms

/jit/jit-gen-x86.h

https://bitbucket.org/philburr/libjit
C Header | 1746 lines | 1406 code | 198 blank | 142 comment | 348 complexity | 88c493b099a19a3a54b40a0c209539a8 MD5 | raw file
Possible License(s): GPL-2.0, LGPL-2.1
/*
 * jit-gen-x86.h: Macros for generating x86 code
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *   Intel Corporation (ORP Project)
 *   Sergey Chaban (serge@wildwestsoftware.com)
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Patrik Torstensson
 *
 * Copyright (C) 2000 Intel Corporation.  All rights reserved.
 * Copyright (C) 2001, 2002 Ximian, Inc.
 *
 * This file originated with the Mono project (www.go-mono.com), and may
 * be redistributed under the terms of the Lesser General Public License.
 */
  17. #ifndef JIT_GEN_X86_H
  18. #define JIT_GEN_X86_H
  19. #define jit_assert(x) if (!(x)) break
  20. /*
  21. // x86 register numbers
  22. */
  23. typedef enum {
  24. X86_EAX = 0,
  25. X86_ECX = 1,
  26. X86_EDX = 2,
  27. X86_EBX = 3,
  28. X86_ESP = 4,
  29. X86_EBP = 5,
  30. X86_ESI = 6,
  31. X86_EDI = 7,
  32. X86_NREG
  33. } X86_Reg_No;
  34. /*
  35. // opcodes for alu instructions
  36. */
  37. typedef enum {
  38. X86_ADD = 0,
  39. X86_OR = 1,
  40. X86_ADC = 2,
  41. X86_SBB = 3,
  42. X86_AND = 4,
  43. X86_SUB = 5,
  44. X86_XOR = 6,
  45. X86_CMP = 7,
  46. X86_NALU
  47. } X86_ALU_Opcode;
  48. /*
  49. // opcodes for shift instructions
  50. */
  51. typedef enum {
  52. X86_SHLD,
  53. X86_SHLR,
  54. X86_ROL = 0,
  55. X86_ROR = 1,
  56. X86_RCL = 2,
  57. X86_RCR = 3,
  58. X86_SHL = 4,
  59. X86_SHR = 5,
  60. X86_SAR = 7,
  61. X86_NSHIFT = 8
  62. } X86_Shift_Opcode;
  63. /*
  64. // opcodes for floating-point instructions
  65. */
  66. typedef enum {
  67. X86_FADD = 0,
  68. X86_FMUL = 1,
  69. X86_FCOM = 2,
  70. X86_FCOMP = 3,
  71. X86_FSUB = 4,
  72. X86_FSUBR = 5,
  73. X86_FDIV = 6,
  74. X86_FDIVR = 7,
  75. X86_NFP = 8
  76. } X86_FP_Opcode;
  77. /*
  78. // integer conditions codes
  79. */
  80. typedef enum {
  81. X86_CC_EQ = 0, X86_CC_E = 0, X86_CC_Z = 0,
  82. X86_CC_NE = 1, X86_CC_NZ = 1,
  83. X86_CC_LT = 2, X86_CC_B = 2, X86_CC_C = 2, X86_CC_NAE = 2,
  84. X86_CC_LE = 3, X86_CC_BE = 3, X86_CC_NA = 3,
  85. X86_CC_GT = 4, X86_CC_A = 4, X86_CC_NBE = 4,
  86. X86_CC_GE = 5, X86_CC_AE = 5, X86_CC_NB = 5, X86_CC_NC = 5,
  87. X86_CC_LZ = 6, X86_CC_S = 6,
  88. X86_CC_GEZ = 7, X86_CC_NS = 7,
  89. X86_CC_P = 8, X86_CC_PE = 8,
  90. X86_CC_NP = 9, X86_CC_PO = 9,
  91. X86_CC_O = 10,
  92. X86_CC_NO = 11,
  93. X86_NCC
  94. } X86_CC;
  95. /* FP status */
  96. enum {
  97. X86_FP_C0 = 0x100,
  98. X86_FP_C1 = 0x200,
  99. X86_FP_C2 = 0x400,
  100. X86_FP_C3 = 0x4000,
  101. X86_FP_CC_MASK = 0x4500
  102. };
  103. /* FP control word */
  104. enum {
  105. X86_FPCW_INVOPEX_MASK = 0x1,
  106. X86_FPCW_DENOPEX_MASK = 0x2,
  107. X86_FPCW_ZERODIV_MASK = 0x4,
  108. X86_FPCW_OVFEX_MASK = 0x8,
  109. X86_FPCW_UNDFEX_MASK = 0x10,
  110. X86_FPCW_PRECEX_MASK = 0x20,
  111. X86_FPCW_PRECC_MASK = 0x300,
  112. X86_FPCW_ROUNDC_MASK = 0xc00,
  113. /* values for precision control */
  114. X86_FPCW_PREC_SINGLE = 0,
  115. X86_FPCW_PREC_DOUBLE = 0x200,
  116. X86_FPCW_PREC_EXTENDED = 0x300,
  117. /* values for rounding control */
  118. X86_FPCW_ROUND_NEAREST = 0,
  119. X86_FPCW_ROUND_DOWN = 0x400,
  120. X86_FPCW_ROUND_UP = 0x800,
  121. X86_FPCW_ROUND_TOZERO = 0xc00
  122. };
  123. /*
  124. // prefix code
  125. */
  126. typedef enum {
  127. X86_LOCK_PREFIX = 0xF0,
  128. X86_REPNZ_PREFIX = 0xF2,
  129. X86_REPZ_PREFIX = 0xF3,
  130. X86_REP_PREFIX = 0xF3,
  131. X86_CS_PREFIX = 0x2E,
  132. X86_SS_PREFIX = 0x36,
  133. X86_DS_PREFIX = 0x3E,
  134. X86_ES_PREFIX = 0x26,
  135. X86_FS_PREFIX = 0x64,
  136. X86_GS_PREFIX = 0x65,
  137. X86_UNLIKELY_PREFIX = 0x2E,
  138. X86_LIKELY_PREFIX = 0x3E,
  139. X86_OPERAND_PREFIX = 0x66,
  140. X86_ADDRESS_PREFIX = 0x67
  141. } X86_Prefix;
  142. static const unsigned char
  143. x86_cc_unsigned_map [X86_NCC] = {
  144. 0x74, /* eq */
  145. 0x75, /* ne */
  146. 0x72, /* lt */
  147. 0x76, /* le */
  148. 0x77, /* gt */
  149. 0x73, /* ge */
  150. 0x78, /* lz */
  151. 0x79, /* gez */
  152. 0x7a, /* p */
  153. 0x7b, /* np */
  154. 0x70, /* o */
  155. 0x71, /* no */
  156. };
  157. static const unsigned char
  158. x86_cc_signed_map [X86_NCC] = {
  159. 0x74, /* eq */
  160. 0x75, /* ne */
  161. 0x7c, /* lt */
  162. 0x7e, /* le */
  163. 0x7f, /* gt */
  164. 0x7d, /* ge */
  165. 0x78, /* lz */
  166. 0x79, /* gez */
  167. 0x7a, /* p */
  168. 0x7b, /* np */
  169. 0x70, /* o */
  170. 0x71, /* no */
  171. };
  172. typedef union {
  173. int val;
  174. unsigned char b [4];
  175. } x86_imm_buf;
  176. #define X86_NOBASEREG (-1)
  177. /*
  178. // bitvector mask for callee-saved registers
  179. */
  180. #define X86_ESI_MASK (1<<X86_ESI)
  181. #define X86_EDI_MASK (1<<X86_EDI)
  182. #define X86_EBX_MASK (1<<X86_EBX)
  183. #define X86_EBP_MASK (1<<X86_EBP)
  184. #define X86_CALLEE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX))
  185. #define X86_CALLER_REGS ((1<<X86_EBX) | (1<<X86_EBP) | (1<<X86_ESI) | (1<<X86_EDI))
  186. #define X86_BYTE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX) | (1<<X86_EBX))
  187. #define X86_IS_SCRATCH(reg) (X86_CALLER_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
  188. #define X86_IS_CALLEE(reg) (X86_CALLEE_REGS & (1 << (reg))) /* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */
  189. #define X86_IS_BYTE_REG(reg) ((reg) < 4)
  190. /*
  191. // Frame structure:
  192. //
  193. // +--------------------------------+
  194. // | in_arg[0] = var[0] |
  195. // | in_arg[1] = var[1] |
  196. // | . . . |
  197. // | in_arg[n_arg-1] = var[n_arg-1] |
  198. // +--------------------------------+
  199. // | return IP |
  200. // +--------------------------------+
  201. // | saved EBP | <-- frame pointer (EBP)
  202. // +--------------------------------+
  203. // | ... | n_extra
  204. // +--------------------------------+
  205. // | var[n_arg] |
  206. // | var[n_arg+1] | local variables area
  207. // | . . . |
  208. // | var[n_var-1] |
  209. // +--------------------------------+
  210. // | |
  211. // | |
  212. // | spill area | area for spilling mimic stack
  213. // | |
  214. // +--------------------------------|
  215. // | ebx |
  216. // | ebp [ESP_Frame only] |
  217. // | esi | 0..3 callee-saved regs
  218. // | edi | <-- stack pointer (ESP)
  219. // +--------------------------------+
  220. // | stk0 |
  221. // | stk1 | operand stack area/
  222. // | . . . | out args
  223. // | stkn-1 |
  224. // +--------------------------------|
  225. //
  226. //
  227. */
  228. /*
  229. * useful building blocks
  230. */
  231. #define x86_modrm_mod(modrm) ((modrm) >> 6)
  232. #define x86_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
  233. #define x86_modrm_rm(modrm) ((modrm) & 0x7)
  234. #define x86_address_byte(inst,m,o,r) do { *(inst)++ = ((((m)&0x03)<<6)|(((o)&0x07)<<3)|(((r)&0x07))); } while (0)
  235. #define x86_imm_emit32(inst,imm) \
  236. do { \
  237. x86_imm_buf imb; imb.val = (int) (imm); \
  238. *(inst)++ = imb.b [0]; \
  239. *(inst)++ = imb.b [1]; \
  240. *(inst)++ = imb.b [2]; \
  241. *(inst)++ = imb.b [3]; \
  242. } while (0)
  243. #define x86_imm_emit16(inst,imm) do { *(short*)(inst) = (imm); (inst) += 2; } while (0)
  244. #define x86_imm_emit8(inst,imm) do { *(inst) = (unsigned char)((imm) & 0xff); ++(inst); } while (0)
  245. #define x86_is_imm8(imm) (((int)(imm) >= -128 && (int)(imm) <= 127))
  246. #define x86_is_imm16(imm) (((int)(imm) >= -(1<<16) && (int)(imm) <= ((1<<16)-1)))
  247. #define x86_reg_emit(inst,r,regno) do { x86_address_byte ((inst), 3, (r), (regno)); } while (0)
  248. #define x86_reg8_emit(inst,r,regno,is_rh,is_rnoh) do {x86_address_byte ((inst), 3, (is_rh)?((r)|4):(r), (is_rnoh)?((regno)|4):(regno));} while (0)
  249. #define x86_regp_emit(inst,r,regno) do { x86_address_byte ((inst), 0, (r), (regno)); } while (0)
  250. #define x86_mem_emit(inst,r,disp) do { x86_address_byte ((inst), 0, (r), 5); x86_imm_emit32((inst), (disp)); } while (0)
  251. #define x86_membase_emit(inst,r,basereg,disp) do {\
  252. if ((basereg) == X86_ESP) { \
  253. if ((disp) == 0) { \
  254. x86_address_byte ((inst), 0, (r), X86_ESP); \
  255. x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
  256. } else if (x86_is_imm8((disp))) { \
  257. x86_address_byte ((inst), 1, (r), X86_ESP); \
  258. x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
  259. x86_imm_emit8 ((inst), (disp)); \
  260. } else { \
  261. x86_address_byte ((inst), 2, (r), X86_ESP); \
  262. x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
  263. x86_imm_emit32 ((inst), (disp)); \
  264. } \
  265. break; \
  266. } \
  267. if ((disp) == 0 && (basereg) != X86_EBP) { \
  268. x86_address_byte ((inst), 0, (r), (basereg)); \
  269. break; \
  270. } \
  271. if (x86_is_imm8((disp))) { \
  272. x86_address_byte ((inst), 1, (r), (basereg)); \
  273. x86_imm_emit8 ((inst), (disp)); \
  274. } else { \
  275. x86_address_byte ((inst), 2, (r), (basereg)); \
  276. x86_imm_emit32 ((inst), (disp)); \
  277. } \
  278. } while (0)
  279. #define x86_memindex_emit(inst,r,basereg,disp,indexreg,shift) \
  280. do { \
  281. if ((basereg) == X86_NOBASEREG) { \
  282. x86_address_byte ((inst), 0, (r), 4); \
  283. x86_address_byte ((inst), (shift), (indexreg), 5); \
  284. x86_imm_emit32 ((inst), (disp)); \
  285. } else if ((disp) == 0 && (basereg) != X86_EBP) { \
  286. x86_address_byte ((inst), 0, (r), 4); \
  287. x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
  288. } else if (x86_is_imm8((disp))) { \
  289. x86_address_byte ((inst), 1, (r), 4); \
  290. x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
  291. x86_imm_emit8 ((inst), (disp)); \
  292. } else { \
  293. x86_address_byte ((inst), 2, (r), 4); \
  294. x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
  295. x86_imm_emit32 ((inst), (disp)); \
  296. } \
  297. } while (0)
  298. /*
  299. * target is the position in the code where to jump to:
  300. * target = code;
  301. * .. output loop code...
  302. * x86_mov_reg_imm (code, X86_EAX, 0);
  303. * loop = code;
  304. * x86_loop (code, -1);
  305. * ... finish method
  306. *
  307. * patch displacement
  308. * x86_patch (loop, target);
  309. *
  310. * ins should point at the start of the instruction that encodes a target.
  311. * the instruction is inspected for validity and the correct displacement
  312. * is inserted.
  313. */
  314. #define x86_patch(ins,target) \
  315. do { \
  316. unsigned char* pos = (ins) + 1; \
  317. int disp, size = 0; \
  318. switch (*(unsigned char*)(ins)) { \
  319. case 0xe8: case 0xe9: ++size; break; /* call, jump32 */ \
  320. case 0x0f: if (!(*pos >= 0x70 && *pos <= 0x8f)) jit_assert (0); \
  321. ++size; ++pos; break; /* prefix for 32-bit disp */ \
  322. case 0xe0: case 0xe1: case 0xe2: /* loop */ \
  323. case 0xeb: /* jump8 */ \
  324. /* conditional jump opcodes */ \
  325. case 0x70: case 0x71: case 0x72: case 0x73: \
  326. case 0x74: case 0x75: case 0x76: case 0x77: \
  327. case 0x78: case 0x79: case 0x7a: case 0x7b: \
  328. case 0x7c: case 0x7d: case 0x7e: case 0x7f: \
  329. break; \
  330. default: jit_assert (0); \
  331. } \
  332. disp = (target) - pos; \
  333. if (size) x86_imm_emit32 (pos, disp - 4); \
  334. else if (x86_is_imm8 (disp - 1)) x86_imm_emit8 (pos, disp - 1); \
  335. else jit_assert (0); \
  336. } while (0)
  337. #define x86_breakpoint(inst) \
  338. do { \
  339. *(inst)++ = 0xcc; \
  340. } while (0)
  341. #define x86_cld(inst) do { *(inst)++ =(unsigned char)0xfc; } while (0)
  342. #define x86_stosb(inst) do { *(inst)++ =(unsigned char)0xaa; } while (0)
  343. #define x86_stosl(inst) do { *(inst)++ =(unsigned char)0xab; } while (0)
  344. #define x86_stosd(inst) x86_stosl((inst))
  345. #define x86_movsb(inst) do { *(inst)++ =(unsigned char)0xa4; } while (0)
  346. #define x86_movsl(inst) do { *(inst)++ =(unsigned char)0xa5; } while (0)
  347. #define x86_movsd(inst) x86_movsl((inst))
  348. #define x86_prefix(inst,p) do { *(inst)++ =(unsigned char) (p); } while (0)
  349. #define x86_rdtsc(inst) \
  350. do { \
  351. *(inst)++ = 0x0f; \
  352. *(inst)++ = 0x31; \
  353. } while (0)
  354. #define x86_cmpxchg_reg_reg(inst,dreg,reg) \
  355. do { \
  356. *(inst)++ = (unsigned char)0x0f; \
  357. *(inst)++ = (unsigned char)0xb1; \
  358. x86_reg_emit ((inst), (reg), (dreg)); \
  359. } while (0)
  360. #define x86_cmpxchg_mem_reg(inst,mem,reg) \
  361. do { \
  362. *(inst)++ = (unsigned char)0x0f; \
  363. *(inst)++ = (unsigned char)0xb1; \
  364. x86_mem_emit ((inst), (reg), (mem)); \
  365. } while (0)
  366. #define x86_cmpxchg_membase_reg(inst,basereg,disp,reg) \
  367. do { \
  368. *(inst)++ = (unsigned char)0x0f; \
  369. *(inst)++ = (unsigned char)0xb1; \
  370. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  371. } while (0)
  372. #define x86_xchg_reg_reg(inst,dreg,reg,size) \
  373. do { \
  374. if ((size) == 1) \
  375. *(inst)++ = (unsigned char)0x86; \
  376. else \
  377. *(inst)++ = (unsigned char)0x87; \
  378. x86_reg_emit ((inst), (reg), (dreg)); \
  379. } while (0)
  380. #define x86_xchg_mem_reg(inst,mem,reg,size) \
  381. do { \
  382. if ((size) == 1) \
  383. *(inst)++ = (unsigned char)0x86; \
  384. else \
  385. *(inst)++ = (unsigned char)0x87; \
  386. x86_mem_emit ((inst), (reg), (mem)); \
  387. } while (0)
  388. #define x86_xchg_membase_reg(inst,basereg,disp,reg,size) \
  389. do { \
  390. if ((size) == 1) \
  391. *(inst)++ = (unsigned char)0x86; \
  392. else \
  393. *(inst)++ = (unsigned char)0x87; \
  394. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  395. } while (0)
  396. #define x86_xadd_reg_reg(inst,dreg,reg,size) \
  397. do { \
  398. *(inst)++ = (unsigned char)0x0F; \
  399. if ((size) == 1) \
  400. *(inst)++ = (unsigned char)0xC0; \
  401. else \
  402. *(inst)++ = (unsigned char)0xC1; \
  403. x86_reg_emit ((inst), (reg), (dreg)); \
  404. } while (0)
  405. #define x86_xadd_mem_reg(inst,mem,reg,size) \
  406. do { \
  407. *(inst)++ = (unsigned char)0x0F; \
  408. if ((size) == 1) \
  409. *(inst)++ = (unsigned char)0xC0; \
  410. else \
  411. *(inst)++ = (unsigned char)0xC1; \
  412. x86_mem_emit ((inst), (reg), (mem)); \
  413. } while (0)
  414. #define x86_xadd_membase_reg(inst,basereg,disp,reg,size) \
  415. do { \
  416. *(inst)++ = (unsigned char)0x0F; \
  417. if ((size) == 1) \
  418. *(inst)++ = (unsigned char)0xC0; \
  419. else \
  420. *(inst)++ = (unsigned char)0xC1; \
  421. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  422. } while (0)
  423. #define x86_inc_mem(inst,mem) \
  424. do { \
  425. *(inst)++ = (unsigned char)0xff; \
  426. x86_mem_emit ((inst), 0, (mem)); \
  427. } while (0)
  428. #define x86_inc_membase(inst,basereg,disp) \
  429. do { \
  430. *(inst)++ = (unsigned char)0xff; \
  431. x86_membase_emit ((inst), 0, (basereg), (disp)); \
  432. } while (0)
  433. #define x86_inc_reg(inst,reg) do { *(inst)++ = (unsigned char)0x40 + (reg); } while (0)
  434. #define x86_dec_mem(inst,mem) \
  435. do { \
  436. *(inst)++ = (unsigned char)0xff; \
  437. x86_mem_emit ((inst), 1, (mem)); \
  438. } while (0)
  439. #define x86_dec_membase(inst,basereg,disp) \
  440. do { \
  441. *(inst)++ = (unsigned char)0xff; \
  442. x86_membase_emit ((inst), 1, (basereg), (disp)); \
  443. } while (0)
  444. #define x86_dec_reg(inst,reg) do { *(inst)++ = (unsigned char)0x48 + (reg); } while (0)
  445. #define x86_not_mem(inst,mem) \
  446. do { \
  447. *(inst)++ = (unsigned char)0xf7; \
  448. x86_mem_emit ((inst), 2, (mem)); \
  449. } while (0)
  450. #define x86_not_membase(inst,basereg,disp) \
  451. do { \
  452. *(inst)++ = (unsigned char)0xf7; \
  453. x86_membase_emit ((inst), 2, (basereg), (disp)); \
  454. } while (0)
  455. #define x86_not_reg(inst,reg) \
  456. do { \
  457. *(inst)++ = (unsigned char)0xf7; \
  458. x86_reg_emit ((inst), 2, (reg)); \
  459. } while (0)
  460. #define x86_neg_mem(inst,mem) \
  461. do { \
  462. *(inst)++ = (unsigned char)0xf7; \
  463. x86_mem_emit ((inst), 3, (mem)); \
  464. } while (0)
  465. #define x86_neg_membase(inst,basereg,disp) \
  466. do { \
  467. *(inst)++ = (unsigned char)0xf7; \
  468. x86_membase_emit ((inst), 3, (basereg), (disp)); \
  469. } while (0)
  470. #define x86_neg_reg(inst,reg) \
  471. do { \
  472. *(inst)++ = (unsigned char)0xf7; \
  473. x86_reg_emit ((inst), 3, (reg)); \
  474. } while (0)
  475. #define x86_nop(inst) do { *(inst)++ = (unsigned char)0x90; } while (0)
  476. #define x86_alu_reg_imm(inst,opc,reg,imm) \
  477. do { \
  478. if ((reg) == X86_EAX) { \
  479. *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
  480. x86_imm_emit32 ((inst), (imm)); \
  481. break; \
  482. } \
  483. if (x86_is_imm8((imm))) { \
  484. *(inst)++ = (unsigned char)0x83; \
  485. x86_reg_emit ((inst), (opc), (reg)); \
  486. x86_imm_emit8 ((inst), (imm)); \
  487. } else { \
  488. *(inst)++ = (unsigned char)0x81; \
  489. x86_reg_emit ((inst), (opc), (reg)); \
  490. x86_imm_emit32 ((inst), (imm)); \
  491. } \
  492. } while (0)
  493. #define x86_alu_reg16_imm(inst,opc,reg,imm) \
  494. do { \
  495. *(inst)++ = (unsigned char)0x66; \
  496. if ((reg) == X86_EAX) { \
  497. *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
  498. x86_imm_emit16 ((inst), (imm)); \
  499. break; \
  500. } \
  501. if (x86_is_imm8((imm))) { \
  502. *(inst)++ = (unsigned char)0x83; \
  503. x86_reg_emit ((inst), (opc), (reg)); \
  504. x86_imm_emit8 ((inst), (imm)); \
  505. } else { \
  506. *(inst)++ = (unsigned char)0x81; \
  507. x86_reg_emit ((inst), (opc), (reg)); \
  508. x86_imm_emit16 ((inst), (imm)); \
  509. } \
  510. } while (0)
  511. #define x86_alu_mem_imm(inst,opc,mem,imm) \
  512. do { \
  513. if (x86_is_imm8((imm))) { \
  514. *(inst)++ = (unsigned char)0x83; \
  515. x86_mem_emit ((inst), (opc), (mem)); \
  516. x86_imm_emit8 ((inst), (imm)); \
  517. } else { \
  518. *(inst)++ = (unsigned char)0x81; \
  519. x86_mem_emit ((inst), (opc), (mem)); \
  520. x86_imm_emit32 ((inst), (imm)); \
  521. } \
  522. } while (0)
  523. #define x86_alu_membase_imm(inst,opc,basereg,disp,imm) \
  524. do { \
  525. if (x86_is_imm8((imm))) { \
  526. *(inst)++ = (unsigned char)0x83; \
  527. x86_membase_emit ((inst), (opc), (basereg), (disp)); \
  528. x86_imm_emit8 ((inst), (imm)); \
  529. } else { \
  530. *(inst)++ = (unsigned char)0x81; \
  531. x86_membase_emit ((inst), (opc), (basereg), (disp)); \
  532. x86_imm_emit32 ((inst), (imm)); \
  533. } \
  534. } while (0)
  535. #define x86_alu_membase8_imm(inst,opc,basereg,disp,imm) \
  536. do { \
  537. *(inst)++ = (unsigned char)0x80; \
  538. x86_membase_emit ((inst), (opc), (basereg), (disp)); \
  539. x86_imm_emit8 ((inst), (imm)); \
  540. } while (0)
  541. #define x86_alu_mem_reg(inst,opc,mem,reg) \
  542. do { \
  543. *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
  544. x86_mem_emit ((inst), (reg), (mem)); \
  545. } while (0)
  546. #define x86_alu_membase_reg(inst,opc,basereg,disp,reg) \
  547. do { \
  548. *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
  549. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  550. } while (0)
  551. #define x86_alu_reg_reg(inst,opc,dreg,reg) \
  552. do { \
  553. *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
  554. x86_reg_emit ((inst), (dreg), (reg)); \
  555. } while (0)
  556. /**
  557. * @x86_alu_reg8_reg8:
  558. * Supports ALU operations between two 8-bit registers.
  559. * dreg := dreg opc reg
  560. * X86_Reg_No enum is used to specify the registers.
  561. * Additionally is_*_h flags are used to specify what part
  562. * of a given 32-bit register is used - high (TRUE) or low (FALSE).
  563. * For example: dreg = X86_EAX, is_dreg_h = TRUE -> use AH
  564. */
  565. #define x86_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h) \
  566. do { \
  567. *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
  568. x86_reg8_emit ((inst), (dreg), (reg), (is_dreg_h), (is_reg_h)); \
  569. } while (0)
  570. #define x86_alu_reg_mem(inst,opc,reg,mem) \
  571. do { \
  572. *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
  573. x86_mem_emit ((inst), (reg), (mem)); \
  574. } while (0)
  575. #define x86_alu_reg_membase(inst,opc,reg,basereg,disp) \
  576. do { \
  577. *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
  578. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  579. } while (0)
  580. #define x86_test_reg_imm(inst,reg,imm) \
  581. do { \
  582. if ((reg) == X86_EAX) { \
  583. *(inst)++ = (unsigned char)0xa9; \
  584. } else { \
  585. *(inst)++ = (unsigned char)0xf7; \
  586. x86_reg_emit ((inst), 0, (reg)); \
  587. } \
  588. x86_imm_emit32 ((inst), (imm)); \
  589. } while (0)
  590. #define x86_test_mem_imm(inst,mem,imm) \
  591. do { \
  592. *(inst)++ = (unsigned char)0xf7; \
  593. x86_mem_emit ((inst), 0, (mem)); \
  594. x86_imm_emit32 ((inst), (imm)); \
  595. } while (0)
  596. #define x86_test_membase_imm(inst,basereg,disp,imm) \
  597. do { \
  598. *(inst)++ = (unsigned char)0xf7; \
  599. x86_membase_emit ((inst), 0, (basereg), (disp)); \
  600. x86_imm_emit32 ((inst), (imm)); \
  601. } while (0)
  602. #define x86_test_reg_reg(inst,dreg,reg) \
  603. do { \
  604. *(inst)++ = (unsigned char)0x85; \
  605. x86_reg_emit ((inst), (reg), (dreg)); \
  606. } while (0)
  607. #define x86_test_mem_reg(inst,mem,reg) \
  608. do { \
  609. *(inst)++ = (unsigned char)0x85; \
  610. x86_mem_emit ((inst), (reg), (mem)); \
  611. } while (0)
  612. #define x86_test_membase_reg(inst,basereg,disp,reg) \
  613. do { \
  614. *(inst)++ = (unsigned char)0x85; \
  615. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  616. } while (0)
  617. #define x86_shift_reg_imm(inst,opc,reg,imm) \
  618. do { \
  619. if ((imm) == 1) { \
  620. *(inst)++ = (unsigned char)0xd1; \
  621. x86_reg_emit ((inst), (opc), (reg)); \
  622. } else { \
  623. *(inst)++ = (unsigned char)0xc1; \
  624. x86_reg_emit ((inst), (opc), (reg)); \
  625. x86_imm_emit8 ((inst), (imm)); \
  626. } \
  627. } while (0)
  628. #define x86_shift_mem_imm(inst,opc,mem,imm) \
  629. do { \
  630. if ((imm) == 1) { \
  631. *(inst)++ = (unsigned char)0xd1; \
  632. x86_mem_emit ((inst), (opc), (mem)); \
  633. } else { \
  634. *(inst)++ = (unsigned char)0xc1; \
  635. x86_mem_emit ((inst), (opc), (mem)); \
  636. x86_imm_emit8 ((inst), (imm)); \
  637. } \
  638. } while (0)
  639. #define x86_shift_membase_imm(inst,opc,basereg,disp,imm) \
  640. do { \
  641. if ((imm) == 1) { \
  642. *(inst)++ = (unsigned char)0xd1; \
  643. x86_membase_emit ((inst), (opc), (basereg), (disp)); \
  644. } else { \
  645. *(inst)++ = (unsigned char)0xc1; \
  646. x86_membase_emit ((inst), (opc), (basereg), (disp)); \
  647. x86_imm_emit8 ((inst), (imm)); \
  648. } \
  649. } while (0)
  650. #define x86_shift_reg(inst,opc,reg) \
  651. do { \
  652. *(inst)++ = (unsigned char)0xd3; \
  653. x86_reg_emit ((inst), (opc), (reg)); \
  654. } while (0)
  655. #define x86_shift_mem(inst,opc,mem) \
  656. do { \
  657. *(inst)++ = (unsigned char)0xd3; \
  658. x86_mem_emit ((inst), (opc), (mem)); \
  659. } while (0)
  660. #define x86_shift_membase(inst,opc,basereg,disp) \
  661. do { \
  662. *(inst)++ = (unsigned char)0xd3; \
  663. x86_membase_emit ((inst), (opc), (basereg), (disp)); \
  664. } while (0)
  665. /*
  666. * Multi op shift missing.
  667. */
  668. #define x86_shrd_reg(inst,dreg,reg) \
  669. do { \
  670. *(inst)++ = (unsigned char)0x0f; \
  671. *(inst)++ = (unsigned char)0xad; \
  672. x86_reg_emit ((inst), (reg), (dreg)); \
  673. } while (0)
  674. #define x86_shrd_reg_imm(inst,dreg,reg,shamt) \
  675. do { \
  676. *(inst)++ = (unsigned char)0x0f; \
  677. *(inst)++ = (unsigned char)0xac; \
  678. x86_reg_emit ((inst), (reg), (dreg)); \
  679. x86_imm_emit8 ((inst), (shamt)); \
  680. } while (0)
  681. #define x86_shld_reg(inst,dreg,reg) \
  682. do { \
  683. *(inst)++ = (unsigned char)0x0f; \
  684. *(inst)++ = (unsigned char)0xa5; \
  685. x86_reg_emit ((inst), (reg), (dreg)); \
  686. } while (0)
  687. #define x86_shld_reg_imm(inst,dreg,reg,shamt) \
  688. do { \
  689. *(inst)++ = (unsigned char)0x0f; \
  690. *(inst)++ = (unsigned char)0xa4; \
  691. x86_reg_emit ((inst), (reg), (dreg)); \
  692. x86_imm_emit8 ((inst), (shamt)); \
  693. } while (0)
  694. /*
  695. * EDX:EAX = EAX * rm
  696. */
  697. #define x86_mul_reg(inst,reg,is_signed) \
  698. do { \
  699. *(inst)++ = (unsigned char)0xf7; \
  700. x86_reg_emit ((inst), 4 + ((is_signed) ? 1 : 0), (reg)); \
  701. } while (0)
  702. #define x86_mul_mem(inst,mem,is_signed) \
  703. do { \
  704. *(inst)++ = (unsigned char)0xf7; \
  705. x86_mem_emit ((inst), 4 + ((is_signed) ? 1 : 0), (mem)); \
  706. } while (0)
  707. #define x86_mul_membase(inst,basereg,disp,is_signed) \
  708. do { \
  709. *(inst)++ = (unsigned char)0xf7; \
  710. x86_membase_emit ((inst), 4 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
  711. } while (0)
  712. /*
  713. * r *= rm
  714. */
  715. #define x86_imul_reg_reg(inst,dreg,reg) \
  716. do { \
  717. *(inst)++ = (unsigned char)0x0f; \
  718. *(inst)++ = (unsigned char)0xaf; \
  719. x86_reg_emit ((inst), (dreg), (reg)); \
  720. } while (0)
  721. #define x86_imul_reg_mem(inst,reg,mem) \
  722. do { \
  723. *(inst)++ = (unsigned char)0x0f; \
  724. *(inst)++ = (unsigned char)0xaf; \
  725. x86_mem_emit ((inst), (reg), (mem)); \
  726. } while (0)
  727. #define x86_imul_reg_membase(inst,reg,basereg,disp) \
  728. do { \
  729. *(inst)++ = (unsigned char)0x0f; \
  730. *(inst)++ = (unsigned char)0xaf; \
  731. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  732. } while (0)
  733. /*
  734. * dreg = rm * imm
  735. */
  736. #define x86_imul_reg_reg_imm(inst,dreg,reg,imm) \
  737. do { \
  738. if (x86_is_imm8 ((imm))) { \
  739. *(inst)++ = (unsigned char)0x6b; \
  740. x86_reg_emit ((inst), (dreg), (reg)); \
  741. x86_imm_emit8 ((inst), (imm)); \
  742. } else { \
  743. *(inst)++ = (unsigned char)0x69; \
  744. x86_reg_emit ((inst), (dreg), (reg)); \
  745. x86_imm_emit32 ((inst), (imm)); \
  746. } \
  747. } while (0)
  748. #define x86_imul_reg_mem_imm(inst,reg,mem,imm) \
  749. do { \
  750. if (x86_is_imm8 ((imm))) { \
  751. *(inst)++ = (unsigned char)0x6b; \
  752. x86_mem_emit ((inst), (reg), (mem)); \
  753. x86_imm_emit8 ((inst), (imm)); \
  754. } else { \
  755. *(inst)++ = (unsigned char)0x69; \
  756. x86_reg_emit ((inst), (reg), (mem)); \
  757. x86_imm_emit32 ((inst), (imm)); \
  758. } \
  759. } while (0)
  760. #define x86_imul_reg_membase_imm(inst,reg,basereg,disp,imm) \
  761. do { \
  762. if (x86_is_imm8 ((imm))) { \
  763. *(inst)++ = (unsigned char)0x6b; \
  764. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  765. x86_imm_emit8 ((inst), (imm)); \
  766. } else { \
  767. *(inst)++ = (unsigned char)0x69; \
  768. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  769. x86_imm_emit32 ((inst), (imm)); \
  770. } \
  771. } while (0)
  772. /*
  773. * divide EDX:EAX by rm;
  774. * eax = quotient, edx = remainder
  775. */
  776. #define x86_div_reg(inst,reg,is_signed) \
  777. do { \
  778. *(inst)++ = (unsigned char)0xf7; \
  779. x86_reg_emit ((inst), 6 + ((is_signed) ? 1 : 0), (reg)); \
  780. } while (0)
  781. #define x86_div_mem(inst,mem,is_signed) \
  782. do { \
  783. *(inst)++ = (unsigned char)0xf7; \
  784. x86_mem_emit ((inst), 6 + ((is_signed) ? 1 : 0), (mem)); \
  785. } while (0)
  786. #define x86_div_membase(inst,basereg,disp,is_signed) \
  787. do { \
  788. *(inst)++ = (unsigned char)0xf7; \
  789. x86_membase_emit ((inst), 6 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
  790. } while (0)
  791. #define x86_mov_mem_reg(inst,mem,reg,size) \
  792. do { \
  793. switch ((size)) { \
  794. case 1: *(inst)++ = (unsigned char)0x88; break; \
  795. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  796. case 4: *(inst)++ = (unsigned char)0x89; break; \
  797. default: jit_assert (0); \
  798. } \
  799. x86_mem_emit ((inst), (reg), (mem)); \
  800. } while (0)
  801. #define x86_mov_regp_reg(inst,regp,reg,size) \
  802. do { \
  803. switch ((size)) { \
  804. case 1: *(inst)++ = (unsigned char)0x88; break; \
  805. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  806. case 4: *(inst)++ = (unsigned char)0x89; break; \
  807. default: jit_assert (0); \
  808. } \
  809. x86_regp_emit ((inst), (reg), (regp)); \
  810. } while (0)
  811. #define x86_mov_membase_reg(inst,basereg,disp,reg,size) \
  812. do { \
  813. switch ((size)) { \
  814. case 1: *(inst)++ = (unsigned char)0x88; break; \
  815. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  816. case 4: *(inst)++ = (unsigned char)0x89; break; \
  817. default: jit_assert (0); \
  818. } \
  819. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  820. } while (0)
  821. #define x86_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) \
  822. do { \
  823. switch ((size)) { \
  824. case 1: *(inst)++ = (unsigned char)0x88; break; \
  825. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  826. case 4: *(inst)++ = (unsigned char)0x89; break; \
  827. default: jit_assert (0); \
  828. } \
  829. x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
  830. } while (0)
  831. #define x86_mov_reg_reg(inst,dreg,reg,size) \
  832. do { \
  833. switch ((size)) { \
  834. case 1: *(inst)++ = (unsigned char)0x8a; break; \
  835. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  836. case 4: *(inst)++ = (unsigned char)0x8b; break; \
  837. default: jit_assert (0); \
  838. } \
  839. x86_reg_emit ((inst), (dreg), (reg)); \
  840. } while (0)
  841. #define x86_mov_reg_mem(inst,reg,mem,size) \
  842. do { \
  843. switch ((size)) { \
  844. case 1: *(inst)++ = (unsigned char)0x8a; break; \
  845. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  846. case 4: *(inst)++ = (unsigned char)0x8b; break; \
  847. default: jit_assert (0); \
  848. } \
  849. x86_mem_emit ((inst), (reg), (mem)); \
  850. } while (0)
  851. #define x86_mov_reg_membase(inst,reg,basereg,disp,size) \
  852. do { \
  853. switch ((size)) { \
  854. case 1: *(inst)++ = (unsigned char)0x8a; break; \
  855. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  856. case 4: *(inst)++ = (unsigned char)0x8b; break; \
  857. default: jit_assert (0); \
  858. } \
  859. x86_membase_emit ((inst), (reg), (basereg), (disp)); \
  860. } while (0)
  861. #define x86_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) \
  862. do { \
  863. switch ((size)) { \
  864. case 1: *(inst)++ = (unsigned char)0x8a; break; \
  865. case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
  866. case 4: *(inst)++ = (unsigned char)0x8b; break; \
  867. default: jit_assert (0); \
  868. } \
  869. x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
  870. } while (0)
  871. /*
  872. * Note: x86_clear_reg () chacnges the condition code!
  873. */
  874. #define x86_clear_reg(inst,reg) x86_alu_reg_reg((inst), X86_XOR, (reg), (reg))
  875. #define x86_mov_reg_imm(inst,reg,imm) \
  876. do { \
  877. *(inst)++ = (unsigned char)0xb8 + (reg); \
  878. x86_imm_emit32 ((inst), (imm)); \
  879. } while (0)
  880. #define x86_mov_mem_imm(inst,mem,imm,size) \
  881. do { \
  882. if ((size) == 1) { \
  883. *(inst)++ = (unsigned char)0xc6; \
  884. x86_mem_emit ((inst), 0, (mem)); \
  885. x86_imm_emit8 ((inst), (imm)); \
  886. } else if ((size) == 2) { \
  887. *(inst)++ = (unsigned char)0x66; \
  888. *(inst)++ = (unsigned char)0xc7; \
  889. x86_mem_emit ((inst), 0, (mem)); \
  890. x86_imm_emit16 ((inst), (imm)); \
  891. } else { \
  892. *(inst)++ = (unsigned char)0xc7; \
  893. x86_mem_emit ((inst), 0, (mem)); \
  894. x86_imm_emit32 ((inst), (imm)); \
  895. } \
  896. } while (0)
/* Store an immediate of `size` bytes (1, 2 or other => 4) at [basereg + disp]. */
#define x86_mov_membase_imm(inst,basereg,disp,imm,size) \
	do { \
	if ((size) == 1) { \
	*(inst)++ = (unsigned char)0xc6; \
	x86_membase_emit ((inst), 0, (basereg), (disp)); \
	x86_imm_emit8 ((inst), (imm)); \
	} else if ((size) == 2) { \
	*(inst)++ = (unsigned char)0x66; /* operand-size prefix for the 16-bit form */ \
	*(inst)++ = (unsigned char)0xc7; \
	x86_membase_emit ((inst), 0, (basereg), (disp)); \
	x86_imm_emit16 ((inst), (imm)); \
	} else { \
	*(inst)++ = (unsigned char)0xc7; \
	x86_membase_emit ((inst), 0, (basereg), (disp)); \
	x86_imm_emit32 ((inst), (imm)); \
	} \
	} while (0)
/* Store an immediate of `size` bytes at [basereg + disp + indexreg * 2^shift]. */
#define x86_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size) \
	do { \
	if ((size) == 1) { \
	*(inst)++ = (unsigned char)0xc6; \
	x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
	x86_imm_emit8 ((inst), (imm)); \
	} else if ((size) == 2) { \
	*(inst)++ = (unsigned char)0x66; /* operand-size prefix for the 16-bit form */ \
	*(inst)++ = (unsigned char)0xc7; \
	x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
	x86_imm_emit16 ((inst), (imm)); \
	} else { \
	*(inst)++ = (unsigned char)0xc7; \
	x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
	x86_imm_emit32 ((inst), (imm)); \
	} \
	} while (0)
/* lea reg, [mem]: load the effective address of an absolute location. */
#define x86_lea_mem(inst,reg,mem) \
	do { \
	*(inst)++ = (unsigned char)0x8d; \
	x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)
/* lea reg, [basereg + disp]. */
#define x86_lea_membase(inst,reg,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0x8d; \
	x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
/* lea reg, [basereg + disp + indexreg * 2^shift]. */
#define x86_lea_memindex(inst,reg,basereg,disp,indexreg,shift) \
	do { \
	*(inst)++ = (unsigned char)0x8d; \
	x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/*
 * Widening moves (movzx/movsx).  The base two-byte opcode is 0x0f 0xb6
 * (movzx r32, r/m8); adding 0x08 selects the sign-extending form (movsx)
 * and adding 0x01 selects a 16-bit instead of an 8-bit source.
 */
#define x86_widen_reg(inst,dreg,reg,is_signed,is_half) \
	do { \
	unsigned char op = 0xb6; \
	/* only AL/CL/DL/BL have byte encodings, so an 8-bit source must be one of them */ \
	jit_assert (is_half || X86_IS_BYTE_REG (reg)); \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) op += 0x08; \
	if ((is_half)) op += 0x01; \
	*(inst)++ = op; \
	x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)
/* Widen from an absolute memory operand. */
#define x86_widen_mem(inst,dreg,mem,is_signed,is_half) \
	do { \
	unsigned char op = 0xb6; \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) op += 0x08; \
	if ((is_half)) op += 0x01; \
	*(inst)++ = op; \
	x86_mem_emit ((inst), (dreg), (mem)); \
	} while (0)
/* Widen from [basereg + disp]. */
#define x86_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) \
	do { \
	unsigned char op = 0xb6; \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) op += 0x08; \
	if ((is_half)) op += 0x01; \
	*(inst)++ = op; \
	x86_membase_emit ((inst), (dreg), (basereg), (disp)); \
	} while (0)
/* Widen from [basereg + disp + indexreg * 2^shift]. */
#define x86_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) \
	do { \
	unsigned char op = 0xb6; \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) op += 0x08; \
	if ((is_half)) op += 0x01; \
	*(inst)++ = op; \
	x86_memindex_emit ((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/* cdq: sign-extend EAX into EDX:EAX (used before idiv). */
#define x86_cdq(inst) do { *(inst)++ = (unsigned char)0x99; } while (0)
/* wait/fwait: wait for pending FPU exceptions. */
#define x86_wait(inst) do { *(inst)++ = (unsigned char)0x9b; } while (0)
/*
 * FPU arithmetic, ST(0) op memory operand: opcode 0xd8 for float,
 * 0xdc for double; `opc` (an X86_FP_Opcode) goes in the reg field.
 */
#define x86_fp_op_mem(inst,opc,mem,is_double) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8; \
	x86_mem_emit ((inst), (opc), (mem)); \
	} while (0)
/* As x86_fp_op_mem, operand at [basereg + disp]. */
#define x86_fp_op_membase(inst,opc,basereg,disp,is_double) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8; \
	x86_membase_emit ((inst), (opc), (basereg), (disp)); \
	} while (0)
/* ST(0) = ST(0) op ST(index): 0xd8, modrm 0xc0 + opc<<3 + index. */
#define x86_fp_op(inst,opc,index) \
	do { \
	*(inst)++ = (unsigned char)0xd8; \
	*(inst)++ = (unsigned char)0xc0+((opc)<<3)+((index)&0x07); \
	} while (0)
/*
 * ST(index) = ST(index) op ST(0), optionally popping the stack.
 * In the 0xdc/0xde encodings the sub/subr and div/divr opcode slots are
 * swapped relative to X86_FP_Opcode, hence the remap table.
 */
#define x86_fp_op_reg(inst,opc,index,pop_stack) \
	do { \
	static const unsigned char map[] = { 0, 1, 2, 3, 5, 4, 7, 6, 8}; \
	*(inst)++ = (pop_stack) ? (unsigned char)0xde : (unsigned char)0xdc; \
	*(inst)++ = (unsigned char)0xc0+(map[(opc)]<<3)+((index)&0x07); \
	} while (0)
/**
 * @x86_fp_int_op_membase
 * Supports FPU operations between ST(0) and integer operand in memory.
 * Operation encoded using X86_FP_Opcode enum.
 * Operand is addressed by [basereg + disp].
 * is_int specifies whether operand is int32 (TRUE, opcode 0xda) or
 * int16 (FALSE, opcode 0xde).
 */
#define x86_fp_int_op_membase(inst,opc,basereg,disp,is_int) \
	do { \
	*(inst)++ = (is_int) ? (unsigned char)0xda : (unsigned char)0xde; \
	x86_membase_emit ((inst), opc, (basereg), (disp)); \
	} while (0)
/* fstp st(index): store ST(0) into ST(index) and pop. */
#define x86_fstp(inst,index) \
	do { \
	*(inst)++ = (unsigned char)0xdd; \
	*(inst)++ = (unsigned char)0xd8+(index); \
	} while (0)
/* fcompp: compare ST(0) with ST(1), then pop both. */
#define x86_fcompp(inst) \
	do { \
	*(inst)++ = (unsigned char)0xde; \
	*(inst)++ = (unsigned char)0xd9; \
	} while (0)
/* fucompp: unordered compare ST(0) with ST(1), then pop both. */
#define x86_fucompp(inst) \
	do { \
	*(inst)++ = (unsigned char)0xda; \
	*(inst)++ = (unsigned char)0xe9; \
	} while (0)
/* fnstsw ax: copy the FPU status word into AX without waiting. */
#define x86_fnstsw(inst) \
	do { \
	*(inst)++ = (unsigned char)0xdf; \
	*(inst)++ = (unsigned char)0xe0; \
	} while (0)
/* fnstcw [mem]: store the FPU control word (0xd9 /7). */
#define x86_fnstcw(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	x86_mem_emit ((inst), 7, (mem)); \
	} while (0)
/* fnstcw [basereg + disp]. */
#define x86_fnstcw_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	x86_membase_emit ((inst), 7, (basereg), (disp)); \
	} while (0)
/* fldcw [mem]: load the FPU control word (0xd9 /5). */
#define x86_fldcw(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	x86_mem_emit ((inst), 5, (mem)); \
	} while (0)
/* fldcw [basereg + disp]. */
#define x86_fldcw_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	x86_membase_emit ((inst), 5, (basereg), (disp)); \
	} while (0)
/* fchs: negate ST(0). */
#define x86_fchs(inst) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	*(inst)++ = (unsigned char)0xe0; \
	} while (0)
/* Partial remainder of ST(0)/ST(1) (fprem encoding 0xd9 0xf8). */
#define x86_frem(inst) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	*(inst)++ = (unsigned char)0xf8; \
	} while (0)
/* fxch st(index): exchange ST(0) with ST(index). */
#define x86_fxch(inst,index) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	*(inst)++ = (unsigned char)0xc8 + ((index) & 0x07); \
	} while (0)
/* fcomi st(index): compare ST(0) with ST(index), setting EFLAGS. */
#define x86_fcomi(inst,index) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	*(inst)++ = (unsigned char)0xf0 + ((index) & 0x07); \
	} while (0)
/* fcomip st(index): as fcomi, then pop ST(0). */
#define x86_fcomip(inst,index) \
	do { \
	*(inst)++ = (unsigned char)0xdf; \
	*(inst)++ = (unsigned char)0xf0 + ((index) & 0x07); \
	} while (0)
/* fucomi st(index): unordered compare, setting EFLAGS. */
#define x86_fucomi(inst,index) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	*(inst)++ = (unsigned char)0xe8 + ((index) & 0x07); \
	} while (0)
/* fucomip st(index): as fucomi, then pop ST(0). */
#define x86_fucomip(inst,index) \
	do { \
	*(inst)++ = (unsigned char)0xdf; \
	*(inst)++ = (unsigned char)0xe8 + ((index) & 0x07); \
	} while (0)
/* fld: push a float (0xd9) or double (0xdd) from memory onto the FPU stack. */
#define x86_fld(inst,mem,is_double) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
	x86_mem_emit ((inst), 0, (mem)); \
	} while (0)
/* fld from [basereg + disp]. */
#define x86_fld_membase(inst,basereg,disp,is_double) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
	x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} while (0)
/* fld from [basereg + disp + indexreg * 2^shift]. */
#define x86_fld_memindex(inst,basereg,disp,indexreg,shift,is_double) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
	x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/* fld of an 80-bit extended-precision value (0xdb /5). */
#define x86_fld80_mem(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_mem_emit ((inst), 5, (mem)); \
	} while (0)
/* 80-bit fld from [basereg + disp]. */
#define x86_fld80_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_membase_emit ((inst), 5, (basereg), (disp)); \
	} while (0)
/* 80-bit fld from [basereg + disp + indexreg * 2^shift]. */
#define x86_fld80_memindex(inst,basereg,disp,indexreg,shift) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_memindex_emit ((inst), 5, (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/*
 * fild: push an integer from memory onto the FPU stack.
 * is_long selects a 64-bit source (0xdf /5); otherwise 32-bit (0xdb /0).
 */
#define x86_fild(inst,mem,is_long) \
	do { \
	if ((is_long)) { \
	*(inst)++ = (unsigned char)0xdf; \
	x86_mem_emit ((inst), 5, (mem)); \
	} else { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_mem_emit ((inst), 0, (mem)); \
	} \
	} while (0)
/* As x86_fild, source at [basereg + disp]. */
#define x86_fild_membase(inst,basereg,disp,is_long) \
	do { \
	if ((is_long)) { \
	*(inst)++ = (unsigned char)0xdf; \
	x86_membase_emit ((inst), 5, (basereg), (disp)); \
	} else { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} \
	} while (0)
/* fld st(index): push a copy of ST(index). */
#define x86_fld_reg(inst,index) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	*(inst)++ = (unsigned char)0xc0 + ((index) & 0x07); \
	} while (0)
/* fldz: push +0.0. */
#define x86_fldz(inst) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	*(inst)++ = (unsigned char)0xee; \
	} while (0)
/* fld1: push +1.0. */
#define x86_fld1(inst) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	*(inst)++ = (unsigned char)0xe8; \
	} while (0)
/* fldpi: push pi. */
#define x86_fldpi(inst) \
	do { \
	*(inst)++ = (unsigned char)0xd9; \
	*(inst)++ = (unsigned char)0xeb; \
	} while (0)
/*
 * fst/fstp: store ST(0) to memory as float (0xd9) or double (0xdd).
 * The reg field is 2 (fst) or 3 (fstp) depending on pop_stack.
 */
#define x86_fst(inst,mem,is_double,pop_stack) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
	x86_mem_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (mem)); \
	} while (0)
/* fst/fstp to [basereg + disp]. */
#define x86_fst_membase(inst,basereg,disp,is_double,pop_stack) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
	x86_membase_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp)); \
	} while (0)
/* fst/fstp to [basereg + disp + indexreg * 2^shift]. */
#define x86_fst_memindex(inst,basereg,disp,indexreg,shift,is_double,pop_stack) \
	do { \
	*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
	x86_memindex_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/* Store ST(0) as an 80-bit extended value and pop (0xdb /7). */
#define x86_fst80_mem(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_mem_emit ((inst), 7, (mem)); \
	} while (0)
/* 80-bit store to [basereg + disp]. */
#define x86_fst80_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_membase_emit ((inst), 7, (basereg), (disp)); \
	} while (0)
/* 80-bit store to [basereg + disp + indexreg * 2^shift]. */
#define x86_fst80_memindex(inst,basereg,disp,indexreg,shift) \
	do { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_memindex_emit ((inst), 7, (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/*
 * fistp: convert ST(0) to integer, store it, and pop.
 * is_long selects a 64-bit destination (0xdf /7); otherwise 32-bit (0xdb /3).
 */
#define x86_fist_pop(inst,mem,is_long) \
	do { \
	if ((is_long)) { \
	*(inst)++ = (unsigned char)0xdf; \
	x86_mem_emit ((inst), 7, (mem)); \
	} else { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_mem_emit ((inst), 3, (mem)); \
	} \
	} while (0)
/* As x86_fist_pop, destination at [basereg + disp]. */
#define x86_fist_pop_membase(inst,basereg,disp,is_long) \
	do { \
	if ((is_long)) { \
	*(inst)++ = (unsigned char)0xdf; \
	x86_membase_emit ((inst), 7, (basereg), (disp)); \
	} else { \
	*(inst)++ = (unsigned char)0xdb; \
	x86_membase_emit ((inst), 3, (basereg), (disp)); \
	} \
	} while (0)
/* fstsw ax: wait prefix (0x9b) followed by fnstsw ax. */
#define x86_fstsw(inst) \
	do { \
	*(inst)++ = (unsigned char)0x9b; \
	*(inst)++ = (unsigned char)0xdf; \
	*(inst)++ = (unsigned char)0xe0; \
	} while (0)
  1219. /**
  1220. * @x86_fist_membase
  1221. * Converts content of ST(0) to integer and stores it at memory location
  1222. * addressed by [basereg + disp].
  1223. * is_int specifies whether destination is int32 (TRUE) or int16 (FALSE).
  1224. */
  1225. #define x86_fist_membase(inst,basereg,disp,is_int) \
  1226. do { \
  1227. if ((is_int)) { \
  1228. *(inst)++ = (unsigned char)0xdb; \
  1229. x86_membase_emit ((inst), 2, (basereg), (disp)); \
  1230. } else { \
  1231. *(inst)++ = (unsigned char)0xdf; \
  1232. x86_membase_emit ((inst), 2, (basereg), (disp)); \
  1233. } \
  1234. } while (0)
/* push reg (short-form opcode 0x50 + reg). */
#define x86_push_reg(inst,reg) \
	do { \
	*(inst)++ = (unsigned char)0x50 + (reg); \
	} while (0)
/* push dword [reg] (0xff /6). */
#define x86_push_regp(inst,reg) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_regp_emit ((inst), 6, (reg)); \
	} while (0)
/* push dword [mem]. */
#define x86_push_mem(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_mem_emit ((inst), 6, (mem)); \
	} while (0)
/* push dword [basereg + disp]. */
#define x86_push_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_membase_emit ((inst), 6, (basereg), (disp)); \
	} while (0)
/* push dword [basereg + disp + indexreg * 2^shift]. */
#define x86_push_memindex(inst,basereg,disp,indexreg,shift) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_memindex_emit ((inst), 6, (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/* Emit a push with a recognizable placeholder immediate, to be patched later. */
#define x86_push_imm_template(inst) x86_push_imm (inst, 0xf0f0f0f0)
/* push imm: short form (0x6a imm8) when it fits, else 0x68 imm32. */
#define x86_push_imm(inst,imm) \
	do { \
	int _imm = (int) (imm); \
	if (x86_is_imm8 (_imm)) { \
	*(inst)++ = (unsigned char)0x6A; \
	x86_imm_emit8 ((inst), (_imm)); \
	} else { \
	*(inst)++ = (unsigned char)0x68; \
	x86_imm_emit32 ((inst), (_imm)); \
	} \
	} while (0)
/* pop reg (short-form opcode 0x58 + reg). */
#define x86_pop_reg(inst,reg) \
	do { \
	*(inst)++ = (unsigned char)0x58 + (reg); \
	} while (0)
/* pop dword [mem] (0x8f /0). */
#define x86_pop_mem(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0x8f; \
	x86_mem_emit ((inst), 0, (mem)); \
	} while (0)
/* pop dword [basereg + disp]. */
#define x86_pop_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0x8f; \
	x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} while (0)
/* Push/pop all general registers and the flags. */
#define x86_pushad(inst) do { *(inst)++ = (unsigned char)0x60; } while (0)
#define x86_pushfd(inst) do { *(inst)++ = (unsigned char)0x9c; } while (0)
#define x86_popad(inst) do { *(inst)++ = (unsigned char)0x61; } while (0)
#define x86_popfd(inst) do { *(inst)++ = (unsigned char)0x9d; } while (0)
/* loop: decrement ECX and branch (rel8) while non-zero. */
#define x86_loop(inst,imm) \
	do { \
	*(inst)++ = (unsigned char)0xe2; \
	x86_imm_emit8 ((inst), (imm)); \
	} while (0)
/* loope: as loop, but also requires ZF set. */
#define x86_loope(inst,imm) \
	do { \
	*(inst)++ = (unsigned char)0xe1; \
	x86_imm_emit8 ((inst), (imm)); \
	} while (0)
/* loopne: as loop, but also requires ZF clear. */
#define x86_loopne(inst,imm) \
	do { \
	*(inst)++ = (unsigned char)0xe0; \
	x86_imm_emit8 ((inst), (imm)); \
	} while (0)
/* jmp rel32 (relative to the end of the instruction). */
#define x86_jump32(inst,imm) \
	do { \
	*(inst)++ = (unsigned char)0xe9; \
	x86_imm_emit32 ((inst), (imm)); \
	} while (0)
/* jmp rel8. */
#define x86_jump8(inst,imm) \
	do { \
	*(inst)++ = (unsigned char)0xeb; \
	x86_imm_emit8 ((inst), (imm)); \
	} while (0)
/* jmp reg: indirect jump through a register (0xff /4). */
#define x86_jump_reg(inst,reg) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_reg_emit ((inst), 4, (reg)); \
	} while (0)
/* jmp [mem]: indirect jump through an absolute memory slot. */
#define x86_jump_mem(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_mem_emit ((inst), 4, (mem)); \
	} while (0)
/* jmp [basereg + disp]. */
#define x86_jump_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_membase_emit ((inst), 4, (basereg), (disp)); \
	} while (0)
/* jmp [basereg + disp + indexreg * 2^shift]. */
#define x86_jump_memindex(inst,basereg,disp,indexreg,shift) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_memindex_emit ((inst), 4, (basereg), (disp), (indexreg), (shift)); \
	} while (0)
/*
 * Jump to `target`, a pointer into our code buffer.  The displacement is
 * computed assuming the 2-byte short form first; if it does not fit in
 * 8 bits, 3 is subtracted to account for the 3 extra bytes of the
 * 5-byte jmp rel32 form.
 */
#define x86_jump_code(inst,target) \
	do { \
	int t = (unsigned char*)(target) - (inst) - 2; \
	if (x86_is_imm8(t)) { \
	x86_jump8 ((inst), t); \
	} else { \
	t -= 3; \
	x86_jump32 ((inst), t); \
	} \
	} while (0)
/* As x86_jump_code, but with a precomputed displacement from `inst`. */
#define x86_jump_disp(inst,disp) \
	do { \
	int t = (disp) - 2; \
	if (x86_is_imm8(t)) { \
	x86_jump8 ((inst), t); \
	} else { \
	t -= 3; \
	x86_jump32 ((inst), t); \
	} \
	} while (0)
/*
 * Conditional branches.  x86_cc_signed_map/x86_cc_unsigned_map translate an
 * X86_CC_* condition into the short-form jcc opcode; adding 0x10 gives the
 * second byte of the long 0x0f 0x8x form.
 */
#define x86_branch8(inst,cond,imm,is_signed) \
	do { \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)]; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)]; \
	x86_imm_emit8 ((inst), (imm)); \
	} while (0)
/* jcc rel32 (0x0f, map value + 0x10). */
#define x86_branch32(inst,cond,imm,is_signed) \
	do { \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)] + 0x10; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x10; \
	x86_imm_emit32 ((inst), (imm)); \
	} while (0)
/*
 * Branch to `target`, a pointer into the buffer: use the 2-byte short form
 * when the displacement fits in 8 bits, else subtract the 4 extra bytes of
 * the 6-byte long form and emit jcc rel32.
 */
#define x86_branch(inst,cond,target,is_signed) \
	do { \
	int offset = (target) - (inst) - 2; \
	if (x86_is_imm8 ((offset))) \
	x86_branch8 ((inst), (cond), offset, (is_signed)); \
	else { \
	offset -= 4; \
	x86_branch32 ((inst), (cond), offset, (is_signed)); \
	} \
	} while (0)
/* As x86_branch, but with a precomputed displacement from `inst`. */
#define x86_branch_disp(inst,cond,disp,is_signed) \
	do { \
	int offset = (disp) - 2; \
	if (x86_is_imm8 ((offset))) \
	x86_branch8 ((inst), (cond), offset, (is_signed)); \
	else { \
	offset -= 4; \
	x86_branch32 ((inst), (cond), offset, (is_signed)); \
	} \
	} while (0)
/*
 * setcc: store 0/1 into an 8-bit destination depending on `cond`.
 * The opcode second byte is the jcc map value + 0x20 (0x0f 0x9x).
 */
#define x86_set_reg(inst,cond,reg,is_signed) \
	do { \
	/* setcc writes a byte, so the destination must have a byte encoding */ \
	jit_assert (X86_IS_BYTE_REG (reg)); \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)] + 0x20; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20; \
	x86_reg_emit ((inst), 0, (reg)); \
	} while (0)
/* setcc byte [mem]. */
#define x86_set_mem(inst,cond,mem,is_signed) \
	do { \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)] + 0x20; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20; \
	x86_mem_emit ((inst), 0, (mem)); \
	} while (0)
/* setcc byte [basereg + disp]. */
#define x86_set_membase(inst,cond,basereg,disp,is_signed) \
	do { \
	*(inst)++ = (unsigned char)0x0f; \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)] + 0x20; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20; \
	x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} while (0)
/* call rel32 (relative to the end of the 5-byte instruction). */
#define x86_call_imm(inst,disp) \
	do { \
	*(inst)++ = (unsigned char)0xe8; \
	x86_imm_emit32 ((inst), (int)(disp)); \
	} while (0)
/* call reg: indirect call through a register (0xff /2). */
#define x86_call_reg(inst,reg) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_reg_emit ((inst), 2, (reg)); \
	} while (0)
/* call [mem]. */
#define x86_call_mem(inst,mem) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_mem_emit ((inst), 2, (mem)); \
	} while (0)
/* call [basereg + disp]. */
#define x86_call_membase(inst,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char)0xff; \
	x86_membase_emit ((inst), 2, (basereg), (disp)); \
	} while (0)
/* Call `target` (a code pointer): rel32 adjusted for the 5-byte call. */
#define x86_call_code(inst,target) \
	do { \
	int _x86_offset = (unsigned char*)(target) - (inst); \
	_x86_offset -= 5; \
	x86_call_imm ((inst), _x86_offset); \
	} while (0)
#define x86_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0) /* near return */
  1449. #define x86_ret_imm(inst,imm) \
  1450. do { \
  1451. if ((imm) == 0) { \
  1452. x86_ret ((inst)); \
  1453. } else { \
  1454. *(inst)++ = (unsigned char)0xc2; \
  1455. x86_imm_emit16 ((inst), (imm)); \
  1456. } \
  1457. } while (0)
/*
 * cmovcc: conditionally move into `dreg`.  The opcode second byte is the
 * jcc map value - 0x30 (0x0f 0x4x).
 */
#define x86_cmov_reg(inst,cond,is_signed,dreg,reg) \
	do { \
	*(inst)++ = (unsigned char) 0x0f; \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
	x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)
/* cmovcc reg, [mem]. */
#define x86_cmov_mem(inst,cond,is_signed,reg,mem) \
	do { \
	*(inst)++ = (unsigned char) 0x0f; \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
	x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)
/* cmovcc reg, [basereg + disp]. */
#define x86_cmov_membase(inst,cond,is_signed,reg,basereg,disp) \
	do { \
	*(inst)++ = (unsigned char) 0x0f; \
	if ((is_signed)) \
	*(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
	else \
	*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
	x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
/* enter framesize, 0: set up a stack frame (the trailing 0 is the nesting level). */
#define x86_enter(inst,framesize) \
	do { \
	*(inst)++ = (unsigned char)0xc8; \
	x86_imm_emit16 ((inst), (framesize)); \
	*(inst)++ = 0; \
	} while (0)
/* leave: tear down the frame set up by enter. */
#define x86_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
/* sahf: load AH into the low byte of EFLAGS (used after fnstsw ax). */
#define x86_sahf(inst) do { *(inst)++ = (unsigned char)0x9e; } while (0)
/* FPU transcendental / misc single-instruction helpers, all operating on ST(0). */
#define x86_fsin(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfe; } while (0)
#define x86_fcos(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xff; } while (0)
#define x86_fabs(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe1; } while (0)
#define x86_ftst(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe4; } while (0)
#define x86_fxam(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe5; } while (0)
#define x86_fpatan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf3; } while (0)
#define x86_fprem(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf8; } while (0)
#define x86_fprem1(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf5; } while (0)
#define x86_frndint(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfc; } while (0)
#define x86_fsqrt(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfa; } while (0)
#define x86_fptan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf2; } while (0)
/*
 * Emit `size` bytes (1..7) of padding that execute as no-ops: nop and
 * register-preserving mov/lea forms (e.g. mov eax,eax; lea ebp,[ebp+0];
 * lea esp,[esp+0]).  Larger sizes only assert here.
 */
#define x86_padding(inst,size) \
	do { \
	switch ((size)) { \
	case 1: x86_nop ((inst)); break; \
	case 2: *(inst)++ = 0x8b; \
	*(inst)++ = 0xc0; break; \
	case 3: *(inst)++ = 0x8d; *(inst)++ = 0x6d; \
	*(inst)++ = 0x00; break; \
	case 4: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
	*(inst)++ = 0x24; *(inst)++ = 0x00; \
	break; \
	case 5: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
	*(inst)++ = 0x24; *(inst)++ = 0x00; \
	x86_nop ((inst)); break; \
	case 6: *(inst)++ = 0x8d; *(inst)++ = 0xad; \
	*(inst)++ = 0x00; *(inst)++ = 0x00; \
	*(inst)++ = 0x00; *(inst)++ = 0x00; \
	break; \
	case 7: *(inst)++ = 0x8d; *(inst)++ = 0xa4; \
	*(inst)++ = 0x24; *(inst)++ = 0x00; \
	*(inst)++ = 0x00; *(inst)++ = 0x00; \
	*(inst)++ = 0x00; break; \
	default: jit_assert (0); \
	} \
	} while (0)
/*
 * Standard prolog: enter with `frame_size`, then push each register whose
 * bit is set in `reg_mask`, from EAX (bit 0) up to EDI (bit 7).
 */
#define x86_prolog(inst,frame_size,reg_mask) \
	do { \
	unsigned i, m = 1; \
	x86_enter ((inst), (frame_size)); \
	for (i = 0; i < X86_NREG; ++i, m <<= 1) { \
	if ((reg_mask) & m) \
	x86_push_reg ((inst), i); \
	} \
	} while (0)
/*
 * Standard epilog: pop the registers saved by x86_prolog in reverse order
 * (EDI down to EAX; the loop terminates when the mask bit shifts out),
 * then leave and ret.
 */
#define x86_epilog(inst,reg_mask) \
	do { \
	unsigned i, m = 1 << X86_EDI; \
	for (i = X86_EDI; m != 0; i--, m=m>>1) { \
	if ((reg_mask) & m) \
	x86_pop_reg ((inst), i); \
	} \
	x86_leave ((inst)); \
	x86_ret ((inst)); \
	} while (0)
  1548. #endif /* JIT_GEN_X86_H */