/jit/jit-gen-x86-64.h

https://bitbucket.org/philburr/libjit
/*
 * jit-gen-x86-64.h - Macros for generating x86_64 code.
 *
 * Copyright (C) 2008  Southern Storm Software, Pty Ltd.
 *
 * This file is part of the libjit library.
 *
 * The libjit library is free software: you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation, either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * The libjit library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with the libjit library.  If not, see
 * <http://www.gnu.org/licenses/>.
 */

#ifndef _JIT_GEN_X86_64_H
#define _JIT_GEN_X86_64_H

#include <jit/jit-defs.h>
#include "jit-gen-x86.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * X86_64 64 bit general purpose integer registers.
 */
typedef enum
{
    X86_64_RAX = 0,
    X86_64_RCX = 1,
    X86_64_RDX = 2,
    X86_64_RBX = 3,
    X86_64_RSP = 4,
    X86_64_RBP = 5,
    X86_64_RSI = 6,
    X86_64_RDI = 7,
    X86_64_R8  = 8,
    X86_64_R9  = 9,
    X86_64_R10 = 10,
    X86_64_R11 = 11,
    X86_64_R12 = 12,
    X86_64_R13 = 13,
    X86_64_R14 = 14,
    X86_64_R15 = 15,
    X86_64_RIP = 16  /* This register encoding doesn't exist in the */
                     /* instructions. It's used for RIP relative encoding. */
} X86_64_Reg_No;

/*
 * X86-64 xmm registers.
 */
typedef enum
{
    X86_64_XMM0 = 0,
    X86_64_XMM1 = 1,
    X86_64_XMM2 = 2,
    X86_64_XMM3 = 3,
    X86_64_XMM4 = 4,
    X86_64_XMM5 = 5,
    X86_64_XMM6 = 6,
    X86_64_XMM7 = 7,
    X86_64_XMM8 = 8,
    X86_64_XMM9 = 9,
    X86_64_XMM10 = 10,
    X86_64_XMM11 = 11,
    X86_64_XMM12 = 12,
    X86_64_XMM13 = 13,
    X86_64_XMM14 = 14,
    X86_64_XMM15 = 15
} X86_64_XMM_Reg_No;

/*
 * Bits in the REX prefix byte.
 */
typedef enum
{
    X86_64_REX_B = 1,  /* 1-bit (high) extension of the ModRM r/m field, */
                       /* SIB base field, or opcode reg field, thus */
                       /* permitting access to 16 registers. */
    X86_64_REX_X = 2,  /* 1-bit (high) extension of the SIB index field, */
                       /* thus permitting access to 16 registers. */
    X86_64_REX_R = 4,  /* 1-bit (high) extension of the ModRM reg field, */
                       /* thus permitting access to 16 registers. */
    X86_64_REX_W = 8   /* 0 = Default operand size */
                       /* 1 = 64 bit operand size */
} X86_64_REX_Bits;

/*
 * Third byte of the opcodes for xmm instructions, which are encoded as:
 * Opcode1: 0xF3 (single precision) or 0xF2 (double precision),
 *          handled as a prefix.
 * Opcode2: 0x0F
 * Opcode3: one of the values below.
 * For example, addsd xmm0, xmm1 is encoded as 0xF2 0x0F 0x58 0xC1.
 */
typedef enum
{
    XMM1_MOV = 0x10,
    XMM1_MOV_REV = 0x11,
    XMM1_ADD = 0x58,
    XMM1_MUL = 0x59,
    XMM1_SUB = 0x5C,
    XMM1_DIV = 0x5E
} X86_64_XMM1_OP;

/*
 * Logical opcodes used with packed single and double precision values.
 */
typedef enum
{
    XMM_ANDP = 0x54,
    XMM_ORP  = 0x56,
    XMM_XORP = 0x57
} X86_64_XMM_PLOP;

/*
 * Rounding modes for xmm rounding instructions, the mxcsr register and
 * the fpu control word.
 */
typedef enum
{
    X86_ROUND_NEAREST = 0x00, /* Round to the nearest integer */
    X86_ROUND_DOWN    = 0x01, /* Round towards negative infinity */
    X86_ROUND_UP      = 0x02, /* Round towards positive infinity */
    X86_ROUND_ZERO    = 0x03  /* Round towards zero (truncate) */
} X86_64_ROUNDMODE;

/*
 * Helper union for emitting 64 bit immediate values.
 */
typedef union
{
    jit_long val;
    unsigned char b[8];
} x86_64_imm_buf;

#define x86_64_imm_emit64(inst, imm) \
    do { \
        x86_64_imm_buf imb; \
        imb.val = (jit_long)(imm); \
        *(inst)++ = imb.b[0]; \
        *(inst)++ = imb.b[1]; \
        *(inst)++ = imb.b[2]; \
        *(inst)++ = imb.b[3]; \
        *(inst)++ = imb.b[4]; \
        *(inst)++ = imb.b[5]; \
        *(inst)++ = imb.b[6]; \
        *(inst)++ = imb.b[7]; \
    } while (0)
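
/*
 * Usage sketch (illustrative, not part of the original header): the macro
 * stores the immediate through the union, so the bytes come out in host
 * byte order, which on x86-64 is little-endian, and `inst` advances by 8.
 *
 *     unsigned char code[8];
 *     unsigned char *inst = code;
 *     x86_64_imm_emit64(inst, (jit_long)0x1122334455667788);
 *     // now code[0] == 0x88, code[7] == 0x11 and inst == code + 8
 */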

#define x86_64_imm_emit_max32(inst, imm, size) \
    do { \
        switch((size)) \
        { \
            case 1: \
            { \
                x86_imm_emit8(inst, (imm)); \
            } \
            break; \
            case 2: \
            { \
                x86_imm_emit16(inst, (imm)); \
            } \
            break; \
            case 4: \
            case 8: \
            { \
                x86_imm_emit32((inst), (imm)); \
            } \
            break; \
            default: \
            { \
                jit_assert(0); \
            } \
        } \
    } while(0)

#define x86_64_imm_emit_max64(inst, imm, size) \
    do { \
        switch((size)) \
        { \
            case 1: \
            { \
                x86_imm_emit8(inst, (imm)); \
            } \
            break; \
            case 2: \
            { \
                x86_imm_emit16(inst, (imm)); \
            } \
            break; \
            case 4: \
            { \
                x86_imm_emit32((inst), (imm)); \
            } \
            break; \
            case 8: \
            { \
                x86_64_imm_emit64(inst, (imm)); \
            } \
            break; \
            default: \
            { \
                jit_assert(0); \
            } \
        } \
    } while(0)

/*
 * Emit the REX prefix.
 * The natural size is a power of 2 (1, 2, 4 or 8).
 * For accessing the low byte registers DIL, SIL, BPL and SPL we have to
 * generate a REX prefix with the value 0x40 too.
 * To enable this, OR the natural size with 1.
 */
#define x86_64_rex(rex_bits) (0x40 | (rex_bits))
#define x86_64_rex_emit(inst, width, modrm_reg, index_reg, rm_base_opcode_reg) \
    do { \
        unsigned char __rex_bits = \
            (((width) & 8) ? X86_64_REX_W : 0) | \
            (((modrm_reg) & 8) ? X86_64_REX_R : 0) | \
            (((index_reg) & 8) ? X86_64_REX_X : 0) | \
            (((rm_base_opcode_reg) & 8) ? X86_64_REX_B : 0); \
        if((__rex_bits != 0)) \
        { \
            *(inst)++ = x86_64_rex(__rex_bits); \
        } \
        else if(((width) & 1) && ((modrm_reg & 4) || (rm_base_opcode_reg & 4))) \
        { \
            *(inst)++ = x86_64_rex(0); \
        } \
    } while(0)
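
/*
 * Encoding sketch (illustrative, not part of the original header):
 * for a 64-bit `add rax, r8` in the 0x03 /r form, the destination RAX
 * sits in the ModRM reg field and R8 in the r/m field, so the prefix
 * needs REX.W (64 bit operand size) and REX.B (high bit of r/m):
 *
 *     unsigned char code[4];
 *     unsigned char *inst = code;
 *     x86_64_rex_emit(inst, 8, X86_64_RAX, 0, X86_64_R8);
 *     // code[0] == 0x49, i.e. x86_64_rex(X86_64_REX_W | X86_64_REX_B)
 */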

/*
 * Helper for emitting the rex prefix for opcodes with 64bit default size.
 */
#define x86_64_rex_emit64(inst, width, modrm_reg, index_reg, rm_base_opcode_reg) \
    do { \
        x86_64_rex_emit((inst), 0, (modrm_reg), (index_reg), (rm_base_opcode_reg)); \
    } while(0)

/* In 64 bit mode, all registers have a low byte subregister */
#undef X86_IS_BYTE_REG
#define X86_IS_BYTE_REG(reg) 1

#define x86_64_reg_emit(inst, r, regno) \
    do { \
        x86_reg_emit((inst), ((r) & 0x7), ((regno) & 0x7)); \
    } while(0)

#define x86_64_mem_emit(inst, r, disp) \
    do { \
        x86_address_byte((inst), 0, ((r) & 0x7), 4); \
        x86_address_byte((inst), 0, 4, 5); \
        x86_imm_emit32((inst), (disp)); \
    } while(0)

#define x86_64_mem64_emit(inst, r, disp) \
    do { \
        x86_address_byte((inst), 0, ((r) & 0x7), 4); \
        x86_address_byte((inst), 0, 4, 5); \
        x86_64_imm_emit64((inst), (disp)); \
    } while(0)

#define x86_64_membase_emit(inst, reg, basereg, disp) \
    do { \
        if((basereg) == X86_64_RIP) \
        { \
            x86_address_byte((inst), 0, ((reg) & 0x7), 5); \
            x86_imm_emit32((inst), (disp)); \
        } \
        else \
        { \
            x86_membase_emit((inst), ((reg) & 0x7), ((basereg) & 0x7), (disp)); \
        } \
    } while(0)

#define x86_64_memindex_emit(inst, r, basereg, disp, indexreg, shift) \
    do { \
        x86_memindex_emit((inst), ((r) & 0x7), ((basereg) & 0x7), (disp), ((indexreg) & 0x7), (shift)); \
    } while(0)

/*
 * RSP, RBP and the corresponding upper registers (R12 and R13) can't be used
 * for relative addressing without displacement because their codes are used
 * for encoding addressing modes with displacement.
 * So we do a membase addressing in this case with a zero offset.
 */
#define x86_64_regp_emit(inst, r, regno) \
    do { \
        switch(regno) \
        { \
            case X86_64_RSP: \
            case X86_64_RBP: \
            case X86_64_R12: \
            case X86_64_R13: \
            { \
                x86_64_membase_emit((inst), (r), (regno), 0); \
            } \
            break; \
            default: \
            { \
                x86_address_byte((inst), 0, ((r) & 0x7), ((regno) & 0x7)); \
            } \
            break; \
        } \
    } while(0)
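
/*
 * Encoding sketch (illustrative, not part of the original header): an
 * r/m value of 4 (RSP/R12) selects a SIB byte and, with mod == 00, an
 * r/m value of 5 (RBP/R13) selects RIP-relative/disp32 addressing, so
 * [r13] is emitted as [r13 + 0] using the mod == 01 disp8 form instead.
 * (The REX.B bit distinguishing R13 from RBP is emitted separately by
 * x86_64_rex_emit.)
 *
 *     unsigned char code[4];
 *     unsigned char *inst = code;
 *     x86_64_regp_emit(inst, X86_64_RAX, X86_64_R13);
 *     // code[0] == 0x45 (mod=01, reg=rax, rm=101), code[1] == 0x00 (disp8)
 */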

/*
 * Helper to encode an opcode where the encoding is different between
 * 8bit and 16 ... 64 bit width in the following way:
 * 8 bit == opcode given
 * 16 ... 64 bit == opcode given | 0x1
 */
#define x86_64_opcode1_emit(inst, opc, size) \
    do { \
        switch ((size)) \
        { \
            case 1: \
            { \
                *(inst)++ = (unsigned char)(opc); \
            } \
            break; \
            case 2: \
            case 4: \
            case 8: \
            { \
                *(inst)++ = ((unsigned char)(opc) | 0x1); \
            } \
            break; \
            default: \
            { \
                jit_assert(0); \
            } \
        } \
    } while(0)
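
/*
 * Example (illustrative, not part of the original header): this is the
 * classic x86 "w bit" pattern; e.g. the inc/dec group below uses 0xFE
 * for the 8-bit forms and 0xFE | 0x1 == 0xFF for the 16/32/64-bit forms.
 *
 *     unsigned char code[2];
 *     unsigned char *inst = code;
 *     x86_64_opcode1_emit(inst, 0xFE, 1); // code[0] == 0xFE (byte size)
 *     x86_64_opcode1_emit(inst, 0xFE, 4); // code[1] == 0xFF (dword size)
 */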

/*
 * Macros to implement the simple opcodes.
 */
#define x86_64_alu_reg_reg_size(inst, opc, dreg, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sreg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_reg_emit((inst), (dreg), (sreg)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sreg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_reg_emit((inst), (dreg), (sreg)); \
            } \
        } \
    } while(0)
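
/*
 * Encoding sketch (illustrative, not part of the original header): the
 * group-1 ALU opcodes share one encoding with the operation number in
 * bits 3..5, so (opc << 3) + 3 with opc == 0 (ADD) gives opcode 0x03:
 *
 *     unsigned char code[4];
 *     unsigned char *inst = code;
 *     x86_64_alu_reg_reg_size(inst, 0, X86_64_RAX, X86_64_RBX, 8);
 *     // add rax, rbx  ->  0x48 0x03 0xC3 (REX.W, opcode, ModRM)
 */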

#define x86_64_alu_regp_reg_size(inst, opc, dregp, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (dregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_regp_emit((inst), (sreg), (dregp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (dregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_regp_emit((inst), (sreg), (dregp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_mem_reg_size(inst, opc, mem, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_mem_emit((inst), (sreg), (mem)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_mem_emit((inst), (sreg), (mem)); \
            } \
        } \
    } while(0)

#define x86_64_alu_membase_reg_size(inst, opc, basereg, disp, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_membase_emit((inst), (sreg), (basereg), (disp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_membase_emit((inst), (sreg), (basereg), (disp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_memindex_reg_size(inst, opc, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_memindex_emit((inst), (sreg), (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_memindex_emit((inst), (sreg), (basereg), (disp), (indexreg), (shift)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_regp_size(inst, opc, dreg, sregp, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_regp_emit((inst), (dreg), (sregp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_regp_emit((inst), (dreg), (sregp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_mem_size(inst, opc, dreg, mem, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_mem_emit((inst), (dreg), (mem)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_mem_emit((inst), (dreg), (mem)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_membase_size(inst, opc, dreg, basereg, disp, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_memindex_size(inst, opc, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
            } \
        } \
    } while(0)

/*
 * The immediate value has to be at most 32 bit wide.
 */
#define x86_64_alu_reg_imm_size(inst, opc, dreg, imm, size) \
    do { \
        if((dreg) == X86_64_RAX) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    *(inst)++ = (((unsigned char)(opc)) << 3) + 4; \
                    x86_imm_emit8((inst), (imm)); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
        else if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_reg_emit((inst), (opc), (dreg)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_reg_emit((inst), (opc), (dreg)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_reg_emit((inst), (opc), (dreg)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_reg_emit((inst), (opc), (dreg)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)
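
/*
 * Usage sketch (illustrative, not part of the original header): 32-bit
 * immediates are sign-extended to 64 bits by the CPU, which is why the
 * macro accepts size 8 but never emits more than 4 immediate bytes.
 * RAX gets the short accumulator form:
 *
 *     unsigned char code[8];
 *     unsigned char *inst = code;
 *     x86_64_alu_reg_imm_size(inst, 0, X86_64_RAX, 1000000, 8);
 *     // add rax, 1000000  ->  0x48 0x05 0x40 0x42 0x0F 0x00
 */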

#define x86_64_alu_regp_imm_size(inst, opc, reg, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_regp_emit((inst), (opc), (reg)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_regp_emit((inst), (opc), (reg)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_regp_emit((inst), (opc), (reg)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_regp_emit((inst), (opc), (reg)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

#define x86_64_alu_mem_imm_size(inst, opc, mem, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_mem_emit((inst), (opc), (mem)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_mem_emit((inst), (opc), (mem)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_mem_emit((inst), (opc), (mem)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_mem_emit((inst), (opc), (mem)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

#define x86_64_alu_membase_imm_size(inst, opc, basereg, disp, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

#define x86_64_alu_memindex_imm_size(inst, opc, basereg, disp, indexreg, shift, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

/*
 * Instructions with one opcode (plus optional r/m)
 */

/*
 * Unary opcodes
 */
#define x86_64_alu1_reg(inst, opc1, r, reg) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (reg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_reg_emit((inst), (r), (reg)); \
    } while(0)

#define x86_64_alu1_regp(inst, opc1, r, regp) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (regp)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_regp_emit((inst), (r), (regp)); \
    } while(0)

#define x86_64_alu1_mem(inst, opc1, r, mem) \
    do { \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_mem_emit((inst), (r), (mem)); \
    } while(0)

#define x86_64_alu1_membase(inst, opc1, r, basereg, disp) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_membase_emit((inst), (r), (basereg), (disp)); \
    } while(0)

#define x86_64_alu1_memindex(inst, opc1, r, basereg, disp, indexreg, shift) \
    do { \
        x86_64_rex_emit((inst), 0, 0, (indexreg), (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_memindex_emit((inst), (r), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

#define x86_64_alu1_reg_size(inst, opc1, r, reg, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, (reg)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_reg_emit((inst), (r), (reg)); \
    } while(0)

#define x86_64_alu1_regp_size(inst, opc1, r, regp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, (regp)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_regp_emit((inst), (r), (regp)); \
    } while(0)

#define x86_64_alu1_mem_size(inst, opc1, r, mem, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, 0); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_mem_emit((inst), (r), (mem)); \
    } while(0)

#define x86_64_alu1_membase_size(inst, opc1, r, basereg, disp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_membase_emit((inst), (r), (basereg), (disp)); \
    } while(0)

#define x86_64_alu1_memindex_size(inst, opc1, r, basereg, disp, indexreg, shift, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_memindex_emit((inst), (r), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

#define x86_64_alu1_reg_reg_size(inst, opc1, dreg, sreg, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sreg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_reg_emit((inst), (dreg), (sreg)); \
    } while(0)

#define x86_64_alu1_reg_regp_size(inst, opc1, dreg, sregp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sregp)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_regp_emit((inst), (dreg), (sregp)); \
    } while(0)

#define x86_64_alu1_reg_mem_size(inst, opc1, dreg, mem, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, 0); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_mem_emit((inst), (dreg), (mem)); \
    } while(0)

#define x86_64_alu1_reg_membase_size(inst, opc1, dreg, basereg, disp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
    } while(0)

#define x86_64_alu1_reg_memindex_size(inst, opc1, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

#define x86_64_alu2_reg_reg_size(inst, opc1, opc2, dreg, sreg, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sreg)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_reg_emit((inst), (dreg), (sreg)); \
    } while(0)

#define x86_64_alu2_reg_regp_size(inst, opc1, opc2, dreg, sregp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sregp)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_regp_emit((inst), (dreg), (sregp)); \
    } while(0)

#define x86_64_alu2_reg_mem_size(inst, opc1, opc2, dreg, mem, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, 0); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_mem_emit((inst), (dreg), (mem)); \
    } while(0)

#define x86_64_alu2_reg_membase_size(inst, opc1, opc2, dreg, basereg, disp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
    } while(0)

#define x86_64_alu2_reg_memindex_size(inst, opc1, opc2, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

/*
 * Group1 general instructions
 */
#define x86_64_alu_reg_reg(inst, opc, dreg, sreg) \
    do { \
        x86_64_alu_reg_reg_size((inst), (opc), (dreg), (sreg), 8); \
    } while(0)

#define x86_64_alu_reg_imm(inst, opc, dreg, imm) \
    do { \
        x86_64_alu_reg_imm_size((inst), (opc), (dreg), (imm), 8); \
    } while(0)
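
/*
 * Usage sketch (illustrative, not part of the original header): the
 * group-1 opcode numbers are 0 ADD, 1 OR, 2 ADC, 3 SBB, 4 AND, 5 SUB,
 * 6 XOR, 7 CMP; the named wrappers that follow just fill in these
 * values.
 *
 *     unsigned char code[16];
 *     unsigned char *inst = code;
 *     x86_64_alu_reg_reg(inst, 5, X86_64_RAX, X86_64_RBX); // sub rax, rbx
 *     x86_64_alu_reg_imm(inst, 7, X86_64_RCX, 42);         // cmp rcx, 42
 */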

/*
 * ADC: Add with carry
 */
#define x86_64_adc_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 2, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_adc_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 2, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_adc_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 2, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_adc_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 2, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_adc_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 2, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_adc_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 2, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_adc_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 2, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_adc_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 2, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_adc_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 2, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_adc_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 2, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_adc_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 2, (reg), (imm), (size)); \
    } while(0)

#define x86_64_adc_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 2, (mem), (imm), (size)); \
    } while(0)

#define x86_64_adc_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 2, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_adc_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 2, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * ADD
 */
#define x86_64_add_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 0, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_add_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 0, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_add_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 0, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_add_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 0, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_add_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 0, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_add_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 0, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_add_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 0, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_add_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 0, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_add_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 0, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_add_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 0, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_add_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 0, (reg), (imm), (size)); \
    } while(0)

#define x86_64_add_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 0, (mem), (imm), (size)); \
    } while(0)

#define x86_64_add_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 0, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_add_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 0, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * AND
 */
#define x86_64_and_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 4, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_and_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 4, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_and_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 4, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_and_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 4, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_and_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 4, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_and_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 4, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_and_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 4, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_and_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 4, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_and_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 4, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_and_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 4, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_and_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 4, (reg), (imm), (size)); \
    } while(0)

#define x86_64_and_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 4, (mem), (imm), (size)); \
    } while(0)

#define x86_64_and_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 4, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_and_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 4, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * CMP: compare
 */
#define x86_64_cmp_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 7, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 7, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 7, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 7, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 7, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 7, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_cmp_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 7, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_cmp_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 7, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_cmp_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 7, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_cmp_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 7, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_cmp_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 7, (reg), (imm), (size)); \
    } while(0)

#define x86_64_cmp_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 7, (mem), (imm), (size)); \
    } while(0)

#define x86_64_cmp_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 7, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_cmp_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 7, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * OR
 */
#define x86_64_or_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 1, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_or_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 1, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_or_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 1, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_or_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 1, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_or_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 1, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_or_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 1, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_or_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 1, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_or_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 1, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_or_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 1, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_or_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 1, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_or_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 1, (reg), (imm), (size)); \
    } while(0)

#define x86_64_or_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 1, (mem), (imm), (size)); \
    } while(0)

#define x86_64_or_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 1, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_or_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 1, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * SBB: Subtract with borrow (dest = dest - (src + CF))
 */
#define x86_64_sbb_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 3, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 3, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 3, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 3, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 3, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 3, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_sbb_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 3, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_sbb_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 3, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_sbb_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 3, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_sbb_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 3, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_sbb_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 3, (reg), (imm), (size)); \
    } while(0)

#define x86_64_sbb_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 3, (mem), (imm), (size)); \
    } while(0)

#define x86_64_sbb_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 3, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_sbb_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 3, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * SUB: Subtract
 */
#define x86_64_sub_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 5, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_sub_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 5, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_sub_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 5, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_sub_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 5, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_sub_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 5, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_sub_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 5, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_sub_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 5, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_sub_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 5, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_sub_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 5, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_sub_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 5, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_sub_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 5, (reg), (imm), (size)); \
    } while(0)

#define x86_64_sub_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 5, (mem), (imm), (size)); \
    } while(0)

#define x86_64_sub_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 5, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_sub_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 5, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * XOR
 */
#define x86_64_xor_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 6, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_xor_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 6, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_xor_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 6, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_xor_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 6, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_xor_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 6, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_xor_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 6, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_xor_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 6, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_xor_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 6, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_xor_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 6, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_xor_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 6, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_xor_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 6, (reg), (imm), (size)); \
    } while(0)

#define x86_64_xor_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size((inst), 6, (mem), (imm), (size)); \
    } while(0)

#define x86_64_xor_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 6, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_xor_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 6, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * dec
 */
#define x86_64_dec_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xfe, 1, (reg), (size)); \
    } while(0)

#define x86_64_dec_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xfe, 1, (regp), (size)); \
    } while(0)

#define x86_64_dec_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xfe, 1, (mem), (size)); \
    } while(0)

#define x86_64_dec_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xfe, 1, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_dec_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xfe, 1, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

/*
 * div: unsigned division RDX:RAX / operand
 */
#define x86_64_div_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xf6, 6, (reg), (size)); \
    } while(0)

#define x86_64_div_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xf6, 6, (regp), (size)); \
    } while(0)

#define x86_64_div_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xf6, 6, (mem), (size)); \
    } while(0)

#define x86_64_div_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xf6, 6, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_div_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xf6, 6, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

/*
 * idiv: signed division RDX:RAX / operand
 */
#define x86_64_idiv_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xf6, 7, (reg), (size)); \
    } while(0)

#define x86_64_idiv_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xf6, 7, (regp), (size)); \
    } while(0)

#define x86_64_idiv_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xf6, 7, (mem), (size)); \
    } while(0)

#define x86_64_idiv_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xf6, 7, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_idiv_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xf6, 7, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)
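
/*
 * Usage sketch (illustrative, not part of the original header): both
 * div and idiv take the 128-bit dividend in RDX:RAX, so RDX must be
 * set up first; for unsigned division it is simply zeroed (signed code
 * would instead sign-extend RAX into RDX, e.g. with cqo):
 *
 *     unsigned char code[8];
 *     unsigned char *inst = code;
 *     x86_64_xor_reg_reg_size(inst, X86_64_RDX, X86_64_RDX, 8); // rdx = 0
 *     x86_64_div_reg_size(inst, X86_64_RBX, 8);
 *     // rax = rdx:rax / rbx, rdx = remainder
 */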

/*
 * inc
 */
#define x86_64_inc_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xfe, 0, (reg), (size)); \
    } while(0)

#define x86_64_inc_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xfe, 0, (regp), (size)); \
    } while(0)

#define x86_64_inc_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xfe, 0, (mem), (size)); \
    } while(0)

#define x86_64_inc_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xfe, 0, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_inc_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xfe, 0, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)
  1629. /*
  1630. * mul: multiply RDX:RAX = RAX * operand
  1631. * is_signed == 0 -> unsigned multiplication
  1632. * signed multiplication otherwise.
  1633. */
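/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): form the
 * full 128 bit unsigned product of RAX and RBX; the low half of the
 * result is left in RAX, the high half in RDX.
 *
 *   x86_64_mul_reg_issigned_size(inst, X86_64_RBX, 0, 8);
 */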
  1634. #define x86_64_mul_reg_issigned_size(inst, reg, is_signed, size) \
  1635. do { \
  1636. x86_64_alu1_reg_size((inst), 0xf6, ((is_signed) ? 5 : 4), (reg), (size)); \
  1637. } while(0)
  1638. #define x86_64_mul_regp_issigned_size(inst, regp, is_signed, size) \
  1639. do { \
  1640. x86_64_alu1_regp_size((inst), 0xf6, ((is_signed) ? 5 : 4), (regp), (size)); \
  1641. } while(0)
  1642. #define x86_64_mul_mem_issigned_size(inst, mem, is_signed, size) \
  1643. do { \
  1644. x86_64_alu1_mem_size((inst), 0xf6, ((is_signed) ? 5 : 4), (mem), (size)); \
  1645. } while(0)
  1646. #define x86_64_mul_membase_issigned_size(inst, basereg, disp, is_signed, size) \
  1647. do { \
  1648. x86_64_alu1_membase_size((inst), 0xf6, ((is_signed) ? 5 : 4), (basereg), (disp), (size)); \
  1649. } while(0)
  1650. #define x86_64_mul_memindex_issigned_size(inst, basereg, disp, indexreg, shift, is_signed, size) \
  1651. do { \
  1652. x86_64_alu1_memindex_size((inst), 0xf6, ((is_signed) ? 5 : 4), (basereg), (disp), (indexreg), (shift), (size)); \
  1653. } while(0)
  1654. /*
  1655. * neg
  1656. */
  1657. #define x86_64_neg_reg_size(inst, reg, size) \
  1658. do { \
  1659. x86_64_alu1_reg_size((inst), 0xf6, 3, (reg), (size)); \
  1660. } while(0)
  1661. #define x86_64_neg_regp_size(inst, regp, size) \
  1662. do { \
  1663. x86_64_alu1_regp_size((inst), 0xf6, 3, (regp), (size)); \
  1664. } while(0)
  1665. #define x86_64_neg_mem_size(inst, mem, size) \
  1666. do { \
  1667. x86_64_alu1_mem_size((inst), 0xf6, 3, (mem), (size)); \
  1668. } while(0)
  1669. #define x86_64_neg_membase_size(inst, basereg, disp, size) \
  1670. do { \
  1671. x86_64_alu1_membase_size((inst), 0xf6, 3, (basereg), (disp), (size)); \
  1672. } while(0)
  1673. #define x86_64_neg_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  1674. do { \
  1675. x86_64_alu1_memindex_size((inst), 0xf6, 3, (basereg), (disp), (indexreg), (shift), (size)); \
  1676. } while(0)
  1677. /*
  1678. * not
  1679. */
  1680. #define x86_64_not_reg_size(inst, reg, size) \
  1681. do { \
  1682. x86_64_alu1_reg_size((inst), 0xf6, 2, (reg), (size)); \
  1683. } while(0)
  1684. #define x86_64_not_regp_size(inst, regp, size) \
  1685. do { \
  1686. x86_64_alu1_regp_size((inst), 0xf6, 2, (regp), (size)); \
  1687. } while(0)
  1688. #define x86_64_not_mem_size(inst, mem, size) \
  1689. do { \
  1690. x86_64_alu1_mem_size((inst), 0xf6, 2, (mem), (size)); \
  1691. } while(0)
  1692. #define x86_64_not_membase_size(inst, basereg, disp, size) \
  1693. do { \
  1694. x86_64_alu1_membase_size((inst), 0xf6, 2, (basereg), (disp), (size)); \
  1695. } while(0)
  1696. #define x86_64_not_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  1697. do { \
  1698. x86_64_alu1_memindex_size((inst), 0xf6, 2, (basereg), (disp), (indexreg), (shift), (size)); \
  1699. } while(0)
1700. /*
1701. * Note: x86_64_clear_reg () changes the condition codes!
1702. */
  1703. #define x86_64_clear_reg(inst, reg) \
  1704. x86_64_xor_reg_reg_size((inst), (reg), (reg), 4)
  1705. /*
  1706. * shift instructions
  1707. */
  1708. #define x86_64_shift_reg_imm_size(inst, opc, dreg, imm, size) \
  1709. do { \
  1710. if((imm) == 1) \
  1711. { \
  1712. if((size) == 2) \
  1713. { \
  1714. *(inst)++ = (unsigned char)0x66; \
  1715. } \
  1716. x86_64_rex_emit((inst), (size), 0, 0, (dreg)); \
  1717. x86_64_opcode1_emit((inst), 0xd0, (size)); \
  1718. x86_64_reg_emit((inst), (opc), (dreg)); \
  1719. } \
  1720. else \
  1721. { \
  1722. if((size) == 2) \
  1723. { \
  1724. *(inst)++ = (unsigned char)0x66; \
  1725. } \
  1726. x86_64_rex_emit((inst), (size), 0, 0, (dreg)); \
  1727. x86_64_opcode1_emit((inst), 0xc0, (size)); \
  1728. x86_64_reg_emit((inst), (opc), (dreg)); \
  1729. x86_imm_emit8((inst), (imm)); \
  1730. } \
  1731. } while(0)
  1732. #define x86_64_shift_mem_imm_size(inst, opc, mem, imm, size) \
  1733. do { \
  1734. if((imm) == 1) \
  1735. { \
  1736. if((size) == 2) \
  1737. { \
  1738. *(inst)++ = (unsigned char)0x66; \
  1739. } \
  1740. x86_64_rex_emit((inst), (size), 0, 0, 0); \
  1741. x86_64_opcode1_emit((inst), 0xd0, (size)); \
  1742. x86_64_mem_emit((inst), (opc), (mem)); \
  1743. } \
  1744. else \
  1745. { \
  1746. if((size) == 2) \
  1747. { \
  1748. *(inst)++ = (unsigned char)0x66; \
  1749. } \
  1750. x86_64_rex_emit((inst), (size), 0, 0, 0); \
  1751. x86_64_opcode1_emit((inst), 0xc0, (size)); \
  1752. x86_64_mem_emit((inst), (opc), (mem)); \
  1753. x86_imm_emit8((inst), (imm)); \
  1754. } \
  1755. } while(0)
  1756. #define x86_64_shift_regp_imm_size(inst, opc, dregp, imm, size) \
  1757. do { \
  1758. if((imm) == 1) \
  1759. { \
  1760. if((size) == 2) \
  1761. { \
  1762. *(inst)++ = (unsigned char)0x66; \
  1763. } \
  1764. x86_64_rex_emit((inst), (size), 0, 0, (dregp)); \
  1765. x86_64_opcode1_emit((inst), 0xd0, (size)); \
  1766. x86_64_regp_emit((inst), (opc), (dregp)); \
  1767. } \
  1768. else \
  1769. { \
  1770. if((size) == 2) \
  1771. { \
  1772. *(inst)++ = (unsigned char)0x66; \
  1773. } \
  1774. x86_64_rex_emit((inst), (size), 0, 0, (dregp)); \
  1775. x86_64_opcode1_emit((inst), 0xc0, (size)); \
  1776. x86_64_regp_emit((inst), (opc), (dregp)); \
  1777. x86_imm_emit8((inst), (imm)); \
  1778. } \
  1779. } while(0)
  1780. #define x86_64_shift_membase_imm_size(inst, opc, basereg, disp, imm, size) \
  1781. do { \
  1782. if((imm) == 1) \
  1783. { \
  1784. if((size) == 2) \
  1785. { \
  1786. *(inst)++ = (unsigned char)0x66; \
  1787. } \
  1788. x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
  1789. x86_64_opcode1_emit((inst), 0xd0, (size)); \
  1790. x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
  1791. } \
  1792. else \
  1793. { \
  1794. if((size) == 2) \
  1795. { \
  1796. *(inst)++ = (unsigned char)0x66; \
  1797. } \
  1798. x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
  1799. x86_64_opcode1_emit((inst), 0xc0, (size)); \
  1800. x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
  1801. x86_imm_emit8((inst), (imm)); \
  1802. } \
  1803. } while(0)
  1804. #define x86_64_shift_memindex_imm_size(inst, opc, basereg, disp, indexreg, shift, imm, size) \
  1805. do { \
  1806. if((imm) == 1) \
  1807. { \
  1808. if((size) == 2) \
  1809. { \
  1810. *(inst)++ = (unsigned char)0x66; \
  1811. } \
  1812. x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
  1813. x86_64_opcode1_emit((inst), 0xd0, (size)); \
  1814. x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
  1815. } \
  1816. else \
  1817. { \
  1818. if((size) == 2) \
  1819. { \
  1820. *(inst)++ = (unsigned char)0x66; \
  1821. } \
  1822. x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
  1823. x86_64_opcode1_emit((inst), 0xc0, (size)); \
  1824. x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
  1825. x86_imm_emit8((inst), (imm)); \
  1826. } \
  1827. } while(0)
  1828. /*
  1829. * shift by the number of bits in %cl
  1830. */
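/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): shift RAX
 * left by the count in CL (the count register is fixed to CL for
 * this encoding).
 *
 *   x86_64_shl_reg_size(inst, X86_64_RAX, 8);
 */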
  1831. #define x86_64_shift_reg_size(inst, opc, dreg, size) \
  1832. do { \
  1833. if((size) == 2) \
  1834. { \
  1835. *(inst)++ = (unsigned char)0x66; \
  1836. } \
  1837. x86_64_rex_emit((inst), (size), 0, 0, (dreg)); \
  1838. x86_64_opcode1_emit((inst), 0xd2, (size)); \
  1839. x86_64_reg_emit((inst), (opc), (dreg)); \
  1840. } while(0)
  1841. #define x86_64_shift_mem_size(inst, opc, mem, size) \
  1842. do { \
  1843. if((size) == 2) \
  1844. { \
  1845. *(inst)++ = (unsigned char)0x66; \
  1846. } \
  1847. x86_64_rex_emit((inst), (size), 0, 0, 0); \
  1848. x86_64_opcode1_emit((inst), 0xd2, (size)); \
  1849. x86_64_mem_emit((inst), (opc), (mem)); \
  1850. } while(0)
  1851. #define x86_64_shift_regp_size(inst, opc, dregp, size) \
  1852. do { \
  1853. if((size) == 2) \
  1854. { \
  1855. *(inst)++ = (unsigned char)0x66; \
  1856. } \
  1857. x86_64_rex_emit((inst), (size), 0, 0, (dregp)); \
  1858. x86_64_opcode1_emit((inst), 0xd2, (size)); \
  1859. x86_64_regp_emit((inst), (opc), (dregp)); \
  1860. } while(0)
  1861. #define x86_64_shift_membase_size(inst, opc, basereg, disp, size) \
  1862. do { \
  1863. if((size) == 2) \
  1864. { \
  1865. *(inst)++ = (unsigned char)0x66; \
  1866. } \
  1867. x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
  1868. x86_64_opcode1_emit((inst), 0xd2, (size)); \
  1869. x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
  1870. } while(0)
  1871. #define x86_64_shift_memindex_size(inst, opc, basereg, disp, indexreg, shift, size) \
  1872. do { \
  1873. if((size) == 2) \
  1874. { \
  1875. *(inst)++ = (unsigned char)0x66; \
  1876. } \
  1877. x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
  1878. x86_64_opcode1_emit((inst), 0xd2, (size)); \
  1879. x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
  1880. } while(0)
1881. /*
1882. * shl: Shift left (fill the least significant bits with zeroes)
1883. */
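/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): multiply
 * the 64 bit value in RAX by 8 by shifting it left three bits.
 *
 *   x86_64_shl_reg_imm_size(inst, X86_64_RAX, 3, 8);
 */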
  1884. #define x86_64_shl_reg_imm_size(inst, dreg, imm, size) \
  1885. do { \
  1886. x86_64_shift_reg_imm_size((inst), 4, (dreg), (imm), (size)); \
  1887. } while(0)
  1888. #define x86_64_shl_mem_imm_size(inst, mem, imm, size) \
  1889. do { \
  1890. x86_64_shift_mem_imm_size((inst), 4, (mem), (imm), (size)); \
  1891. } while(0)
  1892. #define x86_64_shl_regp_imm_size(inst, dregp, imm, size) \
  1893. do { \
  1894. x86_64_shift_regp_imm_size((inst), 4, (dregp), (imm), (size)); \
  1895. } while(0)
  1896. #define x86_64_shl_membase_imm_size(inst, basereg, disp, imm, size) \
  1897. do { \
  1898. x86_64_shift_membase_imm_size((inst), 4, (basereg), (disp), (imm), (size)); \
  1899. } while(0)
  1900. #define x86_64_shl_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
  1901. do { \
  1902. x86_64_shift_memindex_imm_size((inst), 4, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
  1903. } while(0)
  1904. #define x86_64_shl_reg_size(inst, dreg, size) \
  1905. do { \
  1906. x86_64_shift_reg_size((inst), 4, (dreg), (size)); \
  1907. } while(0)
  1908. #define x86_64_shl_mem_size(inst, mem, size) \
  1909. do { \
  1910. x86_64_shift_mem_size((inst), 4, (mem), (size)); \
  1911. } while(0)
  1912. #define x86_64_shl_regp_size(inst, dregp, size) \
  1913. do { \
  1914. x86_64_shift_regp_size((inst), 4, (dregp), (size)); \
  1915. } while(0)
  1916. #define x86_64_shl_membase_size(inst, basereg, disp, size) \
  1917. do { \
  1918. x86_64_shift_membase_size((inst), 4, (basereg), (disp), (size)); \
  1919. } while(0)
  1920. #define x86_64_shl_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  1921. do { \
  1922. x86_64_shift_memindex_size((inst), 4, (basereg), (disp), (indexreg), (shift), (size)); \
  1923. } while(0)
1924. /*
1925. * shr: Unsigned shift right (fill the most significant bits with zeroes)
1926. */
  1927. #define x86_64_shr_reg_imm_size(inst, dreg, imm, size) \
  1928. do { \
  1929. x86_64_shift_reg_imm_size((inst), 5, (dreg), (imm), (size)); \
  1930. } while(0)
  1931. #define x86_64_shr_mem_imm_size(inst, mem, imm, size) \
  1932. do { \
  1933. x86_64_shift_mem_imm_size((inst), 5, (mem), (imm), (size)); \
  1934. } while(0)
  1935. #define x86_64_shr_regp_imm_size(inst, dregp, imm, size) \
  1936. do { \
  1937. x86_64_shift_regp_imm_size((inst), 5, (dregp), (imm), (size)); \
  1938. } while(0)
  1939. #define x86_64_shr_membase_imm_size(inst, basereg, disp, imm, size) \
  1940. do { \
  1941. x86_64_shift_membase_imm_size((inst), 5, (basereg), (disp), (imm), (size)); \
  1942. } while(0)
  1943. #define x86_64_shr_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
  1944. do { \
  1945. x86_64_shift_memindex_imm_size((inst), 5, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
  1946. } while(0)
  1947. #define x86_64_shr_reg_size(inst, dreg, size) \
  1948. do { \
  1949. x86_64_shift_reg_size((inst), 5, (dreg), (size)); \
  1950. } while(0)
  1951. #define x86_64_shr_mem_size(inst, mem, size) \
  1952. do { \
  1953. x86_64_shift_mem_size((inst), 5, (mem), (size)); \
  1954. } while(0)
  1955. #define x86_64_shr_regp_size(inst, dregp, size) \
  1956. do { \
  1957. x86_64_shift_regp_size((inst), 5, (dregp), (size)); \
  1958. } while(0)
  1959. #define x86_64_shr_membase_size(inst, basereg, disp, size) \
  1960. do { \
  1961. x86_64_shift_membase_size((inst), 5, (basereg), (disp), (size)); \
  1962. } while(0)
  1963. #define x86_64_shr_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  1964. do { \
  1965. x86_64_shift_memindex_size((inst), 5, (basereg), (disp), (indexreg), (shift), (size)); \
  1966. } while(0)
1967. /*
1968. * sar: Signed shift right (replicate the most significant bit, i.e. preserve the sign)
1969. */
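/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): divide the
 * signed 64 bit value in RAX by 4, rounding towards negative
 * infinity (unlike idiv, which truncates towards zero).
 *
 *   x86_64_sar_reg_imm_size(inst, X86_64_RAX, 2, 8);
 */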
  1970. #define x86_64_sar_reg_imm_size(inst, dreg, imm, size) \
  1971. do { \
  1972. x86_64_shift_reg_imm_size((inst), 7, (dreg), (imm), (size)); \
  1973. } while(0)
  1974. #define x86_64_sar_mem_imm_size(inst, mem, imm, size) \
  1975. do { \
  1976. x86_64_shift_mem_imm_size((inst), 7, (mem), (imm), (size)); \
  1977. } while(0)
  1978. #define x86_64_sar_regp_imm_size(inst, dregp, imm, size) \
  1979. do { \
  1980. x86_64_shift_regp_imm_size((inst), 7, (dregp), (imm), (size)); \
  1981. } while(0)
  1982. #define x86_64_sar_membase_imm_size(inst, basereg, disp, imm, size) \
  1983. do { \
  1984. x86_64_shift_membase_imm_size((inst), 7, (basereg), (disp), (imm), (size)); \
  1985. } while(0)
  1986. #define x86_64_sar_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
  1987. do { \
  1988. x86_64_shift_memindex_imm_size((inst), 7, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
  1989. } while(0)
  1990. #define x86_64_sar_reg_size(inst, dreg, size) \
  1991. do { \
  1992. x86_64_shift_reg_size((inst), 7, (dreg), (size)); \
  1993. } while(0)
  1994. #define x86_64_sar_mem_size(inst, mem, size) \
  1995. do { \
  1996. x86_64_shift_mem_size((inst), 7, (mem), (size)); \
  1997. } while(0)
  1998. #define x86_64_sar_regp_size(inst, dregp, size) \
  1999. do { \
  2000. x86_64_shift_regp_size((inst), 7, (dregp), (size)); \
  2001. } while(0)
  2002. #define x86_64_sar_membase_size(inst, basereg, disp, size) \
  2003. do { \
  2004. x86_64_shift_membase_size((inst), 7, (basereg), (disp), (size)); \
  2005. } while(0)
  2006. #define x86_64_sar_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  2007. do { \
  2008. x86_64_shift_memindex_size((inst), 7, (basereg), (disp), (indexreg), (shift), (size)); \
  2009. } while(0)
2010. /*
2011. * test: AND the two values, discard the result and set SF, ZF and PF accordingly
2012. */
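/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): check
 * whether the lowest bit of RAX is set without modifying RAX.
 *
 *   x86_64_test_reg_imm_size(inst, X86_64_RAX, 1, 8);
 */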
  2013. #define x86_64_test_reg_imm_size(inst, reg, imm, size) \
  2014. do { \
  2015. if((size) == 2) \
  2016. { \
  2017. *(inst)++ = (unsigned char)0x66; \
  2018. } \
  2019. x86_64_rex_emit((inst), (size), 0, 0, (reg)); \
  2020. if((reg) == X86_64_RAX) { \
  2021. x86_64_opcode1_emit((inst), 0xa8, (size)); \
  2022. } \
  2023. else \
  2024. { \
  2025. x86_64_opcode1_emit((inst), 0xf6, (size)); \
  2026. x86_64_reg_emit((inst), 0, (reg)); \
  2027. } \
  2028. x86_64_imm_emit_max32((inst), (imm), (size)); \
  2029. } while (0)
  2030. #define x86_64_test_regp_imm_size(inst, regp, imm, size) \
  2031. do { \
  2032. if((size) == 2) \
  2033. { \
  2034. *(inst)++ = (unsigned char)0x66; \
  2035. } \
  2036. x86_64_rex_emit((inst), (size), 0, 0, (regp)); \
  2037. x86_64_opcode1_emit((inst), 0xf6, (size)); \
  2038. x86_64_regp_emit((inst), 0, (regp)); \
  2039. x86_64_imm_emit_max32((inst), (imm), (size)); \
  2040. } while (0)
  2041. #define x86_64_test_mem_imm_size(inst, mem, imm, size) \
  2042. do { \
  2043. if((size) == 2) \
  2044. { \
  2045. *(inst)++ = (unsigned char)0x66; \
  2046. } \
  2047. x86_64_rex_emit((inst), (size), 0, 0, 0); \
  2048. x86_64_opcode1_emit((inst), 0xf6, (size)); \
  2049. x86_64_mem_emit((inst), 0, (mem)); \
  2050. x86_64_imm_emit_max32((inst), (imm), (size)); \
  2051. } while (0)
  2052. #define x86_64_test_membase_imm_size(inst, basereg, disp, imm, size) \
  2053. do { \
  2054. if((size) == 2) \
  2055. { \
  2056. *(inst)++ = (unsigned char)0x66; \
  2057. } \
  2058. x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
  2059. x86_64_opcode1_emit((inst), 0xf6, (size)); \
  2060. x86_64_membase_emit((inst), 0, (basereg), (disp)); \
  2061. x86_64_imm_emit_max32((inst), (imm), (size)); \
  2062. } while (0)
  2063. #define x86_64_test_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
  2064. do { \
  2065. if((size) == 2) \
  2066. { \
  2067. *(inst)++ = (unsigned char)0x66; \
  2068. } \
  2069. x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
  2070. x86_64_opcode1_emit((inst), 0xf6, (size)); \
  2071. x86_64_memindex_emit((inst), 0, (basereg), (disp), (indexreg), (shift)); \
  2072. x86_64_imm_emit_max32((inst), (imm), (size)); \
  2073. } while (0)
  2074. #define x86_64_test_reg_reg_size(inst, dreg, sreg, size) \
  2075. do { \
  2076. if((size) == 2) \
  2077. { \
  2078. *(inst)++ = (unsigned char)0x66; \
  2079. } \
  2080. x86_64_rex_emit((inst), (size), (sreg), 0, (dreg)); \
  2081. x86_64_opcode1_emit((inst), 0x84, (size)); \
  2082. x86_64_reg_emit((inst), (sreg), (dreg)); \
  2083. } while (0)
  2084. #define x86_64_test_regp_reg_size(inst, dregp, sreg, size) \
  2085. do { \
  2086. if((size) == 2) \
  2087. { \
  2088. *(inst)++ = (unsigned char)0x66; \
  2089. } \
  2090. x86_64_rex_emit((inst), (size), (sreg), 0, (dregp)); \
  2091. x86_64_opcode1_emit((inst), 0x84, (size)); \
  2092. x86_64_regp_emit((inst), (sreg), (dregp)); \
  2093. } while (0)
  2094. #define x86_64_test_mem_reg_size(inst, mem, sreg, size) \
  2095. do { \
  2096. if((size) == 2) \
  2097. { \
  2098. *(inst)++ = (unsigned char)0x66; \
  2099. } \
  2100. x86_64_rex_emit((inst), (size), (sreg), 0, 0); \
  2101. x86_64_opcode1_emit((inst), 0x84, (size)); \
  2102. x86_64_mem_emit((inst), (sreg), (mem)); \
  2103. } while (0)
  2104. #define x86_64_test_membase_reg_size(inst, basereg, disp, sreg, size) \
  2105. do { \
  2106. if((size) == 2) \
  2107. { \
  2108. *(inst)++ = (unsigned char)0x66; \
  2109. } \
  2110. x86_64_rex_emit((inst), (size), (sreg), 0, (basereg)); \
  2111. x86_64_opcode1_emit((inst), 0x84, (size)); \
  2112. x86_64_membase_emit((inst), (sreg), (basereg), (disp)); \
  2113. } while (0)
  2114. #define x86_64_test_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
  2115. do { \
  2116. if((size) == 2) \
  2117. { \
  2118. *(inst)++ = (unsigned char)0x66; \
  2119. } \
  2120. x86_64_rex_emit((inst), (size), (sreg), (indexreg), (basereg)); \
  2121. x86_64_opcode1_emit((inst), 0x84, (size)); \
  2122. x86_64_memindex_emit((inst), (sreg), (basereg), (disp), (indexreg), (shift)); \
  2123. } while (0)
  2124. /*
  2125. * imul: signed multiply
  2126. */
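/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): the three
 * operand immediate form computes RAX = RCX * 10 in one instruction.
 *
 *   x86_64_imul_reg_reg_imm_size(inst, X86_64_RAX, X86_64_RCX, 10, 8);
 */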
  2127. #define x86_64_imul_reg_reg_imm_size(inst, dreg, sreg, imm, size) \
  2128. do { \
  2129. if((size) == 2) \
  2130. { \
  2131. *(inst)++ = (unsigned char)0x66; \
  2132. } \
  2133. x86_64_rex_emit((inst), (size), (dreg), 0, (sreg)); \
  2134. if(x86_is_imm8((imm))) \
  2135. { \
  2136. *(inst)++ = (unsigned char)0x6b; \
  2137. x86_64_reg_emit((inst), (dreg), (sreg)); \
  2138. x86_imm_emit8((inst), (imm)); \
  2139. } \
  2140. else \
  2141. { \
  2142. *(inst)++ = (unsigned char)0x69; \
  2143. x86_64_reg_emit((inst), (dreg), (sreg)); \
  2144. switch((size)) \
  2145. { \
  2146. case 2: \
  2147. { \
  2148. x86_imm_emit16(inst, (imm)); \
  2149. } \
  2150. break; \
  2151. case 4: \
  2152. case 8: \
  2153. { \
  2154. x86_imm_emit32(inst, (imm)); \
  2155. } \
  2156. break; \
  2157. } \
  2158. } \
  2159. } while(0)
  2160. #define x86_64_imul_reg_regp_imm_size(inst, dreg, sregp, imm, size) \
  2161. do { \
  2162. if((size) == 2) \
  2163. { \
  2164. *(inst)++ = (unsigned char)0x66; \
  2165. } \
  2166. x86_64_rex_emit((inst), (size), (dreg), 0, (sregp)); \
  2167. if(x86_is_imm8((imm))) \
  2168. { \
  2169. *(inst)++ = (unsigned char)0x6b; \
  2170. x86_64_regp_emit((inst), (dreg), (sregp)); \
  2171. x86_imm_emit8((inst), (imm)); \
  2172. } \
  2173. else \
  2174. { \
  2175. *(inst)++ = (unsigned char)0x69; \
  2176. x86_64_regp_emit((inst), (dreg), (sregp)); \
  2177. switch((size)) \
  2178. { \
  2179. case 2: \
  2180. { \
  2181. x86_imm_emit16(inst, (imm)); \
  2182. } \
  2183. break; \
  2184. case 4: \
  2185. case 8: \
  2186. { \
  2187. x86_imm_emit32(inst, (imm)); \
  2188. } \
  2189. break; \
  2190. } \
  2191. } \
  2192. } while(0)
  2193. #define x86_64_imul_reg_mem_imm_size(inst, dreg, mem, imm, size) \
  2194. do { \
  2195. if((size) == 2) \
  2196. { \
  2197. *(inst)++ = (unsigned char)0x66; \
  2198. } \
  2199. x86_64_rex_emit((inst), (size), (dreg), 0, 0); \
  2200. if(x86_is_imm8((imm))) \
  2201. { \
  2202. *(inst)++ = (unsigned char)0x6b; \
  2203. x86_64_mem_emit((inst), (dreg), (mem)); \
  2204. x86_imm_emit8((inst), (imm)); \
  2205. } \
  2206. else \
  2207. { \
  2208. *(inst)++ = (unsigned char)0x69; \
  2209. x86_64_mem_emit((inst), (dreg), (mem)); \
  2210. switch((size)) \
  2211. { \
  2212. case 2: \
  2213. { \
  2214. x86_imm_emit16(inst, (imm)); \
  2215. } \
  2216. break; \
  2217. case 4: \
  2218. case 8: \
  2219. { \
  2220. x86_imm_emit32(inst, (imm)); \
  2221. } \
  2222. break; \
  2223. } \
  2224. } \
  2225. } while(0)
  2226. #define x86_64_imul_reg_membase_imm_size(inst, dreg, basereg, disp, imm, size) \
  2227. do { \
  2228. if((size) == 2) \
  2229. { \
  2230. *(inst)++ = (unsigned char)0x66; \
  2231. } \
  2232. x86_64_rex_emit((inst), (size), (dreg), 0, (basereg)); \
  2233. if(x86_is_imm8((imm))) \
  2234. { \
  2235. *(inst)++ = (unsigned char)0x6b; \
  2236. x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
  2237. x86_imm_emit8((inst), (imm)); \
  2238. } \
  2239. else \
  2240. { \
  2241. *(inst)++ = (unsigned char)0x69; \
  2242. x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
  2243. switch((size)) \
  2244. { \
  2245. case 2: \
  2246. { \
  2247. x86_imm_emit16(inst, (imm)); \
  2248. } \
  2249. break; \
  2250. case 4: \
  2251. case 8: \
  2252. { \
  2253. x86_imm_emit32(inst, (imm)); \
  2254. } \
  2255. break; \
  2256. } \
  2257. } \
  2258. } while(0)
  2259. #define x86_64_imul_reg_memindex_imm_size(inst, dreg, basereg, disp, indexreg, shift, imm, size) \
  2260. do { \
  2261. if((size) == 2) \
  2262. { \
  2263. *(inst)++ = (unsigned char)0x66; \
  2264. } \
  2265. x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
  2266. if(x86_is_imm8((imm))) \
  2267. { \
  2268. *(inst)++ = (unsigned char)0x6b; \
  2269. x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
  2270. x86_imm_emit8((inst), (imm)); \
  2271. } \
  2272. else \
  2273. { \
  2274. *(inst)++ = (unsigned char)0x69; \
  2275. x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
  2276. switch((size)) \
  2277. { \
  2278. case 2: \
  2279. { \
  2280. x86_imm_emit16(inst, (imm)); \
  2281. } \
  2282. break; \
  2283. case 4: \
  2284. case 8: \
  2285. { \
  2286. x86_imm_emit32(inst, (imm)); \
  2287. } \
  2288. break; \
  2289. } \
  2290. } \
  2291. } while(0)
  2292. #define x86_64_imul_reg_reg_size(inst, dreg, sreg, size) \
  2293. do { \
  2294. if((size) == 2) \
  2295. { \
  2296. *(inst)++ = (unsigned char)0x66; \
  2297. } \
  2298. x86_64_rex_emit((inst), (size), (dreg), 0, (sreg)); \
  2299. *(inst)++ = (unsigned char)0x0F; \
  2300. *(inst)++ = (unsigned char)0xAF; \
  2301. x86_64_reg_emit((inst), (dreg), (sreg)); \
  2302. } while(0)
  2303. #define x86_64_imul_reg_regp_size(inst, dreg, sregp, size) \
  2304. do { \
  2305. if((size) == 2) \
  2306. { \
  2307. *(inst)++ = (unsigned char)0x66; \
  2308. } \
  2309. x86_64_rex_emit((inst), (size), (dreg), 0, (sregp)); \
  2310. *(inst)++ = (unsigned char)0x0F; \
  2311. *(inst)++ = (unsigned char)0xAF; \
  2312. x86_64_regp_emit((inst), (dreg), (sregp)); \
  2313. } while(0)
  2314. #define x86_64_imul_reg_mem_size(inst, dreg, mem, size) \
  2315. do { \
  2316. if((size) == 2) \
  2317. { \
  2318. *(inst)++ = (unsigned char)0x66; \
  2319. } \
  2320. x86_64_rex_emit((inst), (size), (dreg), 0, 0); \
  2321. *(inst)++ = (unsigned char)0x0F; \
  2322. *(inst)++ = (unsigned char)0xAF; \
  2323. x86_64_mem_emit((inst), (dreg), (mem)); \
  2324. } while(0)
  2325. #define x86_64_imul_reg_membase_size(inst, dreg, basereg, disp, size) \
  2326. do { \
  2327. if((size) == 2) \
  2328. { \
  2329. *(inst)++ = (unsigned char)0x66; \
  2330. } \
  2331. x86_64_rex_emit((inst), (size), (dreg), 0, (basereg)); \
  2332. *(inst)++ = (unsigned char)0x0F; \
  2333. *(inst)++ = (unsigned char)0xAF; \
  2334. x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
  2335. } while(0)
  2336. #define x86_64_imul_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2337. do { \
  2338. if((size) == 2) \
  2339. { \
  2340. *(inst)++ = (unsigned char)0x66; \
  2341. } \
  2342. x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
  2343. *(inst)++ = (unsigned char)0x0F; \
  2344. *(inst)++ = (unsigned char)0xAF; \
  2345. x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
  2346. } while(0)
2347. /*
2348. * cwd, cdq, cqo: sign extend AX/EAX/RAX into DX/EDX/RDX (used to set up the dividend for idiv)
2349. */
  2350. #define x86_64_cwd(inst) \
  2351. do { \
  2352. *(inst)++ = (unsigned char)0x66; \
  2353. *(inst)++ = (unsigned char)0x99; \
  2354. } while(0)
  2355. #define x86_64_cdq(inst) \
  2356. do { \
  2357. *(inst)++ = (unsigned char)0x99; \
  2358. } while(0)
  2359. #define x86_64_cqo(inst) \
  2360. do { \
  2361. *(inst)++ = (unsigned char)0x48; \
  2362. *(inst)++ = (unsigned char)0x99; \
  2363. } while(0)
  2364. /*
  2365. * Lea instructions
  2366. */
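/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): lea only
 * computes the effective address and never touches memory, so it
 * doubles as an add: RAX = RBP + 16.
 *
 *   x86_64_lea_membase_size(inst, X86_64_RAX, X86_64_RBP, 16, 8);
 */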
  2367. #define x86_64_lea_mem_size(inst, dreg, mem, size) \
  2368. do { \
  2369. if((size) == 2) \
  2370. { \
  2371. *(inst)++ = (unsigned char)0x66; \
  2372. } \
  2373. x86_64_rex_emit((inst), (size), 0, 0, (dreg)); \
  2374. x86_lea_mem((inst), ((dreg) & 0x7), (mem)); \
  2375. } while(0)
  2376. #define x86_64_lea_membase_size(inst, dreg, basereg, disp, size) \
  2377. do { \
  2378. if((size) == 2) \
  2379. { \
  2380. *(inst)++ = (unsigned char)0x66; \
  2381. } \
  2382. x86_64_rex_emit(inst, (size), (dreg), 0, (basereg)); \
  2383. *(inst)++ = (unsigned char)0x8d; \
  2384. x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
  2385. } while (0)
  2386. #define x86_64_lea_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2387. do { \
  2388. if((size) == 2) \
  2389. { \
  2390. *(inst)++ = (unsigned char)0x66; \
  2391. } \
  2392. x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
  2393. *(inst)++ = (unsigned char)0x8d; \
  2394. x86_64_memindex_emit ((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
  2395. } while(0)
  2396. /*
  2397. * Move instructions.
  2398. */
  2399. #define x86_64_mov_reg_reg_size(inst, dreg, sreg, size) \
  2400. do { \
  2401. if((size) == 2) \
  2402. { \
  2403. *(inst)++ = (unsigned char)0x66; \
  2404. } \
  2405. x86_64_rex_emit(inst, (size), (dreg), 0, (sreg)); \
  2406. x86_64_opcode1_emit(inst, 0x8a, (size)); \
  2407. x86_64_reg_emit((inst), ((dreg) & 0x7), ((sreg) & 0x7)); \
  2408. } while(0)
  2409. #define x86_64_mov_regp_reg_size(inst, regp, sreg, size) \
  2410. do { \
  2411. if((size) == 2) \
  2412. { \
  2413. *(inst)++ = (unsigned char)0x66; \
  2414. } \
  2415. x86_64_rex_emit(inst, (size), (sreg), 0, (regp)); \
  2416. x86_64_opcode1_emit(inst, 0x88, (size)); \
  2417. x86_64_regp_emit((inst), (sreg), (regp)); \
  2418. } while (0)
  2419. #define x86_64_mov_membase_reg_size(inst, basereg, disp, sreg, size) \
  2420. do { \
  2421. if((size) == 2) \
  2422. { \
  2423. *(inst)++ = (unsigned char)0x66; \
  2424. } \
  2425. x86_64_rex_emit(inst, (size), (sreg), 0, (basereg)); \
  2426. x86_64_opcode1_emit(inst, 0x88, (size)); \
  2427. x86_64_membase_emit((inst), (sreg), (basereg), (disp)); \
  2428. } while(0)
  2429. #define x86_64_mov_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
  2430. do { \
  2431. if((size) == 2) \
  2432. { \
  2433. *(inst)++ = (unsigned char)0x66; \
  2434. } \
  2435. x86_64_rex_emit((inst), (size), (sreg), (indexreg), (basereg)); \
  2436. x86_64_opcode1_emit(inst, 0x88, (size)); \
  2437. x86_64_memindex_emit((inst), (sreg), (basereg), (disp), (indexreg), (shift)); \
  2438. } while (0)
2439. /*
2440. * Only the A register (AL/AX/EAX/RAX) can be stored to a full 64 bit
2441. * absolute address; for all other registers the address is a signed 32 bit value.
2442. */
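/*
 * Sketch (hypothetical address `addr`, assuming an emit-buffer
 * pointer `inst`): store RAX to a full 64 bit absolute address.
 *
 *   x86_64_mov_mem_reg_size(inst, (jit_nint)addr, X86_64_RAX, 8);
 */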
  2443. #define x86_64_mov_mem_reg_size(inst, mem, sreg, size) \
  2444. do { \
  2445. if((size) == 2) \
  2446. { \
  2447. *(inst)++ = (unsigned char)0x66; \
  2448. } \
  2449. x86_64_rex_emit(inst, (size), (sreg), 0, 0); \
  2450. if((sreg) == X86_64_RAX) \
  2451. { \
  2452. x86_64_opcode1_emit(inst, 0xa2, (size)); \
  2453. x86_64_imm_emit64(inst, (mem)); \
  2454. } \
  2455. else \
  2456. { \
  2457. x86_64_opcode1_emit(inst, 0x88, (size)); \
  2458. x86_address_byte((inst), 0, ((sreg) & 0x7), 4); \
  2459. x86_address_byte((inst), 0, 4, 5); \
  2460. x86_imm_emit32((inst), (mem)); \
  2461. } \
  2462. } while (0)
  2463. #define x86_64_mov_reg_imm_size(inst, dreg, imm, size) \
  2464. do { \
  2465. if((size) == 2) \
  2466. { \
  2467. *(inst)++ = (unsigned char)0x66; \
  2468. } \
  2469. x86_64_rex_emit(inst, (size), 0, 0, (dreg)); \
  2470. switch((size)) \
  2471. { \
  2472. case 1: \
  2473. { \
  2474. *(inst)++ = (unsigned char)0xb0 + ((dreg) & 0x7); \
  2475. x86_imm_emit8(inst, (imm)); \
  2476. } \
  2477. break; \
  2478. case 2: \
  2479. { \
  2480. *(inst)++ = (unsigned char)0xb8 + ((dreg) & 0x7); \
  2481. x86_imm_emit16(inst, (imm)); \
  2482. } \
  2483. break; \
  2484. case 4: \
  2485. { \
  2486. *(inst)++ = (unsigned char)0xb8 + ((dreg) & 0x7); \
  2487. x86_imm_emit32(inst, (imm)); \
  2488. } \
  2489. break; \
  2490. case 8: \
  2491. { \
  2492. jit_nint __x86_64_imm = (imm); \
  2493. if(__x86_64_imm >= (jit_nint)jit_min_int && __x86_64_imm <= (jit_nint)jit_max_int) \
  2494. { \
  2495. *(inst)++ = (unsigned char)0xc7; \
  2496. x86_64_reg_emit((inst), 0, (dreg)); \
  2497. x86_imm_emit32(inst, (__x86_64_imm)); \
  2498. } \
  2499. else \
  2500. { \
  2501. *(inst)++ = (unsigned char)0xb8 + ((dreg) & 0x7); \
  2502. x86_64_imm_emit64(inst, (__x86_64_imm)); \
  2503. } \
  2504. } \
  2505. break; \
  2506. } \
  2507. } while(0)
2508. /*
2509. * Only the A register (AL/AX/EAX/RAX) can be loaded from a full 64 bit
2510. * absolute address; for all other registers the address is a signed 32 bit value.
2511. */
  2512. #define x86_64_mov_reg_mem_size(inst, dreg, mem, size) \
  2513. do { \
  2514. if((size) == 2) \
  2515. { \
  2516. *(inst)++ = (unsigned char)0x66; \
  2517. } \
  2518. x86_64_rex_emit(inst, (size), (dreg), 0, 0); \
  2519. if((dreg) == X86_64_RAX) \
  2520. { \
  2521. x86_64_opcode1_emit(inst, 0xa0, (size)); \
  2522. x86_64_imm_emit64(inst, (mem)); \
  2523. } \
  2524. else \
  2525. { \
  2526. x86_64_opcode1_emit(inst, 0x8a, (size)); \
  2527. x86_address_byte ((inst), 0, (dreg), 4); \
  2528. x86_address_byte ((inst), 0, 4, 5); \
  2529. x86_imm_emit32 ((inst), (mem)); \
  2530. } \
  2531. } while (0)
  2532. #define x86_64_mov_reg_regp_size(inst, dreg, sregp, size) \
  2533. do { \
  2534. if((size) == 2) \
  2535. { \
  2536. *(inst)++ = (unsigned char)0x66; \
  2537. } \
  2538. x86_64_rex_emit(inst, (size), (dreg), 0, (sregp)); \
  2539. x86_64_opcode1_emit(inst, 0x8a, (size)); \
  2540. x86_64_regp_emit((inst), (dreg), (sregp)); \
  2541. } while(0)
  2542. #define x86_64_mov_reg_membase_size(inst, dreg, basereg, disp, size) \
  2543. do { \
  2544. if((size) == 2) \
  2545. { \
  2546. *(inst)++ = (unsigned char)0x66; \
  2547. } \
  2548. x86_64_rex_emit(inst, (size), (dreg), 0, (basereg)); \
  2549. x86_64_opcode1_emit(inst, 0x8a, (size)); \
  2550. x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
  2551. } while(0)
  2552. #define x86_64_mov_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2553. do { \
  2554. if((size) == 2) \
  2555. { \
  2556. *(inst)++ = (unsigned char)0x66; \
  2557. } \
  2558. x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
  2559. x86_64_opcode1_emit(inst, 0x8a, (size)); \
  2560. x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
  2561. } while(0)
2562. /*
2563. * Only 32 bit mem and imm values are allowed here.
2564. * mem is RIP relative.
2565. * A 32 bit imm is sign extended to 64 bits for the 64 bit size.
2566. */
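/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): store the
 * constant 42, sign extended to 64 bit, into the qword at [rbp - 8].
 *
 *   x86_64_mov_membase_imm_size(inst, X86_64_RBP, -8, 42, 8);
 */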
  2567. #define x86_64_mov_mem_imm_size(inst, mem, imm, size) \
  2568. do { \
  2569. if((size) == 2) \
  2570. { \
  2571. *(inst)++ = (unsigned char)0x66; \
  2572. } \
  2573. x86_64_rex_emit((inst), (size), 0, 0, 0); \
  2574. x86_64_opcode1_emit(inst, 0xc6, (size)); \
  2575. x86_64_mem_emit((inst), 0, (mem)); \
  2576. x86_64_imm_emit_max32(inst, (imm), (size)); \
  2577. } while(0)
  2578. #define x86_64_mov_regp_imm_size(inst, dregp, imm, size) \
  2579. do { \
  2580. if((size) == 2) \
  2581. { \
  2582. *(inst)++ = (unsigned char)0x66; \
  2583. } \
  2584. x86_64_rex_emit(inst, (size), 0, 0, (dregp)); \
  2585. x86_64_opcode1_emit(inst, 0xc6, (size)); \
  2586. x86_64_regp_emit((inst), 0, (dregp)); \
  2587. x86_64_imm_emit_max32(inst, (imm), (size)); \
  2588. } while(0)
  2589. #define x86_64_mov_membase_imm_size(inst, basereg, disp, imm, size) \
  2590. do { \
  2591. if((size) == 2) \
  2592. { \
  2593. *(inst)++ = (unsigned char)0x66; \
  2594. } \
  2595. x86_64_rex_emit(inst, (size), 0, 0, (basereg)); \
  2596. x86_64_opcode1_emit(inst, 0xc6, (size)); \
  2597. x86_64_membase_emit((inst), 0, (basereg), (disp)); \
  2598. x86_64_imm_emit_max32(inst, (imm), (size)); \
  2599. } while(0)
  2600. #define x86_64_mov_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
  2601. do { \
  2602. if((size) == 2) \
  2603. { \
  2604. *(inst)++ = (unsigned char)0x66; \
  2605. } \
  2606. x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
  2607. x86_64_opcode1_emit(inst, 0xc6, (size)); \
  2608. x86_64_memindex_emit((inst), 0, (basereg), (disp), (indexreg), (shift)); \
  2609. x86_64_imm_emit_max32(inst, (imm), (size)); \
  2610. } while(0)
  2611. /*
  2612. * Move with sign extension to the given size (signed)
  2613. */
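/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): sign extend
 * the byte in CL to the full 64 bit width of RAX.
 *
 *   x86_64_movsx8_reg_reg_size(inst, X86_64_RAX, X86_64_RCX, 8);
 */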
  2614. #define x86_64_movsx8_reg_reg_size(inst, dreg, sreg, size) \
  2615. do { \
  2616. x86_64_alu2_reg_reg_size((inst), 0x0f, 0xbe, (dreg), (sreg), (size) | 1); \
  2617. }while(0)
  2618. #define x86_64_movsx8_reg_regp_size(inst, dreg, sregp, size) \
  2619. do { \
  2620. x86_64_alu2_reg_regp_size((inst), 0x0f, 0xbe, (dreg), (sregp), (size)); \
  2621. }while(0)
  2622. #define x86_64_movsx8_reg_mem_size(inst, dreg, mem, size) \
  2623. do { \
  2624. x86_64_alu2_reg_mem_size((inst), 0x0f, 0xbe, (dreg), (mem), (size)); \
  2625. }while(0)
  2626. #define x86_64_movsx8_reg_membase_size(inst, dreg, basereg, disp, size) \
  2627. do { \
  2628. x86_64_alu2_reg_membase_size((inst), 0x0f, 0xbe, (dreg), (basereg), (disp), (size)); \
  2629. }while(0)
  2630. #define x86_64_movsx8_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2631. do { \
  2632. x86_64_alu2_reg_memindex_size((inst), 0x0f, 0xbe, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  2633. }while(0)
  2634. #define x86_64_movsx16_reg_reg_size(inst, dreg, sreg, size) \
  2635. do { \
  2636. x86_64_alu2_reg_reg_size((inst), 0x0f, 0xbf, (dreg), (sreg), (size)); \
  2637. }while(0)
  2638. #define x86_64_movsx16_reg_regp_size(inst, dreg, sregp, size) \
  2639. do { \
  2640. x86_64_alu2_reg_regp_size((inst), 0x0f, 0xbf, (dreg), (sregp), (size)); \
  2641. }while(0)
  2642. #define x86_64_movsx16_reg_mem_size(inst, dreg, mem, size) \
  2643. do { \
  2644. x86_64_alu2_reg_mem_size((inst), 0x0f, 0xbf, (dreg), (mem), (size)); \
  2645. }while(0)
  2646. #define x86_64_movsx16_reg_membase_size(inst, dreg, basereg, disp, size) \
  2647. do { \
  2648. x86_64_alu2_reg_membase_size((inst), 0x0f, 0xbf, (dreg), (basereg), (disp), (size)); \
  2649. }while(0)
  2650. #define x86_64_movsx16_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2651. do { \
  2652. x86_64_alu2_reg_memindex_size((inst), 0x0f, 0xbf, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  2653. }while(0)
  2654. #define x86_64_movsx32_reg_reg_size(inst, dreg, sreg, size) \
  2655. do { \
  2656. x86_64_alu1_reg_reg_size((inst), 0x63, (dreg), (sreg), (size)); \
  2657. }while(0)
  2658. #define x86_64_movsx32_reg_regp_size(inst, dreg, sregp, size) \
  2659. do { \
  2660. x86_64_alu1_reg_regp_size((inst), 0x63, (dreg), (sregp), (size)); \
  2661. }while(0)
  2662. #define x86_64_movsx32_reg_mem_size(inst, dreg, mem, size) \
  2663. do { \
  2664. x86_64_alu1_reg_mem_size((inst), 0x63, (dreg), (mem), (size)); \
  2665. }while(0)
  2666. #define x86_64_movsx32_reg_membase_size(inst, dreg, basereg, disp, size) \
  2667. do { \
  2668. x86_64_alu1_reg_membase_size((inst), 0x63, (dreg), (basereg), (disp), (size)); \
  2669. }while(0)
  2670. #define x86_64_movsx32_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2671. do { \
  2672. x86_64_alu1_reg_memindex_size((inst), 0x63, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  2673. }while(0)
  2674. /*
  2675. * Move with zero extension to the given size (unsigned)
  2676. */
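/*
 * Note: there is no movzx32 variant because in 64 bit mode a plain
 * 32 bit mov already zero extends the result into the upper half of
 * the destination register.
 */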
  2677. #define x86_64_movzx8_reg_reg_size(inst, dreg, sreg, size) \
  2678. do { \
  2679. x86_64_alu2_reg_reg_size((inst), 0x0f, 0xb6, (dreg), (sreg), (size) | 1); \
  2680. }while(0)
  2681. #define x86_64_movzx8_reg_regp_size(inst, dreg, sregp, size) \
  2682. do { \
  2683. x86_64_alu2_reg_regp_size((inst), 0x0f, 0xb6, (dreg), (sregp), (size)); \
  2684. }while(0)
  2685. #define x86_64_movzx8_reg_mem_size(inst, dreg, mem, size) \
  2686. do { \
  2687. x86_64_alu2_reg_mem_size((inst), 0x0f, 0xb6, (dreg), (mem), (size)); \
  2688. }while(0)
  2689. #define x86_64_movzx8_reg_membase_size(inst, dreg, basereg, disp, size) \
  2690. do { \
  2691. x86_64_alu2_reg_membase_size((inst), 0x0f, 0xb6, (dreg), (basereg), (disp), (size)); \
  2692. }while(0)
  2693. #define x86_64_movzx8_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2694. do { \
  2695. x86_64_alu2_reg_memindex_size((inst), 0x0f, 0xb6, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  2696. }while(0)
  2697. #define x86_64_movzx16_reg_reg_size(inst, dreg, sreg, size) \
  2698. do { \
  2699. x86_64_alu2_reg_reg_size((inst), 0x0f, 0xb7, (dreg), (sreg), (size)); \
  2700. }while(0)
  2701. #define x86_64_movzx16_reg_regp_size(inst, dreg, sregp, size) \
  2702. do { \
  2703. x86_64_alu2_reg_regp_size((inst), 0x0f, 0xb7, (dreg), (sregp), (size)); \
  2704. }while(0)
  2705. #define x86_64_movzx16_reg_mem_size(inst, dreg, mem, size) \
  2706. do { \
  2707. x86_64_alu2_reg_mem_size((inst), 0x0f, 0xb7, (dreg), (mem), (size)); \
  2708. }while(0)
  2709. #define x86_64_movzx16_reg_membase_size(inst, dreg, basereg, disp, size) \
  2710. do { \
  2711. x86_64_alu2_reg_membase_size((inst), 0x0f, 0xb7, (dreg), (basereg), (disp), (size)); \
  2712. }while(0)
  2713. #define x86_64_movzx16_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  2714. do { \
  2715. x86_64_alu2_reg_memindex_size((inst), 0x0f, 0xb7, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  2716. }while(0)
  2717. /*
  2718. * cmov: conditional move
  2719. */
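/*
 * Usage sketch (assuming an emit-buffer pointer `inst` and the
 * X86_CC_LT condition constant from jit-gen-x86.h): after a signed
 * comparison, copy RCX into RAX only if the result was less-than.
 *
 *   x86_64_cmov_reg_reg_size(inst, X86_CC_LT, X86_64_RAX, X86_64_RCX, 1, 8);
 */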
  2720. #define x86_64_cmov_reg_reg_size(inst, cond, dreg, sreg, is_signed, size) \
  2721. do { \
  2722. if((size) == 2) \
  2723. { \
  2724. *(inst)++ = (unsigned char)0x66; \
  2725. } \
  2726. x86_64_rex_emit((inst), (size), (dreg), 0, (sreg)); \
  2727. *(inst)++ = (unsigned char)0x0f; \
  2728. if((is_signed)) \
  2729. { \
  2730. *(inst)++ = x86_cc_signed_map[(cond)] - 0x30; \
  2731. } \
  2732. else \
  2733. { \
  2734. *(inst)++ = x86_cc_unsigned_map[(cond)] - 0x30; \
  2735. } \
  2736. x86_64_reg_emit((inst), (dreg), (sreg)); \
  2737. } while (0)
  2738. #define x86_64_cmov_reg_regp_size(inst, cond, dreg, sregp, is_signed, size) \
  2739. do { \
  2740. if((size) == 2) \
  2741. { \
  2742. *(inst)++ = (unsigned char)0x66; \
  2743. } \
  2744. x86_64_rex_emit((inst), (size), (dreg), 0, (sregp)); \
  2745. *(inst)++ = (unsigned char)0x0f; \
  2746. if((is_signed)) \
  2747. { \
  2748. *(inst)++ = x86_cc_signed_map[(cond)] - 0x30; \
  2749. } \
  2750. else \
  2751. { \
  2752. *(inst)++ = x86_cc_unsigned_map[(cond)] - 0x30; \
  2753. } \
  2754. x86_64_regp_emit((inst), (dreg), (sregp)); \
  2755. } while (0)
  2756. #define x86_64_cmov_reg_mem_size(inst, cond, dreg, mem, is_signed, size) \
  2757. do { \
  2758. if((size) == 2) \
  2759. { \
  2760. *(inst)++ = (unsigned char)0x66; \
  2761. } \
  2762. x86_64_rex_emit((inst), (size), (dreg), 0, 0); \
  2763. *(inst)++ = (unsigned char)0x0f; \
  2764. if((is_signed)) \
  2765. { \
  2766. *(inst)++ = x86_cc_signed_map[(cond)] - 0x30; \
  2767. } \
  2768. else \
  2769. { \
  2770. *(inst)++ = x86_cc_unsigned_map[(cond)] - 0x30; \
  2771. } \
  2772. x86_64_mem_emit((inst), (dreg), (mem)); \
  2773. } while (0)
  2774. #define x86_64_cmov_reg_membase_size(inst, cond, dreg, basereg, disp, is_signed, size) \
  2775. do { \
  2776. if((size) == 2) \
  2777. { \
  2778. *(inst)++ = (unsigned char)0x66; \
  2779. } \
  2780. x86_64_rex_emit((inst), (size), (dreg), 0, (basereg)); \
  2781. *(inst)++ = (unsigned char)0x0f; \
  2782. if((is_signed)) \
  2783. { \
  2784. *(inst)++ = x86_cc_signed_map[(cond)] - 0x30; \
  2785. } \
  2786. else \
  2787. { \
  2788. *(inst)++ = x86_cc_unsigned_map[(cond)] - 0x30; \
  2789. } \
  2790. x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
  2791. } while (0)
  2792. #define x86_64_cmov_reg_memindex_size(inst, cond, dreg, basereg, disp, indexreg, shift, is_signed, size) \
  2793. do { \
  2794. if((size) == 2) \
  2795. { \
  2796. *(inst)++ = (unsigned char)0x66; \
  2797. } \
  2798. x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
  2799. *(inst)++ = (unsigned char)0x0f; \
  2800. if((is_signed)) \
  2801. { \
  2802. *(inst)++ = x86_cc_signed_map[(cond)] - 0x30; \
  2803. } \
  2804. else \
  2805. { \
  2806. *(inst)++ = x86_cc_unsigned_map[(cond)] - 0x30; \
  2807. } \
  2808. x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
  2809. } while (0)
2810. /*
2811. * Stack manipulation instructions (push and pop)
2812. */
2813. /*
2814. * Push instructions have a default operand size of 64 bit in 64 bit mode.
2815. * There is no way to encode a 32 bit push, so only the sizes 8 and 2
2816. * are allowed.
2817. */
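/*
 * Usage sketch (assuming an emit-buffer pointer `inst`): the typical
 * prologue/epilogue pair saving and later restoring RBP.
 *
 *   x86_64_push_reg_size(inst, X86_64_RBP, 8);
 *   ...
 *   x86_64_pop_reg_size(inst, X86_64_RBP, 8);
 */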
  2818. #define x86_64_push_reg_size(inst, reg, size) \
  2819. do { \
  2820. if((size) == 2) \
  2821. { \
  2822. *(inst)++ = (unsigned char)0x66; \
  2823. } \
  2824. x86_64_rex_emit64((inst), (size), 0, 0, (reg)); \
  2825. *(inst)++ = (unsigned char)0x50 + ((reg) & 0x7); \
  2826. } while(0)
  2827. #define x86_64_push_regp_size(inst, sregp, size) \
  2828. do { \
  2829. if((size) == 2) \
  2830. { \
  2831. *(inst)++ = (unsigned char)0x66; \
  2832. } \
  2833. x86_64_rex_emit64((inst), (size), 0, 0, (sregp)); \
  2834. *(inst)++ = (unsigned char)0xff; \
  2835. x86_64_regp_emit((inst), 6, (sregp)); \
  2836. } while(0)
  2837. #define x86_64_push_mem_size(inst, mem, size) \
  2838. do { \
  2839. if((size) == 2) \
  2840. { \
  2841. *(inst)++ = (unsigned char)0x66; \
  2842. } \
  2843. x86_64_rex_emit64((inst), (size), 0, 0, 0); \
  2844. *(inst)++ = (unsigned char)0xff; \
  2845. x86_64_mem_emit((inst), 6, (mem)); \
  2846. } while(0)
  2847. #define x86_64_push_membase_size(inst, basereg, disp, size) \
  2848. do { \
  2849. if((size) == 2) \
  2850. { \
  2851. *(inst)++ = (unsigned char)0x66; \
  2852. } \
  2853. x86_64_rex_emit64((inst), (size), 0, 0, (basereg)); \
  2854. *(inst)++ = (unsigned char)0xff; \
  2855. x86_64_membase_emit((inst), 6, (basereg), (disp)); \
  2856. } while(0)
  2857. #define x86_64_push_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  2858. do { \
  2859. if((size) == 2) \
  2860. { \
  2861. *(inst)++ = (unsigned char)0x66; \
  2862. } \
  2863. x86_64_rex_emit64((inst), (size), 0, (indexreg), (basereg)); \
  2864. *(inst)++ = (unsigned char)0xff; \
  2865. x86_64_memindex_emit((inst), 6, (basereg), (disp), (indexreg), (shift)); \
  2866. } while(0)
  2867. /*
  2868. * We can push only 32 bit immediate values.
  2869. * The value is sign extended to 64 bit on the stack.
  2870. */
  2871. #define x86_64_push_imm(inst, imm) \
  2872. do { \
  2873. int _imm = (int) (imm); \
  2874. if(x86_is_imm8(_imm)) \
  2875. { \
  2876. *(inst)++ = (unsigned char)0x6A; \
  2877. x86_imm_emit8 ((inst), (_imm)); \
  2878. } \
  2879. else \
  2880. { \
  2881. *(inst)++ = (unsigned char)0x68; \
  2882. x86_imm_emit32((inst), (_imm)); \
  2883. } \
  2884. } while(0)
2885. /*
2886. * Use this version if you need a specific width for the value
2887. * pushed. The value on the stack is always 64 bits wide.
2888. */
  2889. #define x86_64_push_imm_size(inst, imm, size) \
  2890. do { \
  2891. switch(size) \
  2892. { \
  2893. case 1: \
  2894. { \
  2895. *(inst)++ = (unsigned char)0x6A; \
  2896. x86_imm_emit8((inst), (imm)); \
  2897. } \
  2898. break; \
  2899. case 2: \
  2900. { \
  2901. *(inst)++ = (unsigned char)0x66; \
  2902. *(inst)++ = (unsigned char)0x68; \
  2903. x86_imm_emit16((inst), (imm)); \
  2904. } \
  2905. break; \
  2906. case 4: \
  2907. { \
  2908. *(inst)++ = (unsigned char)0x68; \
  2909. x86_imm_emit32((inst), (imm)); \
  2910. }\
  2911. } \
  2912. } while (0)
  2913. /*
  2914. * Pop instructions have a default size of 64 bit in 64 bit mode.
  2915. * There is no way to encode a 32 bit pop.
  2916. * So only the sizes 2 and 8 are allowed.
  2917. */
  2918. #define x86_64_pop_reg_size(inst, dreg, size) \
  2919. do { \
  2920. if((size) == 2) \
  2921. { \
  2922. *(inst)++ = (unsigned char)0x66; \
  2923. } \
  2924. x86_64_rex_emit64((inst), 0, 0, 0, (dreg)); \
  2925. *(inst)++ = (unsigned char)0x58 + ((dreg) & 0x7); \
  2926. } while(0)
  2927. #define x86_64_pop_regp_size(inst, dregp, size) \
  2928. do { \
  2929. if((size) == 2) \
  2930. { \
  2931. *(inst)++ = (unsigned char)0x66; \
  2932. } \
  2933. x86_64_rex_emit64((inst), (size), 0, 0, (dregp)); \
  2934. *(inst)++ = (unsigned char)0x8f; \
  2935. x86_64_regp_emit((inst), 0, (dregp)); \
  2936. } while(0)
  2937. #define x86_64_pop_mem_size(inst, mem, size) \
  2938. do { \
  2939. if((size) == 2) \
  2940. { \
  2941. *(inst)++ = (unsigned char)0x66; \
  2942. } \
  2943. *(inst)++ = (unsigned char)0x8f; \
  2944. x86_64_mem_emit((inst), 0, (mem)); \
  2945. } while(0)
  2946. #define x86_64_pop_membase_size(inst, basereg, disp, size) \
  2947. do { \
  2948. if((size) == 2) \
  2949. { \
  2950. *(inst)++ = (unsigned char)0x66; \
  2951. } \
  2952. x86_64_rex_emit64((inst), (size), 0, 0,(basereg)); \
  2953. *(inst)++ = (unsigned char)0x8f; \
  2954. x86_64_membase_emit((inst), 0, (basereg), (disp)); \
  2955. } while(0)
  2956. #define x86_64_pop_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  2957. do { \
  2958. if((size) == 2) \
  2959. { \
  2960. *(inst)++ = (unsigned char)0x66; \
  2961. } \
  2962. x86_64_rex_emit64((inst), (size), 0, (indexreg), (basereg)); \
  2963. *(inst)++ = (unsigned char)0x8f; \
  2964. x86_64_memindex_emit((inst), 0, (basereg), (disp), (indexreg), (shift)); \
  2965. } while(0)
  2966. /*
  2967. * control flow change instructions
  2968. */
  2969. /*
  2970. * call
  2971. */
2972. /*
2973. * call_imm is a relative call.
2974. * imm has to be a 32 bit offset from the instruction following the
2975. * call instruction (absolute - (inst + 5)).
2976. * For offsets greater than 32 bit an indirect call (via register)
2977. * has to be used.
2978. */
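/*
 * Sketch (hypothetical absolute target `target`, assuming an
 * emit-buffer pointer `inst`): the displacement is relative to the
 * end of the 5 byte call instruction, so it has to be computed
 * before the macro advances `inst`.
 *
 *   unsigned char *next = inst + 5;
 *   x86_64_call_imm(inst, (unsigned char *)target - next);
 */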
  2979. #define x86_64_call_imm(inst, imm) \
  2980. do { \
  2981. x86_call_imm((inst), (imm)); \
  2982. } while(0)
  2983. #define x86_64_call_reg(inst, reg) \
  2984. do { \
  2985. x86_64_alu1_reg((inst), 0xff, 2, (reg)); \
  2986. } while(0)
  2987. #define x86_64_call_regp(inst, regp) \
  2988. do { \
  2989. x86_64_alu1_regp((inst), 0xff, 2, (regp)); \
  2990. } while(0)
2991. /*
2992. * call_mem is an absolute indirect call.
2993. * To be able to use this instruction the address must be either
2994. * in the lowest 2GB or in the highest 2GB address range.
2995. * This is because mem is sign extended to 64 bit.
2996. */
  2997. #define x86_64_call_mem(inst, mem) \
  2998. do { \
  2999. x86_64_alu1_mem((inst), 0xff, 2, (mem)); \
  3000. } while(0)
  3001. #define x86_64_call_membase(inst, basereg, disp) \
  3002. do { \
  3003. x86_64_alu1_membase((inst), 0xff, 2, (basereg), (disp)); \
  3004. } while(0)
  3005. #define x86_64_call_memindex(inst, basereg, disp, indexreg, shift) \
  3006. do { \
  3007. x86_64_alu1_memindex((inst), 0xff, 2, (basereg), (disp), (indexreg), (shift)); \
  3008. } while(0)
  3009. /*
  3010. * jmp
  3011. */
  3012. /*
  3013. * unconditional relative jumps
  3014. */
  3015. #define x86_64_jmp_imm8(inst, disp) \
  3016. do { \
  3017. *(inst)++ = (unsigned char)0xEB; \
  3018. x86_imm_emit8((inst), (disp)); \
  3019. } while(0)
  3020. #define x86_64_jmp_imm(inst, disp) \
  3021. do { \
  3022. *(inst)++ = (unsigned char)0xE9; \
  3023. x86_imm_emit32((inst), (disp)); \
  3024. } while(0)
  3025. /*
  3026. * unconditional indirect jumps
  3027. */
  3028. #define x86_64_jmp_reg(inst, reg) \
  3029. do { \
  3030. x86_64_alu1_reg((inst), 0xff, 4, (reg)); \
  3031. } while(0)
  3032. #define x86_64_jmp_regp(inst, regp) \
  3033. do { \
  3034. x86_64_alu1_regp((inst), 0xff, 4, (regp)); \
  3035. } while(0)
  3036. #define x86_64_jmp_mem(inst, mem) \
  3037. do { \
  3038. x86_64_alu1_mem((inst), 0xff, 4, (mem)); \
  3039. } while(0)
  3040. #define x86_64_jmp_membase(inst, basereg, disp) \
  3041. do { \
  3042. x86_64_alu1_membase((inst), 0xff, 4, (basereg), (disp)); \
  3043. } while(0)
  3044. #define x86_64_jmp_memindex(inst, basereg, disp, indexreg, shift) \
  3045. do { \
  3046. x86_64_alu1_memindex((inst), 0xff, 4, (basereg), (disp), (indexreg), (shift)); \
  3047. } while(0)
  3048. /*
  3049. * Set the low byte in a register to 0x01 if a condition is met
  3050. * or 0x00 otherwise.
  3051. */
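/*
 * Usage sketch (assuming the x86_64_cmp_reg_reg_size macro defined
 * earlier in this file, the X86_CC_LT constant from jit-gen-x86.h
 * and an emit-buffer pointer `inst`): materialize the signed
 * comparison RAX < RCX as 0 or 1 in RAX. setcc only writes the low
 * byte, so the result is widened with movzx.
 *
 *   x86_64_cmp_reg_reg_size(inst, X86_64_RAX, X86_64_RCX, 8);
 *   x86_64_set_reg(inst, X86_CC_LT, X86_64_RAX, 1);
 *   x86_64_movzx8_reg_reg_size(inst, X86_64_RAX, X86_64_RAX, 4);
 */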
  3052. #define x86_64_set_reg(inst, cond, dreg, is_signed) \
  3053. do { \
  3054. x86_64_rex_emit((inst), 1, 0, 0, (dreg)); \
  3055. *(inst)++ = (unsigned char)0x0f; \
  3056. if((is_signed)) \
  3057. { \
  3058. *(inst)++ = x86_cc_signed_map[(cond)] + 0x20; \
  3059. } \
  3060. else \
  3061. { \
  3062. *(inst)++ = x86_cc_unsigned_map[(cond)] + 0x20; \
  3063. } \
  3064. x86_64_reg_emit((inst), 0, (dreg)); \
  3065. } while(0)
  3066. #define x86_64_set_mem(inst, cond, mem, is_signed) \
  3067. do { \
  3068. *(inst)++ = (unsigned char)0x0f; \
  3069. if((is_signed)) \
  3070. { \
  3071. *(inst)++ = x86_cc_signed_map[(cond)] + 0x20; \
  3072. } \
  3073. else \
  3074. { \
  3075. *(inst)++ = x86_cc_unsigned_map[(cond)] + 0x20; \
  3076. } \
  3077. x86_64_mem_emit((inst), 0, (mem)); \
  3078. } while(0)
  3079. #define x86_64_set_membase(inst, cond, basereg, disp, is_signed) \
  3080. do { \
  3081. x86_64_rex_emit((inst), 4, 0, 0, (basereg)); \
  3082. *(inst)++ = (unsigned char)0x0f; \
  3083. if((is_signed)) \
  3084. { \
  3085. *(inst)++ = x86_cc_signed_map[(cond)] + 0x20; \
  3086. } \
  3087. else \
  3088. { \
  3089. *(inst)++ = x86_cc_unsigned_map[(cond)] + 0x20; \
  3090. } \
  3091. x86_64_membase_emit((inst), 0, (basereg), (disp)); \
  3092. } while(0)
  3093. /*
  3094. * ret
  3095. */
  3096. #define x86_64_ret(inst) \
  3097. do { \
  3098. x86_ret((inst)); \
  3099. } while(0)
  3100. /*
  3101. * xchg: Exchange values
  3102. */
  3103. #define x86_64_xchg_reg_reg_size(inst, dreg, sreg, size) \
  3104. do { \
  3105. if(((size) > 1) && ((dreg) == X86_64_RAX || (sreg) == X86_64_RAX)) \
  3106. { \
  3107. if((size) == 2) \
  3108. { \
  3109. *(inst)++ = (unsigned char)0x66; \
  3110. } \
  3111. if((dreg) == X86_64_RAX) \
  3112. { \
  3113. x86_64_rex_emit((inst), (size), 0, 0, (sreg)); \
  3114. *(inst)++ = (unsigned char)(0x90 + (unsigned char)(sreg & 0x7)); \
  3115. } \
  3116. else \
  3117. { \
  3118. x86_64_rex_emit((inst), (size), 0, 0, (dreg)); \
  3119. *(inst)++ = (unsigned char)(0x90 + (unsigned char)(dreg & 0x7)); \
  3120. } \
  3121. } \
  3122. else \
  3123. { \
  3124. if((size) == 1) \
  3125. { \
  3126. x86_64_alu1_reg_reg_size((inst), 0x86, (dreg), (sreg), (size)); \
  3127. } \
  3128. else \
  3129. { \
  3130. x86_64_alu1_reg_reg_size((inst), 0x87, (dreg), (sreg), (size)); \
  3131. } \
  3132. } \
  3133. } while(0)
  3134. /*
  3135. * XMM instructions
  3136. */
  3137. /*
  3138. * xmm instructions with two opcodes
  3139. */
  3140. #define x86_64_xmm2_reg_reg(inst, opc1, opc2, r, reg) \
  3141. do { \
  3142. x86_64_rex_emit(inst, 0, (r), 0, (reg)); \
  3143. *(inst)++ = (unsigned char)(opc1); \
  3144. *(inst)++ = (unsigned char)(opc2); \
  3145. x86_64_reg_emit(inst, (r), (reg)); \
  3146. } while(0)
  3147. #define x86_64_xmm2_reg_regp(inst, opc1, opc2, r, regp) \
  3148. do { \
  3149. x86_64_rex_emit(inst, 0, (r), 0, (regp)); \
  3150. *(inst)++ = (unsigned char)(opc1); \
  3151. *(inst)++ = (unsigned char)(opc2); \
  3152. x86_64_regp_emit(inst, (r), (regp)); \
  3153. } while(0)
  3154. #define x86_64_xmm2_reg_mem(inst, opc1, opc2, r, mem) \
  3155. do { \
  3156. x86_64_rex_emit(inst, 0, (r), 0, 0); \
  3157. *(inst)++ = (unsigned char)(opc1); \
  3158. *(inst)++ = (unsigned char)(opc2); \
  3159. x86_64_mem_emit(inst, (r), (mem)); \
  3160. } while(0)
  3161. #define x86_64_xmm2_reg_membase(inst, opc1, opc2, r, basereg, disp) \
  3162. do { \
  3163. x86_64_rex_emit(inst, 0, (r), 0, (basereg)); \
  3164. *(inst)++ = (unsigned char)(opc1); \
  3165. *(inst)++ = (unsigned char)(opc2); \
  3166. x86_64_membase_emit(inst, (r), (basereg), (disp)); \
  3167. } while(0)
  3168. #define x86_64_xmm2_reg_memindex(inst, opc1, opc2, r, basereg, disp, indexreg, shift) \
  3169. do { \
  3170. x86_64_rex_emit(inst, 0, (r), (indexreg), (basereg)); \
  3171. *(inst)++ = (unsigned char)(opc1); \
  3172. *(inst)++ = (unsigned char)(opc2); \
  3173. x86_64_memindex_emit((inst), (r), (basereg), (disp), (indexreg), (shift)); \
  3174. } while(0)
  3175. /*
  3176. * xmm instructions with a prefix and two opcodes
  3177. */
  3178. #define x86_64_p1_xmm2_reg_reg_size(inst, p1, opc1, opc2, r, reg, size) \
  3179. do { \
  3180. *(inst)++ = (unsigned char)(p1); \
  3181. x86_64_rex_emit(inst, (size), (r), 0, (reg)); \
  3182. *(inst)++ = (unsigned char)(opc1); \
  3183. *(inst)++ = (unsigned char)(opc2); \
  3184. x86_64_reg_emit(inst, (r), (reg)); \
  3185. } while(0)
  3186. #define x86_64_p1_xmm2_reg_regp_size(inst, p1, opc1, opc2, r, regp, size) \
  3187. do { \
  3188. *(inst)++ = (unsigned char)(p1); \
  3189. x86_64_rex_emit(inst, (size), (r), 0, (regp)); \
  3190. *(inst)++ = (unsigned char)(opc1); \
  3191. *(inst)++ = (unsigned char)(opc2); \
  3192. x86_64_regp_emit(inst, (r), (regp)); \
  3193. } while(0)
  3194. #define x86_64_p1_xmm2_reg_mem_size(inst, p1, opc1, opc2, r, mem, size) \
  3195. do { \
  3196. *(inst)++ = (unsigned char)(p1); \
  3197. x86_64_rex_emit(inst, (size), (r), 0, 0); \
  3198. *(inst)++ = (unsigned char)(opc1); \
  3199. *(inst)++ = (unsigned char)(opc2); \
  3200. x86_64_mem_emit(inst, (r), (mem)); \
  3201. } while(0)
  3202. #define x86_64_p1_xmm2_reg_membase_size(inst, p1, opc1, opc2, r, basereg, disp, size) \
  3203. do { \
  3204. *(inst)++ = (unsigned char)(p1); \
  3205. x86_64_rex_emit(inst, (size), (r), 0, (basereg)); \
  3206. *(inst)++ = (unsigned char)(opc1); \
  3207. *(inst)++ = (unsigned char)(opc2); \
  3208. x86_64_membase_emit(inst, (r), (basereg), (disp)); \
  3209. } while(0)
  3210. #define x86_64_p1_xmm2_reg_memindex_size(inst, p1, opc1, opc2, r, basereg, disp, indexreg, shift, size) \
  3211. do { \
  3212. *(inst)++ = (unsigned char)(p1); \
  3213. x86_64_rex_emit(inst, (size), (r), (indexreg), (basereg)); \
  3214. *(inst)++ = (unsigned char)(opc1); \
  3215. *(inst)++ = (unsigned char)(opc2); \
  3216. x86_64_memindex_emit((inst), (r), (basereg), (disp), (indexreg), (shift)); \
  3217. } while(0)
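/*
 * Illustrative sketch: the mandatory prefix is emitted before the REX
 * byte, as the architecture requires. For example
 *
 *   x86_64_p1_xmm2_reg_reg_size(inst, 0xf2, 0x0f, 0x58, X86_64_XMM0, X86_64_XMM1, 0);
 *
 * should emit F2 0F 58 C1, i.e. "addsd %xmm1, %xmm0".
 */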
  3218. /*
  3219. * xmm instructions with a prefix and three opcodes
  3220. */
  3221. #define x86_64_p1_xmm3_reg_reg_size(inst, p1, opc1, opc2, opc3, r, reg, size) \
  3222. do { \
  3223. *(inst)++ = (unsigned char)(p1); \
  3224. x86_64_rex_emit(inst, (size), (r), 0, (reg)); \
  3225. *(inst)++ = (unsigned char)(opc1); \
  3226. *(inst)++ = (unsigned char)(opc2); \
  3227. *(inst)++ = (unsigned char)(opc3); \
  3228. x86_64_reg_emit(inst, (r), (reg)); \
  3229. } while(0)
  3230. #define x86_64_p1_xmm3_reg_regp_size(inst, p1, opc1, opc2, opc3, r, regp, size) \
  3231. do { \
  3232. *(inst)++ = (unsigned char)(p1); \
  3233. x86_64_rex_emit(inst, (size), (r), 0, (regp)); \
  3234. *(inst)++ = (unsigned char)(opc1); \
  3235. *(inst)++ = (unsigned char)(opc2); \
  3236. *(inst)++ = (unsigned char)(opc3); \
  3237. x86_64_regp_emit(inst, (r), (regp)); \
  3238. } while(0)
  3239. #define x86_64_p1_xmm3_reg_mem_size(inst, p1, opc1, opc2, opc3, r, mem, size) \
  3240. do { \
  3241. *(inst)++ = (unsigned char)(p1); \
  3242. x86_64_rex_emit(inst, (size), (r), 0, 0); \
  3243. *(inst)++ = (unsigned char)(opc1); \
  3244. *(inst)++ = (unsigned char)(opc2); \
  3245. *(inst)++ = (unsigned char)(opc3); \
  3246. x86_64_mem_emit(inst, (r), (mem)); \
  3247. } while(0)
  3248. #define x86_64_p1_xmm3_reg_membase_size(inst, p1, opc1, opc2, opc3, r, basereg, disp, size) \
  3249. do { \
  3250. *(inst)++ = (unsigned char)(p1); \
  3251. x86_64_rex_emit(inst, (size), (r), 0, (basereg)); \
  3252. *(inst)++ = (unsigned char)(opc1); \
  3253. *(inst)++ = (unsigned char)(opc2); \
  3254. *(inst)++ = (unsigned char)(opc3); \
  3255. x86_64_membase_emit(inst, (r), (basereg), (disp)); \
  3256. } while(0)
  3257. #define x86_64_p1_xmm3_reg_memindex_size(inst, p1, opc1, opc2, opc3, r, basereg, disp, indexreg, shift, size) \
  3258. do { \
  3259. *(inst)++ = (unsigned char)(p1); \
  3260. x86_64_rex_emit(inst, (size), (r), (indexreg), (basereg)); \
  3261. *(inst)++ = (unsigned char)(opc1); \
  3262. *(inst)++ = (unsigned char)(opc2); \
  3263. *(inst)++ = (unsigned char)(opc3); \
  3264. x86_64_memindex_emit((inst), (r), (basereg), (disp), (indexreg), (shift)); \
  3265. } while(0)
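/*
 * Illustrative sketch: the three-opcode form covers the 0x0f 0x3a escape
 * used by SSE4.1 instructions, which additionally take an immediate byte:
 *
 *   x86_64_p1_xmm3_reg_reg_size(inst, 0x66, 0x0f, 0x3a, 0x0b, X86_64_XMM0, X86_64_XMM1, 0);
 *   x86_imm_emit8(inst, 3);
 *
 * should emit 66 0F 3A 0B C1 03, i.e. "roundsd $3, %xmm1, %xmm0".
 */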
  3266. /*
3267. * xmm1: Macros that take an opcode from the XMM1_* enum defined above
  3268. */
  3269. #define x86_64_xmm1_reg_reg(inst, opc, dreg, sreg, is_double) \
  3270. do { \
  3271. x86_64_p1_xmm2_reg_reg_size((inst), ((is_double) ? 0xf2 : 0xf3), 0x0f, (opc), (dreg), (sreg), 0); \
  3272. } while(0)
  3273. #define x86_64_xmm1_reg_regp(inst, opc, dreg, sregp, is_double) \
  3274. do { \
  3275. x86_64_p1_xmm2_reg_regp_size((inst), ((is_double) ? 0xf2 : 0xf3), 0x0f, (opc), (dreg), (sregp), 0); \
  3276. } while(0)
  3277. #define x86_64_xmm1_reg_mem(inst, opc, dreg, mem, is_double) \
  3278. do { \
  3279. x86_64_p1_xmm2_reg_mem_size((inst), ((is_double) ? 0xf2 : 0xf3), 0x0f, (opc), (dreg), (mem), 0); \
  3280. } while(0)
  3281. #define x86_64_xmm1_reg_membase(inst, opc, dreg, basereg, disp, is_double) \
  3282. do { \
  3283. x86_64_p1_xmm2_reg_membase_size((inst), ((is_double) ? 0xf2 : 0xf3), 0x0f, (opc), (dreg), (basereg), (disp), 0); \
  3284. } while(0)
  3285. #define x86_64_xmm1_reg_memindex(inst, opc, dreg, basereg, disp, indexreg, shift, is_double) \
  3286. do { \
  3287. x86_64_p1_xmm2_reg_memindex_size((inst), ((is_double) ? 0xf2 : 0xf3), 0x0f, (opc), (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3288. } while(0)
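/*
 * Illustrative sketch: combined with the XMM1_* opcode enum, these macros
 * select the precision via the prefix. For example
 *
 *   x86_64_xmm1_reg_reg(inst, XMM1_ADD, X86_64_XMM0, X86_64_XMM1, 1);
 *
 * should emit F2 0F 58 C1 ("addsd %xmm1, %xmm0"), while is_double == 0
 * would emit F3 0F 58 C1 ("addss %xmm1, %xmm0").
 */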
  3289. /*
  3290. * Load and store MXCSR register state
  3291. */
  3292. /*
  3293. * ldmxcsr: Load MXCSR register
  3294. */
  3295. #define x86_64_ldmxcsr_regp(inst, sregp) \
  3296. do { \
  3297. x86_64_xmm2_reg_regp((inst), 0x0f, 0xae, 2, (sregp)); \
  3298. } while(0)
  3299. #define x86_64_ldmxcsr_mem(inst, mem) \
  3300. do { \
  3301. x86_64_xmm2_reg_mem((inst), 0x0f, 0xae, 2, (mem)); \
  3302. } while(0)
  3303. #define x86_64_ldmxcsr_membase(inst, basereg, disp) \
  3304. do { \
  3305. x86_64_xmm2_reg_membase((inst), 0x0f, 0xae, 2, (basereg), (disp)); \
  3306. } while(0)
  3307. #define x86_64_ldmxcsr_memindex(inst, basereg, disp, indexreg, shift) \
  3308. do { \
  3309. x86_64_xmm2_reg_memindex((inst), 0x0f, 0xae, 2, (basereg), (disp), (indexreg), (shift)); \
  3310. } while(0)
  3311. /*
  3312. * stmxcsr: Store MXCSR register
  3313. */
  3314. #define x86_64_stmxcsr_regp(inst, sregp) \
  3315. do { \
  3316. x86_64_xmm2_reg_regp((inst), 0x0f, 0xae, 3, (sregp)); \
  3317. } while(0)
  3318. #define x86_64_stmxcsr_mem(inst, mem) \
  3319. do { \
  3320. x86_64_xmm2_reg_mem((inst), 0x0f, 0xae, 3, (mem)); \
  3321. } while(0)
  3322. #define x86_64_stmxcsr_membase(inst, basereg, disp) \
  3323. do { \
  3324. x86_64_xmm2_reg_membase((inst), 0x0f, 0xae, 3, (basereg), (disp)); \
  3325. } while(0)
  3326. #define x86_64_stmxcsr_memindex(inst, basereg, disp, indexreg, shift) \
  3327. do { \
  3328. x86_64_xmm2_reg_memindex((inst), 0x0f, 0xae, 3, (basereg), (disp), (indexreg), (shift)); \
  3329. } while(0)
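/*
 * Illustrative sketch (assumption: a free 4 byte scratch slot at -8(%rsp),
 * e.g. in the System V red zone): a typical pattern is to spill MXCSR,
 * modify the rounding control bits in memory and reload it:
 *
 *   x86_64_stmxcsr_membase(inst, X86_64_RSP, -8);   0F AE 5C 24 F8
 *   ... modify the 32 bit value at -8(%rsp) ...
 *   x86_64_ldmxcsr_membase(inst, X86_64_RSP, -8);   0F AE 54 24 F8
 */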
  3330. /*
  3331. * Move instructions
  3332. */
  3333. /*
  3334. * movd: Move doubleword from/to xmm register
  3335. */
  3336. #define x86_64_movd_xreg_reg(inst, dreg, sreg) \
  3337. do { \
  3338. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x6e, (dreg), (sreg), 4); \
  3339. } while(0)
  3340. #define x86_64_movd_xreg_mem(inst, dreg, mem) \
  3341. do { \
  3342. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x6e, (dreg), (mem), 4); \
  3343. } while(0)
  3344. #define x86_64_movd_xreg_regp(inst, dreg, sregp) \
  3345. do { \
  3346. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x6e, (dreg), (sregp), 4); \
  3347. } while(0)
  3348. #define x86_64_movd_xreg_membase(inst, dreg, basereg, disp) \
  3349. do { \
  3350. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x6e, (dreg), (basereg), (disp), 4); \
  3351. } while(0)
  3352. #define x86_64_movd_xreg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3353. do { \
  3354. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x6e, (dreg), (basereg), (disp), (indexreg), (shift), 4); \
  3355. } while(0)
  3356. #define x86_64_movd_reg_xreg(inst, dreg, sreg) \
  3357. do { \
  3358. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x7e, (sreg), (dreg), 4); \
  3359. } while(0)
  3360. #define x86_64_movd_mem_xreg(inst, mem, sreg) \
  3361. do { \
  3362. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x7e, (sreg), (mem), 4); \
  3363. } while(0)
  3364. #define x86_64_movd_regp_xreg(inst, dregp, sreg) \
  3365. do { \
  3366. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x7e, (sreg), (dregp), 4); \
  3367. } while(0)
  3368. #define x86_64_movd_membase_xreg(inst, basereg, disp, sreg) \
  3369. do { \
  3370. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x7e, (sreg), (basereg), (disp), 4); \
  3371. } while(0)
  3372. #define x86_64_movd_memindex_xreg(inst, basereg, disp, indexreg, shift, sreg) \
  3373. do { \
  3374. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x7e, (sreg), (basereg), (disp), (indexreg), (shift), 4); \
  3375. } while(0)
  3376. /*
  3377. * movq: Move quadword from/to xmm register
  3378. */
  3379. #define x86_64_movq_xreg_reg(inst, dreg, sreg) \
  3380. do { \
  3381. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x6e, (dreg), (sreg), 8); \
  3382. } while(0)
  3383. #define x86_64_movq_xreg_mem(inst, dreg, mem) \
  3384. do { \
  3385. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x6e, (dreg), (mem), 8); \
  3386. } while(0)
  3387. #define x86_64_movq_xreg_regp(inst, dreg, sregp) \
  3388. do { \
  3389. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x6e, (dreg), (sregp), 8); \
  3390. } while(0)
  3391. #define x86_64_movq_xreg_membase(inst, dreg, basereg, disp) \
  3392. do { \
  3393. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x6e, (dreg), (basereg), (disp), 8); \
  3394. } while(0)
  3395. #define x86_64_movq_xreg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3396. do { \
  3397. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x6e, (dreg), (basereg), (disp), (indexreg), (shift), 8); \
  3398. } while(0)
  3399. #define x86_64_movq_reg_xreg(inst, dreg, sreg) \
  3400. do { \
  3401. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x7e, (sreg), (dreg), 8); \
  3402. } while(0)
  3403. #define x86_64_movq_mem_xreg(inst, mem, sreg) \
  3404. do { \
  3405. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x7e, (sreg), (mem), 8); \
  3406. } while(0)
  3407. #define x86_64_movq_regp_xreg(inst, dregp, sreg) \
  3408. do { \
  3409. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x7e, (sreg), (dregp), 8); \
  3410. } while(0)
  3411. #define x86_64_movq_membase_xreg(inst, basereg, disp, sreg) \
  3412. do { \
  3413. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x7e, (sreg), (basereg), (disp), 8); \
  3414. } while(0)
  3415. #define x86_64_movq_memindex_xreg(inst, basereg, disp, indexreg, shift, sreg) \
  3416. do { \
  3417. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x7e, (sreg), (basereg), (disp), (indexreg), (shift), 8); \
  3418. } while(0)
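/*
 * Illustrative sketch: movd and movq share the 0x6e/0x7e opcodes; the
 * size argument selects between movd (4, no REX.W) and movq (8, REX.W):
 *
 *   x86_64_movq_xreg_reg(inst, X86_64_XMM0, X86_64_RAX);
 *
 * should emit 66 48 0F 6E C0 ("movq %rax, %xmm0"); the movd variant emits
 * the same sequence without the 48 REX byte.
 */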
  3419. /*
3420. * movaps: Move aligned packed single precision values (16 bytes)
  3421. */
  3422. #define x86_64_movaps_reg_reg(inst, dreg, sreg) \
  3423. do { \
  3424. x86_64_xmm2_reg_reg((inst), 0x0f, 0x28, (dreg), (sreg)); \
  3425. } while(0)
  3426. #define x86_64_movaps_regp_reg(inst, dregp, sreg) \
  3427. do { \
  3428. x86_64_xmm2_reg_regp((inst), 0x0f, 0x29, (sreg), (dregp)); \
  3429. } while(0)
  3430. #define x86_64_movaps_mem_reg(inst, mem, sreg) \
  3431. do { \
  3432. x86_64_xmm2_reg_mem((inst), 0x0f, 0x29, (sreg), (mem)); \
  3433. } while(0)
  3434. #define x86_64_movaps_membase_reg(inst, basereg, disp, sreg) \
  3435. do { \
  3436. x86_64_xmm2_reg_membase((inst), 0x0f, 0x29, (sreg), (basereg), (disp)); \
  3437. } while(0)
  3438. #define x86_64_movaps_memindex_reg(inst, basereg, disp, indexreg, shift, sreg) \
  3439. do { \
  3440. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x29, (sreg), (basereg), (disp), (indexreg), (shift)); \
  3441. } while(0)
  3442. #define x86_64_movaps_reg_regp(inst, dreg, sregp) \
  3443. do { \
  3444. x86_64_xmm2_reg_regp((inst), 0x0f, 0x28, (dreg), (sregp)); \
  3445. } while(0)
  3446. #define x86_64_movaps_reg_mem(inst, dreg, mem) \
  3447. do { \
  3448. x86_64_xmm2_reg_mem((inst), 0x0f, 0x28, (dreg), (mem)); \
  3449. } while(0)
  3450. #define x86_64_movaps_reg_membase(inst, dreg, basereg, disp) \
  3451. do { \
  3452. x86_64_xmm2_reg_membase((inst), 0x0f, 0x28, (dreg), (basereg), (disp)); \
  3453. } while(0)
  3454. #define x86_64_movaps_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3455. do { \
  3456. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x28, (dreg), (basereg), (disp), (indexreg), (shift)); \
  3457. } while(0)
  3458. /*
3459. * movups: Move unaligned packed single precision values (16 bytes)
  3460. */
  3461. #define x86_64_movups_reg_reg(inst, dreg, sreg) \
  3462. do { \
  3463. x86_64_xmm2_reg_reg((inst), 0x0f, 0x10, (dreg), (sreg)); \
  3464. } while(0)
  3465. #define x86_64_movups_regp_reg(inst, dregp, sreg) \
  3466. do { \
  3467. x86_64_xmm2_reg_regp((inst), 0x0f, 0x11, (sreg), (dregp)); \
  3468. } while(0)
  3469. #define x86_64_movups_mem_reg(inst, mem, sreg) \
  3470. do { \
  3471. x86_64_xmm2_reg_mem((inst), 0x0f, 0x11, (sreg), (mem)); \
  3472. } while(0)
  3473. #define x86_64_movups_membase_reg(inst, basereg, disp, sreg) \
  3474. do { \
  3475. x86_64_xmm2_reg_membase((inst), 0x0f, 0x11, (sreg), (basereg), (disp)); \
  3476. } while(0)
  3477. #define x86_64_movups_memindex_reg(inst, basereg, disp, indexreg, shift, sreg) \
  3478. do { \
  3479. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x11, (sreg), (basereg), (disp), (indexreg), (shift)); \
  3480. } while(0)
  3481. #define x86_64_movups_reg_regp(inst, dreg, sregp) \
  3482. do { \
  3483. x86_64_xmm2_reg_regp((inst), 0x0f, 0x10, (dreg), (sregp)); \
  3484. } while(0)
  3485. #define x86_64_movups_reg_mem(inst, dreg, mem) \
  3486. do { \
  3487. x86_64_xmm2_reg_mem((inst), 0x0f, 0x10, (dreg), (mem)); \
  3488. } while(0)
  3489. #define x86_64_movups_reg_membase(inst, dreg, basereg, disp) \
  3490. do { \
  3491. x86_64_xmm2_reg_membase((inst), 0x0f, 0x10, (dreg), (basereg), (disp)); \
  3492. } while(0)
  3493. #define x86_64_movups_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3494. do { \
  3495. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x10, (dreg), (basereg), (disp), (indexreg), (shift)); \
  3496. } while(0)
  3497. /*
  3498. * movsd: Move scalar double (64bit float)
  3499. */
  3500. #define x86_64_movsd_reg_reg(inst, dreg, sreg) \
  3501. do { \
  3502. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x10, (dreg), (sreg), 0); \
  3503. } while(0)
  3504. #define x86_64_movsd_regp_reg(inst, dregp, sreg) \
  3505. do { \
  3506. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x11, (sreg), (dregp), 0); \
  3507. } while(0)
  3508. #define x86_64_movsd_mem_reg(inst, mem, sreg) \
  3509. do { \
  3510. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x11, (sreg), (mem), 0); \
  3511. } while(0)
  3512. #define x86_64_movsd_membase_reg(inst, basereg, disp, sreg) \
  3513. do { \
  3514. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x11, (sreg), (basereg), (disp), 0); \
  3515. } while(0)
  3516. #define x86_64_movsd_memindex_reg(inst, basereg, disp, indexreg, shift, sreg) \
  3517. do { \
  3518. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x11, (sreg), (basereg), (disp), (indexreg), (shift), 0); \
  3519. } while(0)
  3520. #define x86_64_movsd_reg_regp(inst, dreg, sregp) \
  3521. do { \
  3522. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x10, (dreg), (sregp), 0); \
  3523. } while(0)
  3524. #define x86_64_movsd_reg_mem(inst, dreg, mem) \
  3525. do { \
  3526. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x10, (dreg), (mem), 0); \
  3527. } while(0)
  3528. #define x86_64_movsd_reg_membase(inst, dreg, basereg, disp) \
  3529. do { \
  3530. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x10, (dreg), (basereg), (disp), 0); \
  3531. } while(0)
  3532. #define x86_64_movsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3533. do { \
  3534. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x10, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3535. } while(0)
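/*
 * Illustrative sketch: loading a local double from the frame, assuming it
 * lives at -8(%rbp):
 *
 *   x86_64_movsd_reg_membase(inst, X86_64_XMM0, X86_64_RBP, -8);
 *
 * should emit F2 0F 10 45 F8, i.e. "movsd -8(%rbp), %xmm0".
 */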
  3536. /*
  3537. * movss: Move scalar single (32bit float)
  3538. */
  3539. #define x86_64_movss_reg_reg(inst, dreg, sreg) \
  3540. do { \
  3541. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x10, (dreg), (sreg), 0); \
  3542. } while(0)
  3543. #define x86_64_movss_regp_reg(inst, dregp, sreg) \
  3544. do { \
  3545. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x11, (sreg), (dregp), 0); \
  3546. } while(0)
  3547. #define x86_64_movss_mem_reg(inst, mem, sreg) \
  3548. do { \
  3549. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x11, (sreg), (mem), 0); \
  3550. } while(0)
  3551. #define x86_64_movss_membase_reg(inst, basereg, disp, sreg) \
  3552. do { \
  3553. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x11, (sreg), (basereg), (disp), 0); \
  3554. } while(0)
  3555. #define x86_64_movss_memindex_reg(inst, basereg, disp, indexreg, shift, sreg) \
  3556. do { \
  3557. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x11, (sreg), (basereg), (disp), (indexreg), (shift), 0); \
  3558. } while(0)
  3559. #define x86_64_movss_reg_regp(inst, dreg, sregp) \
  3560. do { \
  3561. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x10, (dreg), (sregp), 0); \
  3562. } while(0)
  3563. #define x86_64_movss_reg_mem(inst, dreg, mem) \
  3564. do { \
  3565. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x10, (dreg), (mem), 0); \
  3566. } while(0)
  3567. #define x86_64_movss_reg_membase(inst, dreg, basereg, disp) \
  3568. do { \
  3569. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x10, (dreg), (basereg), (disp), 0); \
  3570. } while(0)
  3571. #define x86_64_movss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3572. do { \
  3573. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x10, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3574. } while(0)
  3575. /*
  3576. * Conversion opcodes
  3577. */
  3578. /*
  3579. * cvtsi2ss: Convert signed integer to float32
  3580. * The size is the size of the integer value (4 or 8)
  3581. */
  3582. #define x86_64_cvtsi2ss_reg_reg_size(inst, dxreg, sreg, size) \
  3583. do { \
  3584. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x2a, (dxreg), (sreg), (size)); \
  3585. } while(0)
  3586. #define x86_64_cvtsi2ss_reg_regp_size(inst, dxreg, sregp, size) \
  3587. do { \
  3588. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x2a, (dxreg), (sregp), (size)); \
  3589. } while(0)
  3590. #define x86_64_cvtsi2ss_reg_mem_size(inst, dxreg, mem, size) \
  3591. do { \
  3592. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x2a, (dxreg), (mem), (size)); \
  3593. } while(0)
  3594. #define x86_64_cvtsi2ss_reg_membase_size(inst, dreg, basereg, disp, size) \
  3595. do { \
  3596. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x2a, (dreg), (basereg), (disp), (size)); \
  3597. } while(0)
  3598. #define x86_64_cvtsi2ss_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  3599. do { \
  3600. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x2a, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  3601. } while(0)
  3602. /*
  3603. * cvtsi2sd: Convert signed integer to float64
  3604. * The size is the size of the integer value (4 or 8)
  3605. */
  3606. #define x86_64_cvtsi2sd_reg_reg_size(inst, dxreg, sreg, size) \
  3607. do { \
  3608. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x2a, (dxreg), (sreg), (size)); \
  3609. } while(0)
  3610. #define x86_64_cvtsi2sd_reg_regp_size(inst, dxreg, sregp, size) \
  3611. do { \
  3612. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x2a, (dxreg), (sregp), (size)); \
  3613. } while(0)
  3614. #define x86_64_cvtsi2sd_reg_mem_size(inst, dxreg, mem, size) \
  3615. do { \
  3616. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x2a, (dxreg), (mem), (size)); \
  3617. } while(0)
  3618. #define x86_64_cvtsi2sd_reg_membase_size(inst, dreg, basereg, disp, size) \
  3619. do { \
  3620. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x2a, (dreg), (basereg), (disp), (size)); \
  3621. } while(0)
  3622. #define x86_64_cvtsi2sd_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  3623. do { \
  3624. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x2a, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  3625. } while(0)
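/*
 * Illustrative sketch: converting a 64 bit integer register to float64
 * needs REX.W, which the size argument provides:
 *
 *   x86_64_cvtsi2sd_reg_reg_size(inst, X86_64_XMM0, X86_64_RCX, 8);
 *
 * should emit F2 48 0F 2A C1, i.e. "cvtsi2sd %rcx, %xmm0".
 */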
  3626. /*
3627. * cvtss2si: Convert float32 to a signed integer using the rounding mode
  3628. * in the mxcsr register
  3629. * The size is the size of the integer value (4 or 8)
  3630. */
  3631. #define x86_64_cvtss2si_reg_reg_size(inst, dreg, sxreg, size) \
  3632. do { \
  3633. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x2d, (dreg), (sxreg), (size)); \
  3634. } while(0)
  3635. #define x86_64_cvtss2si_reg_regp_size(inst, dreg, sregp, size) \
  3636. do { \
  3637. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x2d, (dreg), (sregp), (size)); \
  3638. } while(0)
  3639. #define x86_64_cvtss2si_reg_mem_size(inst, dreg, mem, size) \
  3640. do { \
  3641. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x2d, (dreg), (mem), (size)); \
  3642. } while(0)
  3643. #define x86_64_cvtss2si_reg_membase_size(inst, dreg, basereg, disp, size) \
  3644. do { \
  3645. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x2d, (dreg), (basereg), (disp), (size)); \
  3646. } while(0)
  3647. #define x86_64_cvtss2si_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  3648. do { \
  3649. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x2d, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  3650. } while(0)
  3651. /*
  3652. * cvtsd2si: Convert float64 to a signed integer using the rounding mode
  3653. * in the mxcsr register
  3654. * The size is the size of the integer value (4 or 8)
  3655. */
  3656. #define x86_64_cvtsd2si_reg_reg_size(inst, dreg, sxreg, size) \
  3657. do { \
  3658. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x2d, (dreg), (sxreg), (size)); \
  3659. } while(0)
  3660. #define x86_64_cvtsd2si_reg_regp_size(inst, dreg, sregp, size) \
  3661. do { \
  3662. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x2d, (dreg), (sregp), (size)); \
  3663. } while(0)
  3664. #define x86_64_cvtsd2si_reg_mem_size(inst, dreg, mem, size) \
  3665. do { \
  3666. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x2d, (dreg), (mem), (size)); \
  3667. } while(0)
  3668. #define x86_64_cvtsd2si_reg_membase_size(inst, dreg, basereg, disp, size) \
  3669. do { \
  3670. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x2d, (dreg), (basereg), (disp), (size)); \
  3671. } while(0)
  3672. #define x86_64_cvtsd2si_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
  3673. do { \
  3674. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x2d, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
  3675. } while(0)
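/*
 * Illustrative sketch: the reverse direction, rounding according to MXCSR:
 *
 *   x86_64_cvtsd2si_reg_reg_size(inst, X86_64_RAX, X86_64_XMM0, 8);
 *
 * should emit F2 48 0F 2D C0, i.e. "cvtsd2si %xmm0, %rax". Note that
 * opcode 0x2d honours the MXCSR rounding mode; the truncating form of the
 * instruction uses opcode 0x2c instead.
 */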
  3676. /*
  3677. * cvtss2sd: Convert float32 to float64
  3678. */
  3679. #define x86_64_cvtss2sd_reg_reg(inst, dreg, sreg) \
  3680. do { \
  3681. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x5a, (dreg), (sreg), 0); \
  3682. } while(0)
  3683. #define x86_64_cvtss2sd_reg_regp(inst, dxreg, sregp) \
  3684. do { \
  3685. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x5a, (dxreg), (sregp), 0); \
  3686. } while(0)
  3687. #define x86_64_cvtss2sd_reg_mem(inst, dxreg, mem) \
  3688. do { \
  3689. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x5a, (dxreg), (mem), 0); \
  3690. } while(0)
  3691. #define x86_64_cvtss2sd_reg_membase(inst, dreg, basereg, disp) \
  3692. do { \
  3693. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x5a, (dreg), (basereg), (disp), 0); \
  3694. } while(0)
  3695. #define x86_64_cvtss2sd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3696. do { \
  3697. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x5a, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3698. } while(0)
  3699. /*
  3700. * cvtsd2ss: Convert float64 to float32
  3701. */
  3702. #define x86_64_cvtsd2ss_reg_reg(inst, dreg, sreg) \
  3703. do { \
  3704. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x5a, (dreg), (sreg), 0); \
  3705. } while(0)
  3706. #define x86_64_cvtsd2ss_reg_regp(inst, dxreg, sregp) \
  3707. do { \
  3708. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x5a, (dxreg), (sregp), 0); \
  3709. } while(0)
  3710. #define x86_64_cvtsd2ss_reg_mem(inst, dxreg, mem) \
  3711. do { \
  3712. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x5a, (dxreg), (mem), 0); \
  3713. } while(0)
  3714. #define x86_64_cvtsd2ss_reg_membase(inst, dreg, basereg, disp) \
  3715. do { \
  3716. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x5a, (dreg), (basereg), (disp), 0); \
  3717. } while(0)
  3718. #define x86_64_cvtsd2ss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3719. do { \
  3720. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x5a, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3721. } while(0)
  3722. /*
  3723. * Compare opcodes
  3724. */
  3725. /*
  3726. * comiss: Compare ordered scalar single precision values
  3727. */
  3728. #define x86_64_comiss_reg_reg(inst, dreg, sreg) \
  3729. do { \
  3730. x86_64_xmm2_reg_reg((inst), 0x0f, 0x2f, (dreg), (sreg)); \
  3731. } while(0)
  3732. #define x86_64_comiss_reg_regp(inst, dreg, sregp) \
  3733. do { \
  3734. x86_64_xmm2_reg_regp((inst), 0x0f, 0x2f, (dreg), (sregp)); \
  3735. } while(0)
  3736. #define x86_64_comiss_reg_mem(inst, dreg, mem) \
  3737. do { \
  3738. x86_64_xmm2_reg_mem((inst), 0x0f, 0x2f, (dreg), (mem)); \
  3739. } while(0)
  3740. #define x86_64_comiss_reg_membase(inst, dreg, basereg, disp) \
  3741. do { \
  3742. x86_64_xmm2_reg_membase((inst), 0x0f, 0x2f, (dreg), (basereg), (disp)); \
  3743. } while(0)
  3744. #define x86_64_comiss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3745. do { \
  3746. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x2f, (dreg), (basereg), (disp), (indexreg), (shift)); \
  3747. } while(0)
  3748. /*
  3749. * comisd: Compare ordered scalar double precision values
  3750. */
  3751. #define x86_64_comisd_reg_reg(inst, dreg, sreg) \
  3752. do { \
  3753. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x2f, (dreg), (sreg), 0); \
  3754. } while(0)
  3755. #define x86_64_comisd_reg_regp(inst, dreg, sregp) \
  3756. do { \
  3757. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x2f, (dreg), (sregp), 0); \
  3758. } while(0)
  3759. #define x86_64_comisd_reg_mem(inst, dreg, mem) \
  3760. do { \
  3761. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x2f, (dreg), (mem), 0); \
  3762. } while(0)
  3763. #define x86_64_comisd_reg_membase(inst, dreg, basereg, disp) \
  3764. do { \
  3765. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x2f, (dreg), (basereg), (disp), 0); \
  3766. } while(0)
  3767. #define x86_64_comisd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3768. do { \
  3769. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x2f, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3770. } while(0)
  3771. /*
  3772. * ucomiss: Compare unordered scalar single precision values
  3773. */
  3774. #define x86_64_ucomiss_reg_reg(inst, dreg, sreg) \
  3775. do { \
  3776. x86_64_xmm2_reg_reg((inst), 0x0f, 0x2e, (dreg), (sreg)); \
  3777. } while(0)
  3778. #define x86_64_ucomiss_reg_regp(inst, dreg, sregp) \
  3779. do { \
  3780. x86_64_xmm2_reg_regp((inst), 0x0f, 0x2e, (dreg), (sregp)); \
  3781. } while(0)
  3782. #define x86_64_ucomiss_reg_mem(inst, dreg, mem) \
  3783. do { \
  3784. x86_64_xmm2_reg_mem((inst), 0x0f, 0x2e, (dreg), (mem)); \
  3785. } while(0)
  3786. #define x86_64_ucomiss_reg_membase(inst, dreg, basereg, disp) \
  3787. do { \
  3788. x86_64_xmm2_reg_membase((inst), 0x0f, 0x2e, (dreg), (basereg), (disp)); \
  3789. } while(0)
  3790. #define x86_64_ucomiss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3791. do { \
  3792. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x2e, (dreg), (basereg), (disp), (indexreg), (shift)); \
  3793. } while(0)
  3794. /*
  3795. * ucomisd: Compare unordered scalar double precision values
  3796. */
  3797. #define x86_64_ucomisd_reg_reg(inst, dreg, sreg) \
  3798. do { \
  3799. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x2e, (dreg), (sreg), 0); \
  3800. } while(0)
  3801. #define x86_64_ucomisd_reg_regp(inst, dreg, sregp) \
  3802. do { \
  3803. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x2e, (dreg), (sregp), 0); \
  3804. } while(0)
  3805. #define x86_64_ucomisd_reg_mem(inst, dreg, mem) \
  3806. do { \
  3807. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x2e, (dreg), (mem), 0); \
  3808. } while(0)
  3809. #define x86_64_ucomisd_reg_membase(inst, dreg, basereg, disp) \
  3810. do { \
  3811. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x2e, (dreg), (basereg), (disp), 0); \
  3812. } while(0)
  3813. #define x86_64_ucomisd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3814. do { \
  3815. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x2e, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3816. } while(0)
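/*
 * Illustrative sketch: the (u)comis* instructions set ZF/PF/CF like an
 * unsigned integer compare, so a conditional branch can follow directly.
 * For example
 *
 *   x86_64_ucomisd_reg_reg(inst, X86_64_XMM0, X86_64_XMM1);
 *
 * should emit 66 0F 2E C1 ("ucomisd %xmm1, %xmm0"); PF is set when the
 * comparison is unordered (either operand is a NaN).
 */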
  3817. /*
  3818. * Arithmetic opcodes
  3819. */
  3820. /*
  3821. * addss: Add scalar single precision float values
  3822. */
  3823. #define x86_64_addss_reg_reg(inst, dreg, sreg) \
  3824. do { \
  3825. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x58, (dreg), (sreg), 0); \
  3826. } while(0)
  3827. #define x86_64_addss_reg_regp(inst, dreg, sregp) \
  3828. do { \
  3829. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x58, (dreg), (sregp), 0); \
  3830. } while(0)
  3831. #define x86_64_addss_reg_mem(inst, dreg, mem) \
  3832. do { \
  3833. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x58, (dreg), (mem), 0); \
  3834. } while(0)
  3835. #define x86_64_addss_reg_membase(inst, dreg, basereg, disp) \
  3836. do { \
  3837. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x58, (dreg), (basereg), (disp), 0); \
  3838. } while(0)
  3839. #define x86_64_addss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3840. do { \
  3841. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x58, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3842. } while(0)
  3843. /*
3844. * subss: Subtract scalar single precision float values
  3845. */
  3846. #define x86_64_subss_reg_reg(inst, dreg, sreg) \
  3847. do { \
  3848. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x5c, (dreg), (sreg), 0); \
  3849. } while(0)
  3850. #define x86_64_subss_reg_regp(inst, dreg, sregp) \
  3851. do { \
  3852. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x5c, (dreg), (sregp), 0); \
  3853. } while(0)
  3854. #define x86_64_subss_reg_mem(inst, dreg, mem) \
  3855. do { \
  3856. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x5c, (dreg), (mem), 0); \
  3857. } while(0)
  3858. #define x86_64_subss_reg_membase(inst, dreg, basereg, disp) \
  3859. do { \
  3860. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x5c, (dreg), (basereg), (disp), 0); \
  3861. } while(0)
  3862. #define x86_64_subss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3863. do { \
  3864. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x5c, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3865. } while(0)
  3866. /*
  3867. * mulss: Multiply scalar single precision float values
  3868. */
  3869. #define x86_64_mulss_reg_reg(inst, dreg, sreg) \
  3870. do { \
  3871. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x59, (dreg), (sreg), 0); \
  3872. } while(0)
  3873. #define x86_64_mulss_reg_regp(inst, dreg, sregp) \
  3874. do { \
  3875. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x59, (dreg), (sregp), 0); \
  3876. } while(0)
  3877. #define x86_64_mulss_reg_mem(inst, dreg, mem) \
  3878. do { \
  3879. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x59, (dreg), (mem), 0); \
  3880. } while(0)
  3881. #define x86_64_mulss_reg_membase(inst, dreg, basereg, disp) \
  3882. do { \
  3883. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x59, (dreg), (basereg), (disp), 0); \
  3884. } while(0)
  3885. #define x86_64_mulss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3886. do { \
  3887. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x59, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3888. } while(0)
  3889. /*
  3890. * divss: Divide scalar single precision float values
  3891. */
  3892. #define x86_64_divss_reg_reg(inst, dreg, sreg) \
  3893. do { \
  3894. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x5e, (dreg), (sreg), 0); \
  3895. } while(0)
  3896. #define x86_64_divss_reg_regp(inst, dreg, sregp) \
  3897. do { \
  3898. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x5e, (dreg), (sregp), 0); \
  3899. } while(0)
  3900. #define x86_64_divss_reg_mem(inst, dreg, mem) \
  3901. do { \
  3902. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x5e, (dreg), (mem), 0); \
  3903. } while(0)
  3904. #define x86_64_divss_reg_membase(inst, dreg, basereg, disp) \
  3905. do { \
  3906. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x5e, (dreg), (basereg), (disp), 0); \
  3907. } while(0)
  3908. #define x86_64_divss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3909. do { \
  3910. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x5e, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  3911. } while(0)
  3912. /*
  3913. * Macros for the logical operations with packed single precision values.
  3914. */
  3915. #define x86_64_plops_reg_reg(inst, op, dreg, sreg) \
  3916. do { \
  3917. x86_64_xmm2_reg_reg((inst), 0x0f, (op), (dreg), (sreg)); \
  3918. } while(0)
  3919. #define x86_64_plops_reg_regp(inst, op, dreg, sregp) \
  3920. do { \
  3921. x86_64_xmm2_reg_regp((inst), 0x0f, (op), (dreg), (sregp)); \
  3922. } while(0)
  3923. #define x86_64_plops_reg_mem(inst, op, dreg, mem) \
  3924. do { \
  3925. x86_64_xmm2_reg_mem((inst), 0x0f, (op), (dreg), (mem)); \
  3926. } while(0)
  3927. #define x86_64_plops_reg_membase(inst, op, dreg, basereg, disp) \
  3928. do { \
  3929. x86_64_xmm2_reg_membase((inst), 0x0f, (op), (dreg), (basereg), (disp)); \
  3930. } while(0)
  3931. #define x86_64_plops_reg_memindex(inst, op, dreg, basereg, disp, indexreg, shift) \
  3932. do { \
  3933. x86_64_xmm2_reg_memindex((inst), 0x0f, (op), (dreg), (basereg), (disp), (indexreg), (shift)); \
  3934. } while(0)
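/*
 * Illustrative sketch: the "op" byte is the second opcode, e.g. 0x54 for
 * andps, 0x56 for orps and 0x57 for xorps, so
 *
 *   x86_64_plops_reg_reg(inst, 0x57, X86_64_XMM0, X86_64_XMM0);
 *
 * should emit 0F 57 C0 ("xorps %xmm0, %xmm0"), clearing the register.
 */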
  3935. /*
  3936. * andps: And
  3937. */
  3938. #define x86_64_andps_reg_reg(inst, dreg, sreg) \
  3939. do { \
  3940. x86_64_xmm2_reg_reg((inst), 0x0f, 0x54, (dreg), (sreg)); \
  3941. } while(0)
  3942. #define x86_64_andps_reg_regp(inst, dreg, sregp) \
  3943. do { \
  3944. x86_64_xmm2_reg_regp((inst), 0x0f, 0x54, (dreg), (sregp)); \
  3945. } while(0)
  3946. #define x86_64_andps_reg_mem(inst, dreg, mem) \
  3947. do { \
  3948. x86_64_xmm2_reg_mem((inst), 0x0f, 0x54, (dreg), (mem)); \
  3949. } while(0)
  3950. #define x86_64_andps_reg_membase(inst, dreg, basereg, disp) \
  3951. do { \
  3952. x86_64_xmm2_reg_membase((inst), 0x0f, 0x54, (dreg), (basereg), (disp)); \
  3953. } while(0)
  3954. #define x86_64_andps_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3955. do { \
  3956. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x54, (dreg), (basereg), (disp), (indexreg), (shift)); \
  3957. } while(0)
  3958. /*
  3959. * orps: Or
  3960. */
  3961. #define x86_64_orps_reg_reg(inst, dreg, sreg) \
  3962. do { \
  3963. x86_64_xmm2_reg_reg((inst), 0x0f, 0x56, (dreg), (sreg)); \
  3964. } while(0)
  3965. #define x86_64_orps_reg_regp(inst, dreg, sregp) \
  3966. do { \
  3967. x86_64_xmm2_reg_regp((inst), 0x0f, 0x56, (dreg), (sregp)); \
  3968. } while(0)
  3969. #define x86_64_orps_reg_mem(inst, dreg, mem) \
  3970. do { \
  3971. x86_64_xmm2_reg_mem((inst), 0x0f, 0x56, (dreg), (mem)); \
  3972. } while(0)
  3973. #define x86_64_orps_reg_membase(inst, dreg, basereg, disp) \
  3974. do { \
  3975. x86_64_xmm2_reg_membase((inst), 0x0f, 0x56, (dreg), (basereg), (disp)); \
  3976. } while(0)
  3977. #define x86_64_orps_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  3978. do { \
  3979. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x56, (dreg), (basereg), (disp), (indexreg), (shift)); \
  3980. } while(0)
  3981. /*
  3982. * xorps: Xor
  3983. */
  3984. #define x86_64_xorps_reg_reg(inst, dreg, sreg) \
  3985. do { \
  3986. x86_64_xmm2_reg_reg((inst), 0x0f, 0x57, (dreg), (sreg)); \
  3987. } while(0)
  3988. #define x86_64_xorps_reg_regp(inst, dreg, sregp) \
  3989. do { \
  3990. x86_64_xmm2_reg_regp((inst), 0x0f, 0x57, (dreg), (sregp)); \
  3991. } while(0)
  3992. #define x86_64_xorps_reg_mem(inst, dreg, mem) \
  3993. do { \
  3994. x86_64_xmm2_reg_mem((inst), 0x0f, 0x57, (dreg), (mem)); \
  3995. } while(0)
  3996. #define x86_64_xorps_reg_membase(inst, dreg, basereg, disp) \
  3997. do { \
  3998. x86_64_xmm2_reg_membase((inst), 0x0f, 0x57, (dreg), (basereg), (disp)); \
  3999. } while(0)
  4000. #define x86_64_xorps_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4001. do { \
  4002. x86_64_xmm2_reg_memindex((inst), 0x0f, 0x57, (dreg), (basereg), (disp), (indexreg), (shift)); \
  4003. } while(0)
  4004. /*
  4005. * maxss: Maximum value
  4006. */
  4007. #define x86_64_maxss_reg_reg(inst, dreg, sreg) \
  4008. do { \
  4009. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x5f, (dreg), (sreg), 0); \
  4010. } while(0)
  4011. #define x86_64_maxss_reg_regp(inst, dreg, sregp) \
  4012. do { \
  4013. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x5f, (dreg), (sregp), 0); \
  4014. } while(0)
  4015. #define x86_64_maxss_reg_mem(inst, dreg, mem) \
  4016. do { \
  4017. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x5f, (dreg), (mem), 0); \
  4018. } while(0)
  4019. #define x86_64_maxss_reg_membase(inst, dreg, basereg, disp) \
  4020. do { \
  4021. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x5f, (dreg), (basereg), (disp), 0); \
  4022. } while(0)
  4023. #define x86_64_maxss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4024. do { \
  4025. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x5f, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4026. } while(0)
  4027. /*
  4028. * minss: Minimum value
  4029. */
  4030. #define x86_64_minss_reg_reg(inst, dreg, sreg) \
  4031. do { \
  4032. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x5d, (dreg), (sreg), 0); \
  4033. } while(0)
  4034. #define x86_64_minss_reg_regp(inst, dreg, sregp) \
  4035. do { \
  4036. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x5d, (dreg), (sregp), 0); \
  4037. } while(0)
  4038. #define x86_64_minss_reg_mem(inst, dreg, mem) \
  4039. do { \
  4040. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x5d, (dreg), (mem), 0); \
  4041. } while(0)
  4042. #define x86_64_minss_reg_membase(inst, dreg, basereg, disp) \
  4043. do { \
  4044. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x5d, (dreg), (basereg), (disp), 0); \
  4045. } while(0)
  4046. #define x86_64_minss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4047. do { \
  4048. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x5d, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4049. } while(0)
  4050. /*
  4051. * sqrtss: Square root
  4052. */
  4053. #define x86_64_sqrtss_reg_reg(inst, dreg, sreg) \
  4054. do { \
  4055. x86_64_p1_xmm2_reg_reg_size((inst), 0xf3, 0x0f, 0x51, (dreg), (sreg), 0); \
  4056. } while(0)
  4057. #define x86_64_sqrtss_reg_regp(inst, dreg, sregp) \
  4058. do { \
  4059. x86_64_p1_xmm2_reg_regp_size((inst), 0xf3, 0x0f, 0x51, (dreg), (sregp), 0); \
  4060. } while(0)
  4061. #define x86_64_sqrtss_reg_mem(inst, dreg, mem) \
  4062. do { \
  4063. x86_64_p1_xmm2_reg_mem_size((inst), 0xf3, 0x0f, 0x51, (dreg), (mem), 0); \
  4064. } while(0)
  4065. #define x86_64_sqrtss_reg_membase(inst, dreg, basereg, disp) \
  4066. do { \
  4067. x86_64_p1_xmm2_reg_membase_size((inst), 0xf3, 0x0f, 0x51, (dreg), (basereg), (disp), 0); \
  4068. } while(0)
  4069. #define x86_64_sqrtss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4070. do { \
  4071. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf3, 0x0f, 0x51, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4072. } while(0)
  4073. /*
  4074. * Macros for the logical operations with packed double precision values.
  4075. */
  4076. #define x86_64_plopd_reg_reg(inst, op, dreg, sreg) \
  4077. do { \
  4078. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, (op), (dreg), (sreg), 0); \
  4079. } while(0)
  4080. #define x86_64_plopd_reg_regp(inst, op, dreg, sregp) \
  4081. do { \
  4082. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, (op), (dreg), (sregp), 0); \
  4083. } while(0)
  4084. #define x86_64_plopd_reg_mem(inst, op, dreg, mem) \
  4085. do { \
  4086. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, (op), (dreg), (mem), 0); \
  4087. } while(0)
  4088. #define x86_64_plopd_reg_membase(inst, op, dreg, basereg, disp) \
  4089. do { \
  4090. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, (op), (dreg), (basereg), (disp), 0); \
  4091. } while(0)
  4092. #define x86_64_plopd_reg_memindex(inst, op, dreg, basereg, disp, indexreg, shift) \
  4093. do { \
4094. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, (op), (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4095. } while(0)
  4096. /*
  4097. * addsd: Add scalar double precision float values
  4098. */
  4099. #define x86_64_addsd_reg_reg(inst, dreg, sreg) \
  4100. do { \
  4101. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x58, (dreg), (sreg), 0); \
  4102. } while(0)
  4103. #define x86_64_addsd_reg_regp(inst, dreg, sregp) \
  4104. do { \
  4105. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x58, (dreg), (sregp), 0); \
  4106. } while(0)
  4107. #define x86_64_addsd_reg_mem(inst, dreg, mem) \
  4108. do { \
  4109. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x58, (dreg), (mem), 0); \
  4110. } while(0)
  4111. #define x86_64_addsd_reg_membase(inst, dreg, basereg, disp) \
  4112. do { \
  4113. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x58, (dreg), (basereg), (disp), 0); \
  4114. } while(0)
  4115. #define x86_64_addsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4116. do { \
  4117. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x58, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4118. } while(0)
  4119. /*
4120. * subsd: Subtract scalar double precision float values
  4121. */
  4122. #define x86_64_subsd_reg_reg(inst, dreg, sreg) \
  4123. do { \
  4124. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x5c, (dreg), (sreg), 0); \
  4125. } while(0)
  4126. #define x86_64_subsd_reg_regp(inst, dreg, sregp) \
  4127. do { \
  4128. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x5c, (dreg), (sregp), 0); \
  4129. } while(0)
  4130. #define x86_64_subsd_reg_mem(inst, dreg, mem) \
  4131. do { \
  4132. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x5c, (dreg), (mem), 0); \
  4133. } while(0)
  4134. #define x86_64_subsd_reg_membase(inst, dreg, basereg, disp) \
  4135. do { \
  4136. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x5c, (dreg), (basereg), (disp), 0); \
  4137. } while(0)
  4138. #define x86_64_subsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4139. do { \
  4140. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x5c, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4141. } while(0)
  4142. /*
  4143. * mulsd: Multiply scalar double precision float values
  4144. */
  4145. #define x86_64_mulsd_reg_reg(inst, dreg, sreg) \
  4146. do { \
  4147. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x59, (dreg), (sreg), 0); \
  4148. } while(0)
  4149. #define x86_64_mulsd_reg_regp(inst, dreg, sregp) \
  4150. do { \
  4151. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x59, (dreg), (sregp), 0); \
  4152. } while(0)
  4153. #define x86_64_mulsd_reg_mem(inst, dreg, mem) \
  4154. do { \
  4155. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x59, (dreg), (mem), 0); \
  4156. } while(0)
  4157. #define x86_64_mulsd_reg_membase(inst, dreg, basereg, disp) \
  4158. do { \
  4159. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x59, (dreg), (basereg), (disp), 0); \
  4160. } while(0)
  4161. #define x86_64_mulsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4162. do { \
  4163. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x59, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4164. } while(0)
  4165. /*
  4166. * divsd: Divide scalar double precision float values
  4167. */
  4168. #define x86_64_divsd_reg_reg(inst, dreg, sreg) \
  4169. do { \
  4170. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x5e, (dreg), (sreg), 0); \
  4171. } while(0)
  4172. #define x86_64_divsd_reg_regp(inst, dreg, sregp) \
  4173. do { \
  4174. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x5e, (dreg), (sregp), 0); \
  4175. } while(0)
  4176. #define x86_64_divsd_reg_mem(inst, dreg, mem) \
  4177. do { \
  4178. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x5e, (dreg), (mem), 0); \
  4179. } while(0)
  4180. #define x86_64_divsd_reg_membase(inst, dreg, basereg, disp) \
  4181. do { \
  4182. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x5e, (dreg), (basereg), (disp), 0); \
  4183. } while(0)
  4184. #define x86_64_divsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4185. do { \
  4186. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x5e, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4187. } while(0)
  4188. /*
  4189. * andpd: And
  4190. */
  4191. #define x86_64_andpd_reg_reg(inst, dreg, sreg) \
  4192. do { \
  4193. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x54, (dreg), (sreg), 0); \
  4194. } while(0)
  4195. #define x86_64_andpd_reg_regp(inst, dreg, sregp) \
  4196. do { \
  4197. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x54, (dreg), (sregp), 0); \
  4198. } while(0)
  4199. #define x86_64_andpd_reg_mem(inst, dreg, mem) \
  4200. do { \
  4201. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x54, (dreg), (mem), 0); \
  4202. } while(0)
  4203. #define x86_64_andpd_reg_membase(inst, dreg, basereg, disp) \
  4204. do { \
  4205. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x54, (dreg), (basereg), (disp), 0); \
  4206. } while(0)
  4207. #define x86_64_andpd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4208. do { \
  4209. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x54, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4210. } while(0)
  4211. /*
  4212. * orpd: Or
  4213. */
  4214. #define x86_64_orpd_reg_reg(inst, dreg, sreg) \
  4215. do { \
  4216. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x56, (dreg), (sreg), 0); \
  4217. } while(0)
  4218. #define x86_64_orpd_reg_regp(inst, dreg, sregp) \
  4219. do { \
  4220. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x56, (dreg), (sregp), 0); \
  4221. } while(0)
  4222. #define x86_64_orpd_reg_mem(inst, dreg, mem) \
  4223. do { \
  4224. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x56, (dreg), (mem), 0); \
  4225. } while(0)
  4226. #define x86_64_orpd_reg_membase(inst, dreg, basereg, disp) \
  4227. do { \
  4228. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x56, (dreg), (basereg), (disp), 0); \
  4229. } while(0)
  4230. #define x86_64_orpd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4231. do { \
  4232. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x56, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4233. } while(0)
  4234. /*
  4235. * xorpd: Xor
  4236. */
  4237. #define x86_64_xorpd_reg_reg(inst, dreg, sreg) \
  4238. do { \
  4239. x86_64_p1_xmm2_reg_reg_size((inst), 0x66, 0x0f, 0x57, (dreg), (sreg), 0); \
  4240. } while(0)
  4241. #define x86_64_xorpd_reg_regp(inst, dreg, sregp) \
  4242. do { \
  4243. x86_64_p1_xmm2_reg_regp_size((inst), 0x66, 0x0f, 0x57, (dreg), (sregp), 0); \
  4244. } while(0)
  4245. #define x86_64_xorpd_reg_mem(inst, dreg, mem) \
  4246. do { \
  4247. x86_64_p1_xmm2_reg_mem_size((inst), 0x66, 0x0f, 0x57, (dreg), (mem), 0); \
  4248. } while(0)
  4249. #define x86_64_xorpd_reg_membase(inst, dreg, basereg, disp) \
  4250. do { \
  4251. x86_64_p1_xmm2_reg_membase_size((inst), 0x66, 0x0f, 0x57, (dreg), (basereg), (disp), 0); \
  4252. } while(0)
  4253. #define x86_64_xorpd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4254. do { \
  4255. x86_64_p1_xmm2_reg_memindex_size((inst), 0x66, 0x0f, 0x57, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4256. } while(0)
  4257. /*
  4258. * maxsd: Maximum value
  4259. */
  4260. #define x86_64_maxsd_reg_reg(inst, dreg, sreg) \
  4261. do { \
  4262. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x5f, (dreg), (sreg), 0); \
  4263. } while(0)
  4264. #define x86_64_maxsd_reg_regp(inst, dreg, sregp) \
  4265. do { \
  4266. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x5f, (dreg), (sregp), 0); \
  4267. } while(0)
  4268. #define x86_64_maxsd_reg_mem(inst, dreg, mem) \
  4269. do { \
  4270. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x5f, (dreg), (mem), 0); \
  4271. } while(0)
  4272. #define x86_64_maxsd_reg_membase(inst, dreg, basereg, disp) \
  4273. do { \
  4274. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x5f, (dreg), (basereg), (disp), 0); \
  4275. } while(0)
  4276. #define x86_64_maxsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4277. do { \
  4278. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x5f, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4279. } while(0)
  4280. /*
  4281. * minsd: Minimum value
  4282. */
  4283. #define x86_64_minsd_reg_reg(inst, dreg, sreg) \
  4284. do { \
  4285. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x5d, (dreg), (sreg), 0); \
  4286. } while(0)
  4287. #define x86_64_minsd_reg_regp(inst, dreg, sregp) \
  4288. do { \
  4289. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x5d, (dreg), (sregp), 0); \
  4290. } while(0)
  4291. #define x86_64_minsd_reg_mem(inst, dreg, mem) \
  4292. do { \
  4293. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x5d, (dreg), (mem), 0); \
  4294. } while(0)
  4295. #define x86_64_minsd_reg_membase(inst, dreg, basereg, disp) \
  4296. do { \
  4297. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x5d, (dreg), (basereg), (disp), 0); \
  4298. } while(0)
  4299. #define x86_64_minsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4300. do { \
4301. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x5d, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4302. } while(0)
  4303. /*
  4304. * sqrtsd: Square root
  4305. */
  4306. #define x86_64_sqrtsd_reg_reg(inst, dreg, sreg) \
  4307. do { \
  4308. x86_64_p1_xmm2_reg_reg_size((inst), 0xf2, 0x0f, 0x51, (dreg), (sreg), 0); \
  4309. } while(0)
  4310. #define x86_64_sqrtsd_reg_regp(inst, dreg, sregp) \
  4311. do { \
  4312. x86_64_p1_xmm2_reg_regp_size((inst), 0xf2, 0x0f, 0x51, (dreg), (sregp), 0); \
  4313. } while(0)
  4314. #define x86_64_sqrtsd_reg_mem(inst, dreg, mem) \
  4315. do { \
  4316. x86_64_p1_xmm2_reg_mem_size((inst), 0xf2, 0x0f, 0x51, (dreg), (mem), 0); \
  4317. } while(0)
  4318. #define x86_64_sqrtsd_reg_membase(inst, dreg, basereg, disp) \
  4319. do { \
  4320. x86_64_p1_xmm2_reg_membase_size((inst), 0xf2, 0x0f, 0x51, (dreg), (basereg), (disp), 0); \
  4321. } while(0)
  4322. #define x86_64_sqrtsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift) \
  4323. do { \
  4324. x86_64_p1_xmm2_reg_memindex_size((inst), 0xf2, 0x0f, 0x51, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4325. } while(0)
  4326. /*
  4327. * Rounding: Available in SSE 4.1 only
  4328. */
  4329. /*
  4330. * roundss: Round scalar single precision value
  4331. */
  4332. #define x86_64_roundss_reg_reg(inst, dreg, sreg, mode) \
  4333. do { \
  4334. x86_64_p1_xmm3_reg_reg_size((inst), 0x66, 0x0f, 0x3a, 0x0a, (dreg), (sreg), 0); \
  4335. x86_imm_emit8((inst), (mode)); \
  4336. } while(0)
  4337. #define x86_64_roundss_reg_regp(inst, dreg, sregp, mode) \
  4338. do { \
  4339. x86_64_p1_xmm3_reg_regp_size((inst), 0x66, 0x0f, 0x3a, 0x0a, (dreg), (sregp), 0); \
  4340. x86_imm_emit8((inst), (mode)); \
  4341. } while(0)
  4342. #define x86_64_roundss_reg_mem(inst, dreg, mem, mode) \
  4343. do { \
  4344. x86_64_p1_xmm3_reg_mem_size((inst), 0x66, 0x0f, 0x3a, 0x0a, (dreg), (mem), 0); \
  4345. x86_imm_emit8((inst), (mode)); \
  4346. } while(0)
  4347. #define x86_64_roundss_reg_membase(inst, dreg, basereg, disp, mode) \
  4348. do { \
  4349. x86_64_p1_xmm3_reg_membase_size((inst), 0x66, 0x0f, 0x3a, 0x0a, (dreg), (basereg), (disp), 0); \
  4350. x86_imm_emit8((inst), (mode)); \
  4351. } while(0)
  4352. #define x86_64_roundss_reg_memindex(inst, dreg, basereg, disp, indexreg, shift, mode) \
  4353. do { \
  4354. x86_64_p1_xmm3_reg_memindex_size((inst), 0x66, 0x0f, 0x3a, 0x0a, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4355. x86_imm_emit8((inst), (mode)); \
  4356. } while(0)
  4357. /*
  4358. * roundsd: Round scalar double precision value
  4359. */
  4360. #define x86_64_roundsd_reg_reg(inst, dreg, sreg, mode) \
  4361. do { \
  4362. x86_64_p1_xmm3_reg_reg_size((inst), 0x66, 0x0f, 0x3a, 0x0b, (dreg), (sreg), 0); \
  4363. x86_imm_emit8((inst), (mode)); \
  4364. } while(0)
  4365. #define x86_64_roundsd_reg_regp(inst, dreg, sregp, mode) \
  4366. do { \
  4367. x86_64_p1_xmm3_reg_regp_size((inst), 0x66, 0x0f, 0x3a, 0x0b, (dreg), (sregp), 0); \
  4368. x86_imm_emit8((inst), (mode)); \
  4369. } while(0)
  4370. #define x86_64_roundsd_reg_mem(inst, dreg, mem, mode) \
  4371. do { \
  4372. x86_64_p1_xmm3_reg_mem_size((inst), 0x66, 0x0f, 0x3a, 0x0b, (dreg), (mem), 0); \
  4373. x86_imm_emit8((inst), (mode)); \
  4374. } while(0)
  4375. #define x86_64_roundsd_reg_membase(inst, dreg, basereg, disp, mode) \
  4376. do { \
  4377. x86_64_p1_xmm3_reg_membase_size((inst), 0x66, 0x0f, 0x3a, 0x0b, (dreg), (basereg), (disp), 0); \
  4378. x86_imm_emit8((inst), (mode)); \
  4379. } while(0)
  4380. #define x86_64_roundsd_reg_memindex(inst, dreg, basereg, disp, indexreg, shift, mode) \
  4381. do { \
  4382. x86_64_p1_xmm3_reg_memindex_size((inst), 0x66, 0x0f, 0x3a, 0x0b, (dreg), (basereg), (disp), (indexreg), (shift), 0); \
  4383. x86_imm_emit8((inst), (mode)); \
  4384. } while(0)
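/*
 * Illustrative sketch: the mode immediate selects the rounding. With bit 2
 * clear, bits 0-1 give the mode (00 nearest, 01 down, 10 up, 11 truncate);
 * with bit 2 set the MXCSR rounding control is used instead. So
 *
 *   x86_64_roundsd_reg_reg(inst, X86_64_XMM0, X86_64_XMM1, 3);
 *
 * should emit 66 0F 3A 0B C1 03, truncating xmm1 into xmm0.
 */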
  4385. /*
  4386. * Clear xmm register
  4387. */
  4388. #define x86_64_clear_xreg(inst, reg) \
  4389. do { \
  4390. x86_64_xorps_reg_reg((inst), (reg), (reg)); \
  4391. } while(0)
  4392. /*
  4393. * fpu instructions
  4394. */
  4395. /*
  4396. * fld
  4397. */
  4398. #define x86_64_fld_regp_size(inst, sregp, size) \
  4399. do { \
  4400. x86_64_rex_emit((inst), 0, 0, 0, (sregp)); \
  4401. switch(size) \
  4402. { \
  4403. case 4: \
  4404. { \
  4405. *(inst)++ = (unsigned char)0xd9; \
  4406. x86_64_regp_emit((inst), 0, (sregp)); \
  4407. } \
  4408. break; \
  4409. case 8: \
  4410. { \
  4411. *(inst)++ = (unsigned char)0xdd; \
  4412. x86_64_regp_emit((inst), 0, (sregp)); \
  4413. } \
  4414. break; \
  4415. case 10: \
  4416. { \
  4417. *(inst)++ = (unsigned char)0xdb; \
  4418. x86_64_regp_emit((inst), 5, (sregp)); \
  4419. } \
  4420. break; \
  4421. } \
  4422. } while(0)
  4423. #define x86_64_fld_mem_size(inst, mem, size) \
  4424. do { \
  4425. switch(size) \
  4426. { \
  4427. case 4: \
  4428. { \
  4429. *(inst)++ = (unsigned char)0xd9; \
  4430. x86_64_mem_emit((inst), 0, (mem)); \
  4431. } \
  4432. break; \
  4433. case 8: \
  4434. { \
  4435. *(inst)++ = (unsigned char)0xdd; \
  4436. x86_64_mem_emit((inst), 0, (mem)); \
  4437. } \
  4438. break; \
  4439. case 10: \
  4440. { \
  4441. *(inst)++ = (unsigned char)0xdb; \
  4442. x86_64_mem_emit((inst), 5, (mem)); \
  4443. } \
  4444. break; \
  4445. } \
  4446. } while(0)
  4447. #define x86_64_fld_membase_size(inst, basereg, disp, size) \
  4448. do { \
  4449. x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
  4450. switch(size) \
  4451. { \
  4452. case 4: \
  4453. { \
  4454. *(inst)++ = (unsigned char)0xd9; \
  4455. x86_64_membase_emit((inst), 0, (basereg), (disp)); \
  4456. } \
  4457. break; \
  4458. case 8: \
  4459. { \
  4460. *(inst)++ = (unsigned char)0xdd; \
  4461. x86_64_membase_emit((inst), 0, (basereg), (disp)); \
  4462. } \
  4463. break; \
  4464. case 10: \
  4465. { \
  4466. *(inst)++ = (unsigned char)0xdb; \
  4467. x86_64_membase_emit((inst), 5, (basereg), (disp)); \
  4468. } \
  4469. break; \
  4470. } \
  4471. } while(0)
  4472. #define x86_64_fld_memindex_size(inst, basereg, disp, indexreg, shift, size) \
  4473. do { \
  4474. x86_64_rex_emit((inst), 0, 0, (indexreg), (basereg)); \
  4475. switch(size) \
  4476. { \
  4477. case 4: \
  4478. { \
  4479. *(inst)++ = (unsigned char)0xd9; \
  4480. x86_64_memindex_emit((inst), 0, (basereg), (disp), (indexreg), (shift)); \
  4481. } \
  4482. break; \
  4483. case 8: \
  4484. { \
  4485. *(inst)++ = (unsigned char)0xdd; \
  4486. x86_64_memindex_emit((inst), 0, (basereg), (disp), (indexreg), (shift)); \
  4487. } \
  4488. break; \
  4489. case 10: \
  4490. { \
  4491. *(inst)++ = (unsigned char)0xdb; \
  4492. x86_64_memindex_emit((inst), 5, (basereg), (disp), (indexreg), (shift)); \
  4493. } \
  4494. break; \
  4495. } \
  4496. } while(0)
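/*
 * Illustrative sketch: size selects the memory operand width (4 = float32,
 * 8 = float64, 10 = long double). Loading a double from the frame:
 *
 *   x86_64_fld_membase_size(inst, X86_64_RBP, -8, 8);
 *
 * should emit DD 45 F8, i.e. "fldl -8(%rbp)".
 */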

/*
 * fild: Load an integer and convert it to long double
 */
#define x86_64_fild_mem_size(inst, mem, size) \
    do { \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_mem_emit((inst), 0, (mem)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_mem_emit((inst), 0, (mem)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_mem_emit((inst), 5, (mem)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fild_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_membase_emit((inst), 0, (basereg), (disp)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_membase_emit((inst), 0, (basereg), (disp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_membase_emit((inst), 5, (basereg), (disp)); \
            } \
            break; \
        } \
    } while(0)
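
/*
 * Usage sketch (illustrative only): for fild, "size" is the width of the
 * integer operand (2, 4 or 8 bytes); the value is always widened to long
 * double on the fpu stack. Loading a 32-bit integer through rax, i.e.
 * "fild dword ptr [rax]":
 *
 *   unsigned char buf[16], *inst = buf;
 *   x86_64_fild_membase_size(inst, X86_64_RAX, 0, 4);
 */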

/*
 * fst: Store fpu register to memory (only float32 and float64 allowed)
 */
#define x86_64_fst_regp_size(inst, sregp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (sregp)); \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_regp_emit((inst), 2, (sregp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_regp_emit((inst), 2, (sregp)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fst_mem_size(inst, mem, size) \
    do { \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_mem_emit((inst), 2, (mem)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_mem_emit((inst), 2, (mem)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fst_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_membase_emit((inst), 2, (basereg), (disp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_membase_emit((inst), 2, (basereg), (disp)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fst_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, (indexreg), (basereg)); \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_memindex_emit((inst), 2, (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_memindex_emit((inst), 2, (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
        } \
    } while(0)
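
/*
 * Usage sketch (illustrative only): fst leaves st(0) on the stack, which
 * is also why there is no 10-byte case (the hardware only provides fstp
 * for m80fp). Storing st(0) as a float32, i.e. "fst dword ptr [rbp - 4]":
 *
 *   unsigned char buf[16], *inst = buf;
 *   x86_64_fst_membase_size(inst, X86_64_RBP, -4, 4);
 */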

/*
 * fstp: Store the top fpu register to memory (float32, float64 or long
 * double) and pop it from the fpu stack
 */
#define x86_64_fstp_regp_size(inst, sregp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (sregp)); \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_regp_emit((inst), 3, (sregp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_regp_emit((inst), 3, (sregp)); \
            } \
            break; \
            case 10: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_regp_emit((inst), 7, (sregp)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fstp_mem_size(inst, mem, size) \
    do { \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_mem_emit((inst), 3, (mem)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_mem_emit((inst), 3, (mem)); \
            } \
            break; \
            case 10: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_mem_emit((inst), 7, (mem)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fstp_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_membase_emit((inst), 3, (basereg), (disp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_membase_emit((inst), 3, (basereg), (disp)); \
            } \
            break; \
            case 10: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_membase_emit((inst), 7, (basereg), (disp)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fstp_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, (indexreg), (basereg)); \
        switch(size) \
        { \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xd9; \
                x86_64_memindex_emit((inst), 3, (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_memindex_emit((inst), 3, (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
            case 10: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_memindex_emit((inst), 7, (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
        } \
    } while(0)
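
/*
 * Usage sketch (illustrative only): unlike fst, fstp pops the fpu stack
 * and therefore also supports the 10-byte long double form. Spilling
 * st(0) to a stack slot as a float64, i.e. "fstp qword ptr [rsp + 8]":
 *
 *   unsigned char buf[16], *inst = buf;
 *   x86_64_fstp_membase_size(inst, X86_64_RSP, 8, 8);
 */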

/*
 * fistp: Convert st(0) to integer using the rounding mode in the fpu
 * control word, store it to memory and pop the fpu stack
 */
#define x86_64_fistp_mem_size(inst, mem, size) \
    do { \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_mem_emit((inst), 3, (mem)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_mem_emit((inst), 3, (mem)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_mem_emit((inst), 7, (mem)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fistp_regp_size(inst, dregp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (dregp)); \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_regp_emit((inst), 3, (dregp)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_regp_emit((inst), 3, (dregp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_regp_emit((inst), 7, (dregp)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fistp_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_membase_emit((inst), 3, (basereg), (disp)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_membase_emit((inst), 3, (basereg), (disp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_membase_emit((inst), 7, (basereg), (disp)); \
            } \
            break; \
        } \
    } while(0)
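
/*
 * Usage sketch (illustrative only): fistp rounds according to the RC
 * field of the fpu control word (round to nearest even by default); see
 * the fnstcw/fldcw sketch at the end of this file for forcing
 * truncation. Converting st(0) to a 64-bit integer stored through rdi,
 * i.e. "fistp qword ptr [rdi]":
 *
 *   unsigned char buf[16], *inst = buf;
 *   x86_64_fistp_regp_size(inst, X86_64_RDI, 8);
 */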

/*
 * frndint: Round st(0) to integer according to the rounding mode set in the fpu control word.
 */
#define x86_64_frndint(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xfc; \
    } while(0)

/*
 * fisttp: Convert st(0) to integer using truncation as the rounding mode
 * and pop the fpu stack. Available with SSE3 only.
 */
#define x86_64_fisttp_regp_size(inst, dregp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (dregp)); \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_regp_emit((inst), 1, (dregp)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_regp_emit((inst), 1, (dregp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_regp_emit((inst), 1, (dregp)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fisttp_mem_size(inst, mem, size) \
    do { \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_mem_emit((inst), 1, (mem)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_mem_emit((inst), 1, (mem)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_mem_emit((inst), 1, (mem)); \
            } \
            break; \
        } \
    } while(0)

#define x86_64_fisttp_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        switch(size) \
        { \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0xdf; \
                x86_64_membase_emit((inst), 1, (basereg), (disp)); \
            } \
            break; \
            case 4: \
            { \
                *(inst)++ = (unsigned char)0xdb; \
                x86_64_membase_emit((inst), 1, (basereg), (disp)); \
            } \
            break; \
            case 8: \
            { \
                *(inst)++ = (unsigned char)0xdd; \
                x86_64_membase_emit((inst), 1, (basereg), (disp)); \
            } \
            break; \
        } \
    } while(0)
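
/*
 * Usage sketch (illustrative only): fisttp always truncates regardless
 * of the control word, avoiding the fnstcw/fldcw save/restore dance, but
 * the emitted code requires an SSE3 capable CPU. Emitting
 * "fisttp qword ptr [rsp - 8]":
 *
 *   unsigned char buf[16], *inst = buf;
 *   x86_64_fisttp_membase_size(inst, X86_64_RSP, -8, 8);
 */

/*
 * fabs: Replace st(0) with its absolute value.
 * fchs: Change the sign of st(0).
 */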
#define x86_64_fabs(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xe1; \
    } while(0)

#define x86_64_fchs(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xe0; \
    } while(0)

/*
 * Store the fpu control word (fnstcw, the no-wait form: it does not
 * check for pending unmasked fpu exceptions first)
 */
#define x86_64_fnstcw(inst, mem) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_64_mem_emit((inst), 7, (mem)); \
    } while(0)

#define x86_64_fnstcw_membase(inst, basereg, disp) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        *(inst)++ = (unsigned char)0xd9; \
        x86_64_membase_emit((inst), 7, (basereg), (disp)); \
    } while(0)

/*
 * Load the fpu control word
 */
#define x86_64_fldcw(inst, mem) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_64_mem_emit((inst), 5, (mem)); \
    } while(0)

#define x86_64_fldcw_membase(inst, basereg, disp) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        *(inst)++ = (unsigned char)0xd9; \
        x86_64_membase_emit((inst), 5, (basereg), (disp)); \
    } while(0)
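
/*
 * Usage sketch (illustrative only): the classic pre-SSE3 sequence for a
 * truncating fistp saves the control word, sets the rounding-control
 * bits (bits 10-11) to 11b (truncate), converts, then restores the
 * original word. The stack slots below and the elided integer code that
 * modifies the saved word are assumptions for the example; only the
 * control-word loads/stores use the macros above.
 *
 *   unsigned char buf[64], *inst = buf;
 *   x86_64_fnstcw_membase(inst, X86_64_RSP, -4);
 *   ... emit integer code that copies the saved word to [rsp - 8]
 *       and ors in 0x0c00 (RC = truncate) ...
 *   x86_64_fldcw_membase(inst, X86_64_RSP, -8);
 *   x86_64_fistp_membase_size(inst, X86_64_RSP, -16, 8);
 *   x86_64_fldcw_membase(inst, X86_64_RSP, -4);
 *
 * frndint and fistp both honor the RC field, so the same pattern applies
 * to explicit rounding as well.
 */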

#ifdef __cplusplus
}
#endif

#endif /* _JIT_GEN_X86_64_H */