
/jit/jit-gen-x86-64.h
https://bitbucket.org/philburr/libjit
C Header | 5579 lines | License(s): GPL-2.0, LGPL-2.1

/*
 * jit-gen-x86-64.h - Macros for generating x86_64 code.
 *
 * Copyright (C) 2008 Southern Storm Software, Pty Ltd.
 *
 * This file is part of the libjit library.
 *
 * The libjit library is free software: you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation, either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * The libjit library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with the libjit library. If not, see
 * <http://www.gnu.org/licenses/>.
 */

#ifndef _JIT_GEN_X86_64_H
#define _JIT_GEN_X86_64_H

#include <jit/jit-defs.h>
#include "jit-gen-x86.h"

#ifdef __cplusplus
extern "C" {
#endif

/*
 * X86_64 64 bit general purpose integer registers.
 */
typedef enum
{
    X86_64_RAX = 0,
    X86_64_RCX = 1,
    X86_64_RDX = 2,
    X86_64_RBX = 3,
    X86_64_RSP = 4,
    X86_64_RBP = 5,
    X86_64_RSI = 6,
    X86_64_RDI = 7,
    X86_64_R8 = 8,
    X86_64_R9 = 9,
    X86_64_R10 = 10,
    X86_64_R11 = 11,
    X86_64_R12 = 12,
    X86_64_R13 = 13,
    X86_64_R14 = 14,
    X86_64_R15 = 15,
    X86_64_RIP = 16     /* This register encoding doesn't exist in the */
                        /* instructions. It's used for RIP relative encoding. */
} X86_64_Reg_No;

/*
 * X86-64 xmm registers.
 */
typedef enum
{
    X86_64_XMM0 = 0,
    X86_64_XMM1 = 1,
    X86_64_XMM2 = 2,
    X86_64_XMM3 = 3,
    X86_64_XMM4 = 4,
    X86_64_XMM5 = 5,
    X86_64_XMM6 = 6,
    X86_64_XMM7 = 7,
    X86_64_XMM8 = 8,
    X86_64_XMM9 = 9,
    X86_64_XMM10 = 10,
    X86_64_XMM11 = 11,
    X86_64_XMM12 = 12,
    X86_64_XMM13 = 13,
    X86_64_XMM14 = 14,
    X86_64_XMM15 = 15
} X86_64_XMM_Reg_No;

/*
 * Bits in the REX prefix byte.
 */
typedef enum
{
    X86_64_REX_B = 1,   /* 1-bit (high) extension of the ModRM r/m field, */
                        /* SIB base field, or opcode reg field, thus */
                        /* permitting access to 16 registers. */
    X86_64_REX_X = 2,   /* 1-bit (high) extension of the SIB index field, */
                        /* thus permitting access to 16 registers. */
    X86_64_REX_R = 4,   /* 1-bit (high) extension of the ModRM reg field, */
                        /* thus permitting access to 16 registers. */
    X86_64_REX_W = 8    /* 0 = Default operand size */
                        /* 1 = 64 bit operand size */
} X86_64_REX_Bits;

/*
 * Third byte of the opcodes for xmm instructions, which are encoded as:
 * Opcode1: 0xF3 (single precision) or 0xF2 (double precision),
 *          handled as a prefix.
 * Opcode2: 0x0F
 * Opcode3: one of the values below.
 */
typedef enum
{
    XMM1_MOV = 0x10,
    XMM1_MOV_REV = 0x11,
    XMM1_ADD = 0x58,
    XMM1_MUL = 0x59,
    XMM1_SUB = 0x5C,
    XMM1_DIV = 0x5E
} X86_64_XMM1_OP;

/*
 * Logical opcodes used with packed single and double precision values.
 */
typedef enum
{
    XMM_ANDP = 0x54,
    XMM_ORP = 0x56,
    XMM_XORP = 0x57
} X86_64_XMM_PLOP;

/*
 * Rounding modes for xmm rounding instructions, the mxcsr register and
 * the fpu control word.
 */
typedef enum
{
    X86_ROUND_NEAREST = 0x00,   /* Round to the nearest integer */
    X86_ROUND_DOWN = 0x01,      /* Round towards negative infinity */
    X86_ROUND_UP = 0x02,        /* Round towards positive infinity */
    X86_ROUND_ZERO = 0x03       /* Round towards zero (truncate) */
} X86_64_ROUNDMODE;

/*
 * Helper union for emitting 64 bit immediate values.
 */
typedef union
{
    jit_long val;
    unsigned char b[8];
} x86_64_imm_buf;

#define x86_64_imm_emit64(inst, imm) \
    do { \
        x86_64_imm_buf imb; \
        imb.val = (jit_long)(imm); \
        *(inst)++ = imb.b[0]; \
        *(inst)++ = imb.b[1]; \
        *(inst)++ = imb.b[2]; \
        *(inst)++ = imb.b[3]; \
        *(inst)++ = imb.b[4]; \
        *(inst)++ = imb.b[5]; \
        *(inst)++ = imb.b[6]; \
        *(inst)++ = imb.b[7]; \
    } while (0)

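/*
 * Example (illustrative): x86_64_imm_emit64(inst, 0x1122334455667788)
 * stores the bytes 88 77 66 55 44 33 22 11 - immediates are emitted in
 * little-endian order, least significant byte first.
 */
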
#define x86_64_imm_emit_max32(inst, imm, size) \
    do { \
        switch((size)) \
        { \
            case 1: \
            { \
                x86_imm_emit8(inst, (imm)); \
            } \
            break; \
            case 2: \
            { \
                x86_imm_emit16(inst, (imm)); \
            } \
            break; \
            case 4: \
            case 8: \
            { \
                x86_imm_emit32((inst), (imm)); \
            } \
            break; \
            default: \
            { \
                jit_assert(0); \
            } \
        } \
    } while(0)

#define x86_64_imm_emit_max64(inst, imm, size) \
    do { \
        switch((size)) \
        { \
            case 1: \
            { \
                x86_imm_emit8(inst, (imm)); \
            } \
            break; \
            case 2: \
            { \
                x86_imm_emit16(inst, (imm)); \
            } \
            break; \
            case 4: \
            { \
                x86_imm_emit32((inst), (imm)); \
            } \
            break; \
            case 8: \
            { \
                x86_64_imm_emit64(inst, (imm)); \
            } \
            break; \
            default: \
            { \
                jit_assert(0); \
            } \
        } \
    } while(0)

/*
 * Emit the Rex prefix.
 * The natural size is a power of 2 (1, 2, 4 or 8).
 * For accessing the low byte registers DIL, SIL, BPL and SPL we have to
 * generate a Rex prefix with the value 0x40 too.
 * To enable this, OR the natural size with 1.
 */
#define x86_64_rex(rex_bits) (0x40 | (rex_bits))
#define x86_64_rex_emit(inst, width, modrm_reg, index_reg, rm_base_opcode_reg) \
    do { \
        unsigned char __rex_bits = \
            (((width) & 8) ? X86_64_REX_W : 0) | \
            (((modrm_reg) & 8) ? X86_64_REX_R : 0) | \
            (((index_reg) & 8) ? X86_64_REX_X : 0) | \
            (((rm_base_opcode_reg) & 8) ? X86_64_REX_B : 0); \
        if((__rex_bits != 0)) \
        { \
            *(inst)++ = x86_64_rex(__rex_bits); \
        } \
        else if(((width) & 1) && ((modrm_reg & 4) || (rm_base_opcode_reg & 4))) \
        { \
            *(inst)++ = x86_64_rex(0); \
        } \
    } while(0)

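/*
 * Example (illustrative): for "mov %rax, %r8" the emitter calls
 * x86_64_rex_emit(inst, 8, 0 /+ RAX +/, 0, 8 /+ R8 +/). The width of 8
 * sets REX.W and the high bit of the r/m register R8 sets REX.B, so
 * the prefix byte is 0x40 | 8 | 1 = 0x49.
 */
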
/*
 * Helper for emitting the rex prefix for opcodes with 64bit default size.
 */
#define x86_64_rex_emit64(inst, width, modrm_reg, index_reg, rm_base_opcode_reg) \
    do { \
        x86_64_rex_emit((inst), 0, (modrm_reg), (index_reg), (rm_base_opcode_reg)); \
    } while(0)

/* In 64 bit mode, all registers have a low byte subregister */
#undef X86_IS_BYTE_REG
#define X86_IS_BYTE_REG(reg) 1

#define x86_64_reg_emit(inst, r, regno) \
    do { \
        x86_reg_emit((inst), ((r) & 0x7), ((regno) & 0x7)); \
    } while(0)

#define x86_64_mem_emit(inst, r, disp) \
    do { \
        x86_address_byte ((inst), 0, ((r) & 0x7), 4); \
        x86_address_byte ((inst), 0, 4, 5); \
        x86_imm_emit32((inst), (disp)); \
    } while(0)

#define x86_64_mem64_emit(inst, r, disp) \
    do { \
        x86_address_byte ((inst), 0, ((r) & 0x7), 4); \
        x86_address_byte ((inst), 0, 4, 5); \
        x86_64_imm_emit64((inst), (disp)); \
    } while(0)

#define x86_64_membase_emit(inst, reg, basereg, disp) \
    do { \
        if((basereg) == X86_64_RIP) \
        { \
            x86_address_byte((inst), 0, ((reg) & 0x7), 5); \
            x86_imm_emit32((inst), (disp)); \
        } \
        else \
        { \
            x86_membase_emit((inst), ((reg) & 0x7), ((basereg) & 0x7), (disp)); \
        } \
    } while(0)

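/*
 * Example (illustrative): x86_64_membase_emit with basereg == X86_64_RIP
 * emits mod = 00, r/m = 101 plus a 32 bit displacement. In 64 bit mode
 * that encoding means "disp32 relative to the next instruction" rather
 * than the legacy absolute disp32 form, which is why RIP gets its own
 * pseudo register number above.
 */
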
#define x86_64_memindex_emit(inst, r, basereg, disp, indexreg, shift) \
    do { \
        x86_memindex_emit((inst), ((r) & 0x7), ((basereg) & 0x7), (disp), ((indexreg) & 0x7), (shift)); \
    } while(0)

/*
 * RSP, RBP and the corresponding upper registers (R12 and R13) can't be used
 * for relative addressing without displacement because their codes are used
 * for encoding addressing modes with displacement.
 * So we do a membase addressing in this case with a zero offset.
 */
#define x86_64_regp_emit(inst, r, regno) \
    do { \
        switch(regno) \
        { \
            case X86_64_RSP: \
            case X86_64_RBP: \
            case X86_64_R12: \
            case X86_64_R13: \
            { \
                x86_64_membase_emit((inst), (r), (regno), 0); \
            } \
            break; \
            default: \
            { \
                x86_address_byte((inst), 0, ((r) & 0x7), ((regno) & 0x7)); \
            } \
            break; \
        } \
    } while(0)

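/*
 * Example (illustrative): "(%rsp)" and "(%r12)" end up encoded through
 * a SIB byte, while "(%rbp)" and "(%r13)" become "0(%reg)" with a zero
 * 8 bit displacement, because their low register codes (100 and 101)
 * are reused for the SIB and disp32/RIP-relative addressing forms.
 */
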
/*
 * Helper to encode an opcode where the encoding is different between
 * the 8 bit and the 16 ... 64 bit widths in the following way:
 * 8 bit:         opcode given
 * 16 ... 64 bit: opcode given | 0x1
 */
#define x86_64_opcode1_emit(inst, opc, size) \
    do { \
        switch ((size)) \
        { \
            case 1: \
            { \
                *(inst)++ = (unsigned char)(opc); \
            } \
            break; \
            case 2: \
            case 4: \
            case 8: \
            { \
                *(inst)++ = ((unsigned char)(opc) | 0x1); \
            } \
            break; \
            default: \
            { \
                jit_assert(0); \
            } \
        } \
    } while(0)

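/*
 * Example (illustrative): with opc == 0xfe this emits 0xFE ("inc/dec
 * r/m8") for size 1 and 0xFF ("inc/dec r/m16/32/64") for the wider
 * sizes, matching the usual x86 byte/word opcode pairing.
 */
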
/*
 * Macros to implement the simple opcodes.
 * Note that the switches below deliberately fall through from case 2
 * (which only emits the 0x66 operand size prefix) into the shared
 * 32/64 bit case.
 */
#define x86_64_alu_reg_reg_size(inst, opc, dreg, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sreg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_reg_emit((inst), (dreg), (sreg)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sreg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_reg_emit((inst), (dreg), (sreg)); \
            } \
        } \
    } while(0)

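/*
 * Example (illustrative): x86_64_alu_reg_reg_size(inst, 0, X86_64_RAX,
 * X86_64_RCX, 8) emits 48 03 C1, i.e. "addq %rcx, %rax" in AT&T
 * syntax: REX.W, the group1 opcode (0 << 3) + 3 = 0x03, and a ModRM
 * byte with rax in the reg field and rcx in the r/m field.
 */
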
#define x86_64_alu_regp_reg_size(inst, opc, dregp, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (dregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_regp_emit((inst), (sreg), (dregp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (dregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_regp_emit((inst), (sreg), (dregp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_mem_reg_size(inst, opc, mem, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_mem_emit((inst), (sreg), (mem)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_mem_emit((inst), (sreg), (mem)); \
            } \
        } \
    } while(0)

#define x86_64_alu_membase_reg_size(inst, opc, basereg, disp, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_membase_emit((inst), (sreg), (basereg), (disp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_membase_emit((inst), (sreg), (basereg), (disp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_memindex_reg_size(inst, opc, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (sreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3); \
                x86_64_memindex_emit((inst), (sreg), (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (sreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
                x86_64_memindex_emit((inst), (sreg), (basereg), (disp), (indexreg), (shift)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_regp_size(inst, opc, dreg, sregp, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_regp_emit((inst), (dreg), (sregp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (sregp)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_regp_emit((inst), (dreg), (sregp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_mem_size(inst, opc, dreg, mem, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_mem_emit((inst), (dreg), (mem)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, 0); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_mem_emit((inst), (dreg), (mem)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_membase_size(inst, opc, dreg, basereg, disp, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), 0, (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
            } \
        } \
    } while(0)

#define x86_64_alu_reg_memindex_size(inst, opc, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        switch(size) \
        { \
            case 1: \
            { \
                x86_64_rex_emit(inst, size, (dreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
                x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
            } \
            break; \
            case 2: \
            { \
                *(inst)++ = (unsigned char)0x66; \
            } \
            case 4: \
            case 8: \
            { \
                x86_64_rex_emit(inst, size, (dreg), (indexreg), (basereg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
            } \
        } \
    } while(0)

/*
 * The immediate value has to be at most 32 bit wide.
 */
#define x86_64_alu_reg_imm_size(inst, opc, dreg, imm, size) \
    do { \
        if((dreg) == X86_64_RAX) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    *(inst)++ = (((unsigned char)(opc)) << 3) + 4; \
                    x86_imm_emit8((inst), (imm)); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
        else if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_reg_emit((inst), (opc), (dreg)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_reg_emit((inst), (opc), (dreg)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_reg_emit((inst), (opc), (dreg)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (dreg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_reg_emit((inst), (opc), (dreg)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

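/*
 * Example (illustrative): x86_64_alu_reg_imm_size(inst, 0, X86_64_RCX,
 * 1, 8) takes the sign-extended 8 bit form and emits 48 83 C1 01
 * ("addq $1, %rcx"), while the same add with %rax as destination takes
 * the accumulator short form 48 05 01 00 00 00 ("addq $1, %rax").
 */
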
#define x86_64_alu_regp_imm_size(inst, opc, reg, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_regp_emit((inst), (opc), (reg)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_regp_emit((inst), (opc), (reg)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_regp_emit((inst), (opc), (reg)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit(inst, size, 0, 0, (reg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_regp_emit((inst), (opc), (reg)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

#define x86_64_alu_mem_imm_size(inst, opc, mem, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_mem_emit((inst), (opc), (mem)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_mem_emit((inst), (opc), (mem)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_mem_emit((inst), (opc), (mem)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, 0); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_mem_emit((inst), (opc), (mem)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

#define x86_64_alu_membase_imm_size(inst, opc, basereg, disp, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_membase_emit((inst), (opc), (basereg), (disp)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

#define x86_64_alu_memindex_imm_size(inst, opc, basereg, disp, indexreg, shift, imm, size) \
    do { \
        if(x86_is_imm8((imm))) \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                } \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x83; \
                } \
            } \
            x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
            x86_imm_emit8((inst), (imm)); \
        } \
        else \
        { \
            switch(size) \
            { \
                case 1: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x80; \
                    x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
                    x86_imm_emit8((inst), (imm)); \
                    jit_assert(1); \
                } \
                break; \
                case 2: \
                { \
                    *(inst)++ = (unsigned char)0x66; \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
                    x86_imm_emit16((inst), (imm)); \
                } \
                break; \
                case 4: \
                case 8: \
                { \
                    x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
                    *(inst)++ = (unsigned char)0x81; \
                    x86_64_memindex_emit((inst), (opc), (basereg), (disp), (indexreg), (shift)); \
                    x86_imm_emit32((inst), (imm)); \
                } \
            } \
        } \
    } while(0)

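/*
 * Example (illustrative): x86_64_alu_memindex_imm_size(inst, 0,
 * X86_64_RAX, 16, X86_64_RCX, 2, 5, 8) emits 48 83 44 88 10 05, i.e.
 * "addq $5, 16(%rax,%rcx,4)": REX.W, 0x83, a ModRM/SIB pair for the
 * scaled-index address, the 8 bit displacement and the 8 bit immediate.
 */
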
/*
 * Instructions with one opcode (plus optional r/m)
 */

/*
 * Unary opcodes
 */
#define x86_64_alu1_reg(inst, opc1, r, reg) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (reg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_reg_emit((inst), (r), (reg)); \
    } while(0)

#define x86_64_alu1_regp(inst, opc1, r, regp) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (regp)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_regp_emit((inst), (r), (regp)); \
    } while(0)

#define x86_64_alu1_mem(inst, opc1, r, mem) \
    do { \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_mem_emit((inst), (r), (mem)); \
    } while(0)

#define x86_64_alu1_membase(inst, opc1, r, basereg, disp) \
    do { \
        x86_64_rex_emit((inst), 0, 0, 0, (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_membase_emit((inst), (r), (basereg), (disp)); \
    } while(0)

#define x86_64_alu1_memindex(inst, opc1, r, basereg, disp, indexreg, shift) \
    do { \
        x86_64_rex_emit((inst), 0, 0, (indexreg), (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_memindex_emit((inst), (r), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

#define x86_64_alu1_reg_size(inst, opc1, r, reg, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, (reg)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_reg_emit((inst), (r), (reg)); \
    } while(0)

#define x86_64_alu1_regp_size(inst, opc1, r, regp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, (regp)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_regp_emit((inst), (r), (regp)); \
    } while(0)

#define x86_64_alu1_mem_size(inst, opc1, r, mem, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, 0); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_mem_emit((inst), (r), (mem)); \
    } while(0)

#define x86_64_alu1_membase_size(inst, opc1, r, basereg, disp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, 0, (basereg)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_membase_emit((inst), (r), (basereg), (disp)); \
    } while(0)

#define x86_64_alu1_memindex_size(inst, opc1, r, basereg, disp, indexreg, shift, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), 0, (indexreg), (basereg)); \
        x86_64_opcode1_emit((inst), (opc1), (size)); \
        x86_64_memindex_emit((inst), (r), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

#define x86_64_alu1_reg_reg_size(inst, opc1, dreg, sreg, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sreg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_reg_emit((inst), (dreg), (sreg)); \
    } while(0)

#define x86_64_alu1_reg_regp_size(inst, opc1, dreg, sregp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sregp)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_regp_emit((inst), (dreg), (sregp)); \
    } while(0)

#define x86_64_alu1_reg_mem_size(inst, opc1, dreg, mem, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, 0); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_mem_emit((inst), (dreg), (mem)); \
    } while(0)

#define x86_64_alu1_reg_membase_size(inst, opc1, dreg, basereg, disp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
    } while(0)

#define x86_64_alu1_reg_memindex_size(inst, opc1, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

#define x86_64_alu2_reg_reg_size(inst, opc1, opc2, dreg, sreg, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sreg)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_reg_emit((inst), (dreg), (sreg)); \
    } while(0)

#define x86_64_alu2_reg_regp_size(inst, opc1, opc2, dreg, sregp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (sregp)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_regp_emit((inst), (dreg), (sregp)); \
    } while(0)

#define x86_64_alu2_reg_mem_size(inst, opc1, opc2, dreg, mem, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, 0); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_mem_emit((inst), (dreg), (mem)); \
    } while(0)

#define x86_64_alu2_reg_membase_size(inst, opc1, opc2, dreg, basereg, disp, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), 0, (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_membase_emit((inst), (dreg), (basereg), (disp)); \
    } while(0)

#define x86_64_alu2_reg_memindex_size(inst, opc1, opc2, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        if((size) == 2) \
        { \
            *(inst)++ = (unsigned char)0x66; \
        } \
        x86_64_rex_emit((inst), (size), (dreg), (indexreg), (basereg)); \
        *(inst)++ = (unsigned char)(opc1); \
        *(inst)++ = (unsigned char)(opc2); \
        x86_64_memindex_emit((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
    } while(0)

/*
 * Group1 general instructions
 */
#define x86_64_alu_reg_reg(inst, opc, dreg, sreg) \
    do { \
        x86_64_alu_reg_reg_size((inst), (opc), (dreg), (sreg), 8); \
    } while(0)

#define x86_64_alu_reg_imm(inst, opc, dreg, imm) \
    do { \
        x86_64_alu_reg_imm_size((inst), (opc), (dreg), (imm), 8); \
    } while(0)

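/*
 * Note (illustrative): the "opc" argument of the alu macros above
 * selects the group1 operation via the ModRM reg field / opcode bits:
 * 0 = add, 1 = or, 2 = adc, 3 = sbb, 4 = and, 5 = sub, 6 = xor,
 * 7 = cmp. The per-instruction wrappers below simply hard-code these
 * values.
 */
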
/*
 * ADC: Add with carry
 */
#define x86_64_adc_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 2, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_adc_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 2, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_adc_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 2, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_adc_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 2, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_adc_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 2, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_adc_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 2, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_adc_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 2, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_adc_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 2, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_adc_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 2, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_adc_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 2, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_adc_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 2, (reg), (imm), (size)); \
    } while(0)

#define x86_64_adc_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 2, mem, imm, size); \
    } while(0)

#define x86_64_adc_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 2, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_adc_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 2, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * ADD
 */
#define x86_64_add_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 0, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_add_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 0, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_add_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 0, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_add_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 0, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_add_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 0, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_add_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 0, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_add_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 0, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_add_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 0, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_add_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 0, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_add_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 0, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_add_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 0, (reg), (imm), (size)); \
    } while(0)

#define x86_64_add_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 0, mem, imm, size); \
    } while(0)

#define x86_64_add_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 0, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_add_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 0, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * AND
 */
#define x86_64_and_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 4, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_and_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 4, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_and_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 4, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_and_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 4, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_and_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 4, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_and_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 4, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_and_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 4, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_and_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 4, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_and_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 4, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_and_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 4, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_and_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 4, (reg), (imm), (size)); \
    } while(0)

#define x86_64_and_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 4, mem, imm, size); \
    } while(0)

#define x86_64_and_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 4, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_and_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 4, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * CMP: compare
 */
#define x86_64_cmp_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 7, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 7, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 7, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 7, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 7, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_cmp_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 7, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_cmp_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 7, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_cmp_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 7, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_cmp_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 7, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_cmp_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 7, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_cmp_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 7, (reg), (imm), (size)); \
    } while(0)

#define x86_64_cmp_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 7, mem, imm, size); \
    } while(0)

#define x86_64_cmp_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 7, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_cmp_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 7, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * OR
 */
#define x86_64_or_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 1, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_or_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 1, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_or_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 1, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_or_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 1, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_or_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 1, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_or_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 1, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_or_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 1, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_or_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 1, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_or_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 1, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_or_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 1, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_or_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 1, (reg), (imm), (size)); \
    } while(0)

#define x86_64_or_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 1, mem, imm, size); \
    } while(0)

#define x86_64_or_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 1, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_or_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 1, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * SBB: Subtract with borrow (subtracts the source operand plus the
 * carry flag from the destination)
 */
#define x86_64_sbb_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 3, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 3, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 3, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 3, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 3, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_sbb_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 3, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_sbb_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 3, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_sbb_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 3, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_sbb_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 3, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_sbb_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 3, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_sbb_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 3, (reg), (imm), (size)); \
    } while(0)

#define x86_64_sbb_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 3, mem, imm, size); \
    } while(0)

#define x86_64_sbb_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 3, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_sbb_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 3, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * SUB: Subtract
 */
#define x86_64_sub_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 5, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_sub_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 5, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_sub_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 5, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_sub_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 5, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_sub_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 5, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_sub_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 5, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_sub_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 5, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_sub_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 5, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_sub_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 5, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_sub_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 5, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_sub_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 5, (reg), (imm), (size)); \
    } while(0)

#define x86_64_sub_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 5, mem, imm, size); \
    } while(0)

#define x86_64_sub_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 5, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_sub_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 5, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * XOR
 */
#define x86_64_xor_reg_reg_size(inst, dreg, sreg, size) \
    do { \
        x86_64_alu_reg_reg_size((inst), 6, (dreg), (sreg), (size)); \
    } while(0)

#define x86_64_xor_regp_reg_size(inst, dregp, sreg, size) \
    do { \
        x86_64_alu_regp_reg_size((inst), 6, (dregp), (sreg), (size)); \
    } while(0)

#define x86_64_xor_mem_reg_size(inst, mem, sreg, size) \
    do { \
        x86_64_alu_mem_reg_size((inst), 6, (mem), (sreg), (size)); \
    } while(0)

#define x86_64_xor_membase_reg_size(inst, basereg, disp, sreg, size) \
    do { \
        x86_64_alu_membase_reg_size((inst), 6, (basereg), (disp), (sreg), (size)); \
    } while(0)

#define x86_64_xor_memindex_reg_size(inst, basereg, disp, indexreg, shift, sreg, size) \
    do { \
        x86_64_alu_memindex_reg_size((inst), 6, (basereg), (disp), (indexreg), (shift), (sreg), (size)); \
    } while(0)

#define x86_64_xor_reg_regp_size(inst, dreg, sregp, size) \
    do { \
        x86_64_alu_reg_regp_size((inst), 6, (dreg), (sregp), (size)); \
    } while(0)

#define x86_64_xor_reg_mem_size(inst, dreg, mem, size) \
    do { \
        x86_64_alu_reg_mem_size((inst), 6, (dreg), (mem), (size)); \
    } while(0)

#define x86_64_xor_reg_membase_size(inst, dreg, basereg, disp, size) \
    do { \
        x86_64_alu_reg_membase_size((inst), 6, (dreg), (basereg), (disp), (size)); \
    } while(0)

#define x86_64_xor_reg_memindex_size(inst, dreg, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu_reg_memindex_size((inst), 6, (dreg), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

#define x86_64_xor_reg_imm_size(inst, dreg, imm, size) \
    do { \
        x86_64_alu_reg_imm_size((inst), 6, (dreg), (imm), (size)); \
    } while(0)

#define x86_64_xor_regp_imm_size(inst, reg, imm, size) \
    do { \
        x86_64_alu_regp_imm_size((inst), 6, (reg), (imm), (size)); \
    } while(0)

#define x86_64_xor_mem_imm_size(inst, mem, imm, size) \
    do { \
        x86_64_alu_mem_imm_size(inst, 6, mem, imm, size); \
    } while(0)

#define x86_64_xor_membase_imm_size(inst, basereg, disp, imm, size) \
    do { \
        x86_64_alu_membase_imm_size((inst), 6, (basereg), (disp), (imm), (size)); \
    } while(0)

#define x86_64_xor_memindex_imm_size(inst, basereg, disp, indexreg, shift, imm, size) \
    do { \
        x86_64_alu_memindex_imm_size((inst), 6, (basereg), (disp), (indexreg), (shift), (imm), (size)); \
    } while(0)

/*
 * dec
 */
#define x86_64_dec_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xfe, 1, (reg), (size)); \
    } while(0)

#define x86_64_dec_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xfe, 1, (regp), (size)); \
    } while(0)

#define x86_64_dec_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xfe, 1, (mem), (size)); \
    } while(0)

#define x86_64_dec_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xfe, 1, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_dec_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xfe, 1, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

/*
 * div: unsigned division RDX:RAX / operand
 */
#define x86_64_div_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xf6, 6, (reg), (size)); \
    } while(0)

#define x86_64_div_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xf6, 6, (regp), (size)); \
    } while(0)

#define x86_64_div_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xf6, 6, (mem), (size)); \
    } while(0)

#define x86_64_div_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xf6, 6, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_div_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xf6, 6, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

/*
 * idiv: signed division RDX:RAX / operand
 */
#define x86_64_idiv_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xf6, 7, (reg), (size)); \
    } while(0)

#define x86_64_idiv_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xf6, 7, (regp), (size)); \
    } while(0)

#define x86_64_idiv_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xf6, 7, (mem), (size)); \
    } while(0)

#define x86_64_idiv_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xf6, 7, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_idiv_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xf6, 7, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

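/*
 * Example (illustrative): x86_64_idiv_reg_size(inst, X86_64_RCX, 8)
 * emits 48 F7 F9 ("idivq %rcx"), dividing the 128 bit value in
 * RDX:RAX by %rcx; the quotient lands in RAX and the remainder in RDX.
 */
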
/*
 * inc
 */
#define x86_64_inc_reg_size(inst, reg, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xfe, 0, (reg), (size)); \
    } while(0)

#define x86_64_inc_regp_size(inst, regp, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xfe, 0, (regp), (size)); \
    } while(0)

#define x86_64_inc_mem_size(inst, mem, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xfe, 0, (mem), (size)); \
    } while(0)

#define x86_64_inc_membase_size(inst, basereg, disp, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xfe, 0, (basereg), (disp), (size)); \
    } while(0)

#define x86_64_inc_memindex_size(inst, basereg, disp, indexreg, shift, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xfe, 0, (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

/*
 * mul: multiply RDX:RAX = RAX * operand
 * is_signed == 0 -> unsigned multiplication,
 * otherwise signed multiplication.
 */
#define x86_64_mul_reg_issigned_size(inst, reg, is_signed, size) \
    do { \
        x86_64_alu1_reg_size((inst), 0xf6, ((is_signed) ? 5 : 4), (reg), (size)); \
    } while(0)

#define x86_64_mul_regp_issigned_size(inst, regp, is_signed, size) \
    do { \
        x86_64_alu1_regp_size((inst), 0xf6, ((is_signed) ? 5 : 4), (regp), (size)); \
    } while(0)

#define x86_64_mul_mem_issigned_size(inst, mem, is_signed, size) \
    do { \
        x86_64_alu1_mem_size((inst), 0xf6, ((is_signed) ? 5 : 4), (mem), (size)); \
    } while(0)

#define x86_64_mul_membase_issigned_size(inst, basereg, disp, is_signed, size) \
    do { \
        x86_64_alu1_membase_size((inst), 0xf6, ((is_signed) ? 5 : 4), (basereg), (disp), (size)); \
    } while(0)

/* The listing was cut off mid-macro here; the tail below is
   reconstructed following the pattern of the sibling macros above. */
#define x86_64_mul_memindex_issigned_size(inst, basereg, disp, indexreg, shift, is_signed, size) \
    do { \
        x86_64_alu1_memindex_size((inst), 0xf6, ((is_signed) ? 5 : 4), (basereg), (disp), (indexreg), (shift), (size)); \
    } while(0)

(Listing truncated here; the remainder of the 5579 line header is available at the repository URL above.)