/*
 * cddl/contrib/opensolaris/lib/libdtrace/common/dt_cg.c
 * (listing obtained via https://bitbucket.org/freebsd/freebsd-head/)
 */

  1. /*
  2. * CDDL HEADER START
  3. *
  4. * The contents of this file are subject to the terms of the
  5. * Common Development and Distribution License, Version 1.0 only
  6. * (the "License"). You may not use this file except in compliance
  7. * with the License.
  8. *
  9. * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
  10. * or http://www.opensolaris.org/os/licensing.
  11. * See the License for the specific language governing permissions
  12. * and limitations under the License.
  13. *
  14. * When distributing Covered Code, include this CDDL HEADER in each
  15. * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
  16. * If applicable, add the following below this CDDL HEADER, with the
  17. * fields enclosed by brackets "[]" replaced with your own identifying
  18. * information: Portions Copyright [yyyy] [name of copyright owner]
  19. *
  20. * CDDL HEADER END
  21. */
  22. /*
  23. * Copyright 2005 Sun Microsystems, Inc. All rights reserved.
  24. * Use is subject to license terms.
  25. */
  26. #pragma ident "%Z%%M% %I% %E% SMI"
  27. #include <sys/types.h>
  28. #include <sys/sysmacros.h>
  29. #include <sys/isa_defs.h>
  30. #include <strings.h>
  31. #include <stdlib.h>
  32. #include <setjmp.h>
  33. #include <assert.h>
  34. #include <errno.h>
  35. #include <dt_impl.h>
  36. #include <dt_grammar.h>
  37. #include <dt_parser.h>
  38. #include <dt_provider.h>
  39. static void dt_cg_node(dt_node_t *, dt_irlist_t *, dt_regset_t *);
  40. static dt_irnode_t *
  41. dt_cg_node_alloc(uint_t label, dif_instr_t instr)
  42. {
  43. dt_irnode_t *dip = malloc(sizeof (dt_irnode_t));
  44. if (dip == NULL)
  45. longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
  46. dip->di_label = label;
  47. dip->di_instr = instr;
  48. dip->di_extern = NULL;
  49. dip->di_next = NULL;
  50. return (dip);
  51. }
  52. /*
  53. * Code generator wrapper function for ctf_member_info. If we are given a
  54. * reference to a forward declaration tag, search the entire type space for
  55. * the actual definition and then call ctf_member_info on the result.
  56. */
  57. static ctf_file_t *
  58. dt_cg_membinfo(ctf_file_t *fp, ctf_id_t type, const char *s, ctf_membinfo_t *mp)
  59. {
  60. while (ctf_type_kind(fp, type) == CTF_K_FORWARD) {
  61. char n[DT_TYPE_NAMELEN];
  62. dtrace_typeinfo_t dtt;
  63. if (ctf_type_name(fp, type, n, sizeof (n)) == NULL ||
  64. dt_type_lookup(n, &dtt) == -1 || (
  65. dtt.dtt_ctfp == fp && dtt.dtt_type == type))
  66. break; /* unable to improve our position */
  67. fp = dtt.dtt_ctfp;
  68. type = ctf_type_resolve(fp, dtt.dtt_type);
  69. }
  70. if (ctf_member_info(fp, type, s, mp) == CTF_ERR)
  71. return (NULL); /* ctf_errno is set for us */
  72. return (fp);
  73. }
  74. static void
  75. dt_cg_xsetx(dt_irlist_t *dlp, dt_ident_t *idp, uint_t lbl, int reg, uint64_t x)
  76. {
  77. int flag = idp != NULL ? DT_INT_PRIVATE : DT_INT_SHARED;
  78. int intoff = dt_inttab_insert(yypcb->pcb_inttab, x, flag);
  79. dif_instr_t instr = DIF_INSTR_SETX((uint_t)intoff, reg);
  80. if (intoff == -1)
  81. longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
  82. if (intoff > DIF_INTOFF_MAX)
  83. longjmp(yypcb->pcb_jmpbuf, EDT_INT2BIG);
  84. dt_irlist_append(dlp, dt_cg_node_alloc(lbl, instr));
  85. if (idp != NULL)
  86. dlp->dl_last->di_extern = idp;
  87. }
/*
 * Convenience wrapper around dt_cg_xsetx() for the common case: load the
 * immediate x into 'reg' with no label and no external identifier.
 */
static void
dt_cg_setx(dt_irlist_t *dlp, int reg, uint64_t x)
{
	dt_cg_xsetx(dlp, NULL, DT_LBL_NONE, reg, x);
}
  93. /*
  94. * When loading bit-fields, we want to convert a byte count in the range
  95. * 1-8 to the closest power of 2 (e.g. 3->4, 5->8, etc). The clp2() function
  96. * is a clever implementation from "Hacker's Delight" by Henry Warren, Jr.
  97. */
  98. static size_t
  99. clp2(size_t x)
  100. {
  101. x--;
  102. x |= (x >> 1);
  103. x |= (x >> 2);
  104. x |= (x >> 4);
  105. x |= (x >> 8);
  106. x |= (x >> 16);
  107. return (x + 1);
  108. }
/*
 * Lookup the correct load opcode to use for the specified node and CTF type.
 * We determine the size and convert it to a 3-bit index.  Our lookup table
 * is constructed to use a 5-bit index, consisting of the 3-bit size 0-7, a
 * bit for the sign, and a bit for userland address.  For example, a 4-byte
 * signed load from userland would be at the following table index:
 * user=1 sign=1 size=4 => binary index 11011 = decimal index 27
 */
static uint_t
dt_cg_load(dt_node_t *dnp, ctf_file_t *ctfp, ctf_id_t type)
{
	static const uint_t ops[] = {
		DIF_OP_LDUB,	DIF_OP_LDUH,	0,	DIF_OP_LDUW,
		0,		0,		0,	DIF_OP_LDX,
		DIF_OP_LDSB,	DIF_OP_LDSH,	0,	DIF_OP_LDSW,
		0,		0,		0,	DIF_OP_LDX,
		DIF_OP_ULDUB,	DIF_OP_ULDUH,	0,	DIF_OP_ULDUW,
		0,		0,		0,	DIF_OP_ULDX,
		DIF_OP_ULDSB,	DIF_OP_ULDSH,	0,	DIF_OP_ULDSW,
		0,		0,		0,	DIF_OP_ULDX,
	};

	ctf_encoding_t e;
	ssize_t size;

	/*
	 * If we're loading a bit-field, the size of our load is found by
	 * rounding cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).
	 */
	if ((dnp->dn_flags & DT_NF_BITFIELD) &&
	    ctf_type_encoding(ctfp, type, &e) != CTF_ERR)
		size = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY);
	else
		size = ctf_type_size(ctfp, type);

	/* only power-of-two sizes 1..8 map to a DIF load opcode */
	if (size < 1 || size > 8 || (size & (size - 1)) != 0) {
		xyerror(D_UNKNOWN, "internal error -- cg cannot load "
		    "size %ld when passed by value\n", (long)size);
	}

	size--; /* convert size to 3-bit index */

	if (dnp->dn_flags & DT_NF_SIGNED)
		size |= 0x08;	/* select the signed (LDS*) rows */
	if (dnp->dn_flags & DT_NF_USERLAND)
		size |= 0x10;	/* select the userland (ULD*) rows */

	return (ops[size]);
}
  153. static void
  154. dt_cg_ptrsize(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
  155. uint_t op, int dreg)
  156. {
  157. ctf_file_t *ctfp = dnp->dn_ctfp;
  158. ctf_arinfo_t r;
  159. dif_instr_t instr;
  160. ctf_id_t type;
  161. uint_t kind;
  162. ssize_t size;
  163. int sreg;
  164. if ((sreg = dt_regset_alloc(drp)) == -1)
  165. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  166. type = ctf_type_resolve(ctfp, dnp->dn_type);
  167. kind = ctf_type_kind(ctfp, type);
  168. assert(kind == CTF_K_POINTER || kind == CTF_K_ARRAY);
  169. if (kind == CTF_K_ARRAY) {
  170. if (ctf_array_info(ctfp, type, &r) != 0) {
  171. yypcb->pcb_hdl->dt_ctferr = ctf_errno(ctfp);
  172. longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
  173. }
  174. type = r.ctr_contents;
  175. } else
  176. type = ctf_type_reference(ctfp, type);
  177. if ((size = ctf_type_size(ctfp, type)) == 1)
  178. return; /* multiply or divide by one can be omitted */
  179. dt_cg_setx(dlp, sreg, size);
  180. instr = DIF_INSTR_FMT(op, dreg, sreg, dreg);
  181. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  182. dt_regset_free(drp, sreg);
  183. }
/*
 * If the result of a "." or "->" operation is a bit-field, we use this routine
 * to generate an epilogue to the load instruction that extracts the value.  In
 * the diagrams below the "ld??" is the load instruction that is generated to
 * load the containing word that is generating prior to calling this function.
 *
 * Epilogue for unsigned fields:	Epilogue for signed fields:
 *
 * ldu?	[r1], r1			lds? [r1], r1
 * setx	USHIFT, r2			setx 64 - SSHIFT, r2
 * srl	r1, r2, r1			sll  r1, r2, r1
 * setx	(1 << bits) - 1, r2		setx 64 - bits, r2
 * and	r1, r2, r1			sra  r1, r2, r1
 *
 * The *SHIFT constants above changes value depending on the endian-ness of our
 * target architecture.  Refer to the comments below for more details.
 */
static void
dt_cg_field_get(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
    ctf_file_t *fp, const ctf_membinfo_t *mp)
{
	ctf_encoding_t e;
	dif_instr_t instr;
	uint64_t shift;
	int r1, r2;

	/* a field wider than 64 bits cannot be extracted into one register */
	if (ctf_type_encoding(fp, mp->ctm_type, &e) != 0 || e.cte_bits > 64) {
		xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
		    "bits %u\n", mp->ctm_offset, mp->ctm_type, e.cte_bits);
	}

	assert(dnp->dn_op == DT_TOK_PTR || dnp->dn_op == DT_TOK_DOT);

	/* r1 already holds the loaded containing word (the left operand) */
	r1 = dnp->dn_left->dn_reg;

	if ((r2 = dt_regset_alloc(drp)) == -1)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);

	/*
	 * On little-endian architectures, ctm_offset counts from the right so
	 * ctm_offset % NBBY itself is the amount we want to shift right to
	 * move the value bits to the little end of the register to mask them.
	 * On big-endian architectures, ctm_offset counts from the left so we
	 * must subtract (ctm_offset % NBBY + cte_bits) from the size in bits
	 * we used for the load.  The size of our load in turn is found by
	 * rounding cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).  These
	 * properties are used to compute shift as USHIFT or SSHIFT, below.
	 */
	if (dnp->dn_flags & DT_NF_SIGNED) {
		/* sll then sra: sign-extends the field into the full word */
#if BYTE_ORDER == _BIG_ENDIAN
		shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
		    mp->ctm_offset % NBBY;
#else
		shift = mp->ctm_offset % NBBY + e.cte_bits;
#endif
		dt_cg_setx(dlp, r2, 64 - shift);
		instr = DIF_INSTR_FMT(DIF_OP_SLL, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_cg_setx(dlp, r2, 64 - e.cte_bits);
		instr = DIF_INSTR_FMT(DIF_OP_SRA, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		/* srl then and: isolates the field as an unsigned value */
#if BYTE_ORDER == _BIG_ENDIAN
		shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
		    (mp->ctm_offset % NBBY + e.cte_bits);
#else
		shift = mp->ctm_offset % NBBY;
#endif
		dt_cg_setx(dlp, r2, shift);
		instr = DIF_INSTR_FMT(DIF_OP_SRL, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_cg_setx(dlp, r2, (1ULL << e.cte_bits) - 1);
		instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	}

	dt_regset_free(drp, r2);
}
/*
 * If the destination of a store operation is a bit-field, we use this routine
 * to generate a prologue to the store instruction that loads the surrounding
 * bits, clears the destination field, and ORs in the new value of the field.
 * In the diagram below the "st?" is the store instruction that is generated to
 * store the containing word that is generating after calling this function.
 *
 * ld	[dst->dn_reg], r1
 * setx	~(((1 << cte_bits) - 1) << (ctm_offset % NBBY)), r2
 * and	r1, r2, r1
 *
 * setx	(1 << cte_bits) - 1, r2
 * and	src->dn_reg, r2, r2
 * setx	ctm_offset % NBBY, r3
 * sll	r2, r3, r2
 *
 * or	r1, r2, r1
 * st?	r1, [dst->dn_reg]
 *
 * This routine allocates a new register to hold the value to be stored and
 * returns it.  The caller is responsible for freeing this register later.
 */
static int
dt_cg_field_set(dt_node_t *src, dt_irlist_t *dlp,
    dt_regset_t *drp, dt_node_t *dst)
{
	uint64_t cmask, fmask, shift;
	dif_instr_t instr;
	int r1, r2, r3;

	ctf_membinfo_t m;
	ctf_encoding_t e;
	ctf_file_t *fp, *ofp;
	ctf_id_t type;

	assert(dst->dn_op == DT_TOK_PTR || dst->dn_op == DT_TOK_DOT);
	assert(dst->dn_right->dn_kind == DT_NODE_IDENT);

	fp = dst->dn_left->dn_ctfp;
	type = ctf_type_resolve(fp, dst->dn_left->dn_type);

	/* for "->", step through the pointer to the pointed-at type */
	if (dst->dn_op == DT_TOK_PTR) {
		type = ctf_type_reference(fp, type);
		type = ctf_type_resolve(fp, type);
	}

	/* resolve the member; dt_cg_membinfo() also chases forward tags */
	if ((fp = dt_cg_membinfo(ofp = fp, type,
	    dst->dn_right->dn_string, &m)) == NULL) {
		yypcb->pcb_hdl->dt_ctferr = ctf_errno(ofp);
		longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
	}

	if (ctf_type_encoding(fp, m.ctm_type, &e) != 0 || e.cte_bits > 64) {
		xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
		    "bits %u\n", m.ctm_offset, m.ctm_type, e.cte_bits);
	}

	if ((r1 = dt_regset_alloc(drp)) == -1 ||
	    (r2 = dt_regset_alloc(drp)) == -1 ||
	    (r3 = dt_regset_alloc(drp)) == -1)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);

	/*
	 * Compute shifts and masks.  We need to compute "shift" as the amount
	 * we need to shift left to position our field in the containing word.
	 * Refer to the comments in dt_cg_field_get(), above, for more info.
	 * We then compute fmask as the mask that truncates the value in the
	 * input register to width cte_bits, and cmask as the mask used to
	 * pass through the containing bits and zero the field bits.
	 */
#if BYTE_ORDER == _BIG_ENDIAN
	shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
	    (m.ctm_offset % NBBY + e.cte_bits);
#else
	shift = m.ctm_offset % NBBY;
#endif
	fmask = (1ULL << e.cte_bits) - 1;
	cmask = ~(fmask << shift);

	/* r1 = containing word with the destination field bits cleared */
	instr = DIF_INSTR_LOAD(
	    dt_cg_load(dst, fp, m.ctm_type), dst->dn_reg, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, r2, cmask);
	instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* r2 = new value truncated to cte_bits and shifted into position */
	dt_cg_setx(dlp, r2, fmask);
	instr = DIF_INSTR_FMT(DIF_OP_AND, src->dn_reg, r2, r2);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, r3, shift);
	instr = DIF_INSTR_FMT(DIF_OP_SLL, r2, r3, r2);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* r1 = merged word, ready for the caller's store instruction */
	instr = DIF_INSTR_FMT(DIF_OP_OR, r1, r2, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_regset_free(drp, r3);
	dt_regset_free(drp, r2);

	return (r1);
}
/*
 * Generate code to store the value in src->dn_reg to the memory location
 * addressed by dst->dn_reg.  By-reference values are copied with a COPYS
 * of the computed size; by-value stores select a fixed-width ST* opcode
 * from the store size, with a read-modify-write prologue generated by
 * dt_cg_field_set() when the destination is a bit-field.
 */
static void
dt_cg_store(dt_node_t *src, dt_irlist_t *dlp, dt_regset_t *drp, dt_node_t *dst)
{
	ctf_encoding_t e;
	dif_instr_t instr;
	size_t size;
	int reg;

	/*
	 * If we're loading a bit-field, the size of our store is found by
	 * rounding dst's cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).
	 */
	if ((dst->dn_flags & DT_NF_BITFIELD) &&
	    ctf_type_encoding(dst->dn_ctfp, dst->dn_type, &e) != CTF_ERR)
		size = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY);
	else
		size = dt_node_type_size(src);

	if (src->dn_flags & DT_NF_REF) {
		/* by-reference: copy 'size' bytes from src to dst */
		if ((reg = dt_regset_alloc(drp)) == -1)
			longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
		dt_cg_setx(dlp, reg, size);
		instr = DIF_INSTR_COPYS(src->dn_reg, reg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, reg);
	} else {
		/* bit-field targets need the merged word from field_set */
		if (dst->dn_flags & DT_NF_BITFIELD)
			reg = dt_cg_field_set(src, dlp, drp, dst);
		else
			reg = src->dn_reg;

		switch (size) {
		case 1:
			instr = DIF_INSTR_STORE(DIF_OP_STB, reg, dst->dn_reg);
			break;
		case 2:
			instr = DIF_INSTR_STORE(DIF_OP_STH, reg, dst->dn_reg);
			break;
		case 4:
			instr = DIF_INSTR_STORE(DIF_OP_STW, reg, dst->dn_reg);
			break;
		case 8:
			instr = DIF_INSTR_STORE(DIF_OP_STX, reg, dst->dn_reg);
			break;
		default:
			xyerror(D_UNKNOWN, "internal error -- cg cannot store "
			    "size %lu when passed by value\n", (ulong_t)size);
		}

		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		/* dt_cg_field_set() allocated 'reg' for us; release it here */
		if (dst->dn_flags & DT_NF_BITFIELD)
			dt_regset_free(drp, reg);
	}
}
  396. /*
  397. * Generate code for a typecast or for argument promotion from the type of the
  398. * actual to the type of the formal. We need to generate code for casts when
  399. * a scalar type is being narrowed or changing signed-ness. We first shift the
  400. * desired bits high (losing excess bits if narrowing) and then shift them down
  401. * using logical shift (unsigned result) or arithmetic shift (signed result).
  402. */
  403. static void
  404. dt_cg_typecast(const dt_node_t *src, const dt_node_t *dst,
  405. dt_irlist_t *dlp, dt_regset_t *drp)
  406. {
  407. size_t srcsize = dt_node_type_size(src);
  408. size_t dstsize = dt_node_type_size(dst);
  409. dif_instr_t instr;
  410. int reg, n;
  411. if (dt_node_is_scalar(dst) && (dstsize < srcsize ||
  412. (src->dn_flags & DT_NF_SIGNED) ^ (dst->dn_flags & DT_NF_SIGNED))) {
  413. if ((reg = dt_regset_alloc(drp)) == -1)
  414. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  415. if (dstsize < srcsize)
  416. n = sizeof (uint64_t) * NBBY - dstsize * NBBY;
  417. else
  418. n = sizeof (uint64_t) * NBBY - srcsize * NBBY;
  419. dt_cg_setx(dlp, reg, n);
  420. instr = DIF_INSTR_FMT(DIF_OP_SLL,
  421. src->dn_reg, reg, dst->dn_reg);
  422. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  423. instr = DIF_INSTR_FMT((dst->dn_flags & DT_NF_SIGNED) ?
  424. DIF_OP_SRA : DIF_OP_SRL, dst->dn_reg, reg, dst->dn_reg);
  425. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  426. dt_regset_free(drp, reg);
  427. }
  428. }
/*
 * Generate code to push the specified argument list on to the tuple stack.
 * We use this routine for handling subroutine calls and associative arrays.
 * We must first generate code for all subexpressions before loading the stack
 * because any subexpression could itself require the use of the tuple stack.
 * This holds a number of registers equal to the number of arguments, but this
 * is not a huge problem because the number of arguments can't exceed the
 * number of tuple register stack elements anyway.  At most one extra register
 * is required (either by dt_cg_typecast() or for dtdt_size, below).  This
 * implies that a DIF implementation should offer a number of general purpose
 * registers at least one greater than the number of tuple registers.
 */
static void
dt_cg_arglist(dt_ident_t *idp, dt_node_t *args,
    dt_irlist_t *dlp, dt_regset_t *drp)
{
	const dt_idsig_t *isp = idp->di_data;
	dt_node_t *dnp;
	int i = 0;

	/* evaluate every argument before touching the tuple stack */
	for (dnp = args; dnp != NULL; dnp = dnp->dn_list)
		dt_cg_node(dnp, dlp, drp);

	dt_irlist_append(dlp,
	    dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));

	for (dnp = args; dnp != NULL; dnp = dnp->dn_list, i++) {
		dtrace_diftype_t t;
		dif_instr_t instr;
		uint_t op;
		int reg;

		dt_node_diftype(yypcb->pcb_hdl, dnp, &t);

		/* promote the actual to the formal's type in-place */
		isp->dis_args[i].dn_reg = dnp->dn_reg; /* re-use register */
		dt_cg_typecast(dnp, &isp->dis_args[i], dlp, drp);
		isp->dis_args[i].dn_reg = -1;

		if (t.dtdt_flags & DIF_TF_BYREF)
			op = DIF_OP_PUSHTR;
		else
			op = DIF_OP_PUSHTV;

		/* by-ref pushes carry their size in an extra register */
		if (t.dtdt_size != 0) {
			if ((reg = dt_regset_alloc(drp)) == -1)
				longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
			dt_cg_setx(dlp, reg, t.dtdt_size);
		} else
			reg = DIF_REG_R0;

		instr = DIF_INSTR_PUSHTS(op, t.dtdt_kind, reg, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, dnp->dn_reg);

		if (reg != DIF_REG_R0)
			dt_regset_free(drp, reg);
	}

	/* the target must have enough tuple registers for this key */
	if (i > yypcb->pcb_hdl->dt_conf.dtc_diftupregs)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOTUPREG);
}
/*
 * Generate code for a binary arithmetic operation using DIF opcode 'op',
 * leaving the result in dnp->dn_reg (which re-uses the left operand's
 * register).  C pointer arithmetic is handled here too: for ptr +/- int
 * the integer operand is pre-scaled (MUL) by the size of the referenced
 * type via dt_cg_ptrsize(), and for ptr - ptr the raw byte difference is
 * divided (UDIV) by that size afterward to yield an element count.
 */
static void
dt_cg_arithmetic_op(dt_node_t *dnp, dt_irlist_t *dlp,
    dt_regset_t *drp, uint_t op)
{
	int is_ptr_op = (dnp->dn_op == DT_TOK_ADD || dnp->dn_op == DT_TOK_SUB ||
	    dnp->dn_op == DT_TOK_ADD_EQ || dnp->dn_op == DT_TOK_SUB_EQ);

	int lp_is_ptr = dt_node_is_pointer(dnp->dn_left);
	int rp_is_ptr = dt_node_is_pointer(dnp->dn_right);

	dif_instr_t instr;

	/* ptr - ptr: no operand scaling; the result is divided below */
	if (lp_is_ptr && rp_is_ptr) {
		assert(dnp->dn_op == DT_TOK_SUB);
		is_ptr_op = 0;
	}

	dt_cg_node(dnp->dn_left, dlp, drp);
	if (is_ptr_op && rp_is_ptr)
		dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_left->dn_reg);

	dt_cg_node(dnp->dn_right, dlp, drp);
	if (is_ptr_op && lp_is_ptr)
		dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_right->dn_reg);

	instr = DIF_INSTR_FMT(op, dnp->dn_left->dn_reg,
	    dnp->dn_right->dn_reg, dnp->dn_left->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;

	if (lp_is_ptr && rp_is_ptr)
		dt_cg_ptrsize(dnp->dn_right,
		    dlp, drp, DIF_OP_UDIV, dnp->dn_reg);
}
  508. static uint_t
  509. dt_cg_stvar(const dt_ident_t *idp)
  510. {
  511. static const uint_t aops[] = { DIF_OP_STGAA, DIF_OP_STTAA, DIF_OP_NOP };
  512. static const uint_t sops[] = { DIF_OP_STGS, DIF_OP_STTS, DIF_OP_STLS };
  513. uint_t i = (((idp->di_flags & DT_IDFLG_LOCAL) != 0) << 1) |
  514. ((idp->di_flags & DT_IDFLG_TLS) != 0);
  515. return (idp->di_kind == DT_IDENT_ARRAY ? aops[i] : sops[i]);
  516. }
/*
 * Generate code for a pre-increment or pre-decrement (op is DIF_OP_ADD or
 * DIF_OP_SUB): compute the new value into dnp->dn_reg and then write it back
 * to the child lvalue.  For pointers the delta is the size of the referenced
 * type rather than 1.  NOTE(review): the flag restore at the bottom touches
 * dn_left while everything else uses dn_child — dn_child appears to be a
 * #define alias for dn_left in the dt_node headers; confirm before changing.
 */
static void
dt_cg_prearith_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	dif_instr_t instr;
	ctf_id_t type;
	ssize_t size = 1;
	int reg;

	/* pointer ++/-- steps by the size of the referenced type */
	if (dt_node_is_pointer(dnp)) {
		type = ctf_type_resolve(ctfp, dnp->dn_type);
		assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
		size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
	}

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	if ((reg = dt_regset_alloc(drp)) == -1)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);

	dt_cg_setx(dlp, reg, size);
	instr = DIF_INSTR_FMT(op, dnp->dn_reg, reg, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, reg);

	/*
	 * If we are modifying a variable, generate an stv instruction from
	 * the variable specified by the identifier.  If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we store the value in dnp->dn_reg (the new value).
	 */
	if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
		dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp),
		    idp->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;

		assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);

		dt_cg_store(dnp, dlp, drp, dnp->dn_child);
		dt_regset_free(drp, dnp->dn_child->dn_reg);

		/* restore the child's original DT_NF_REF setting */
		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}
}
/*
 * Generate code for a post-increment or post-decrement (op is DIF_OP_ADD or
 * DIF_OP_SUB): the expression's value (dnp->dn_reg) is the child's ORIGINAL
 * value, while the updated value is built in the scratch register 'nreg' and
 * written back to the child lvalue.  For pointers the delta is the size of
 * the referenced type.  NOTE(review): as in dt_cg_prearith_op(), the flag
 * restore uses dn_left, which appears to be a #define alias for dn_child in
 * the dt_node headers; confirm before changing.
 */
static void
dt_cg_postarith_op(dt_node_t *dnp, dt_irlist_t *dlp,
    dt_regset_t *drp, uint_t op)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	dif_instr_t instr;
	ctf_id_t type;
	ssize_t size = 1;
	int nreg;

	/* pointer ++/-- steps by the size of the referenced type */
	if (dt_node_is_pointer(dnp)) {
		type = ctf_type_resolve(ctfp, dnp->dn_type);
		assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
		size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
	}

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	if ((nreg = dt_regset_alloc(drp)) == -1)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);

	/* nreg = old value +/- size; dn_reg keeps the old value */
	dt_cg_setx(dlp, nreg, size);
	instr = DIF_INSTR_FMT(op, dnp->dn_reg, nreg, nreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * If we are modifying a variable, generate an stv instruction from
	 * the variable specified by the identifier.  If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we store the value from 'nreg' (the new value).
	 */
	if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
		dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp), idp->di_id, nreg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;
		int oreg = dnp->dn_reg;

		assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);

		/* temporarily expose nreg as dn_reg so the store uses it */
		dnp->dn_reg = nreg;
		dt_cg_store(dnp, dlp, drp, dnp->dn_child);
		dnp->dn_reg = oreg;

		dt_regset_free(drp, dnp->dn_child->dn_reg);

		/* restore the child's original DT_NF_REF setting */
		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}

	dt_regset_free(drp, nreg);
}
  612. /*
  613. * Determine if we should perform signed or unsigned comparison for an OP2.
  614. * If both operands are of arithmetic type, perform the usual arithmetic
  615. * conversions to determine the common real type for comparison [ISOC 6.5.8.3].
  616. */
  617. static int
  618. dt_cg_compare_signed(dt_node_t *dnp)
  619. {
  620. dt_node_t dn;
  621. if (dt_node_is_string(dnp->dn_left) ||
  622. dt_node_is_string(dnp->dn_right))
  623. return (1); /* strings always compare signed */
  624. else if (!dt_node_is_arith(dnp->dn_left) ||
  625. !dt_node_is_arith(dnp->dn_right))
  626. return (0); /* non-arithmetic types always compare unsigned */
  627. bzero(&dn, sizeof (dn));
  628. dt_node_promote(dnp->dn_left, dnp->dn_right, &dn);
  629. return (dn.dn_flags & DT_NF_SIGNED);
  630. }
/*
 * Generate code for a comparison operator: compare the two operand registers
 * (SCMP for strings, CMP otherwise) and then materialize a 0/1 result in
 * dnp->dn_reg by branching with the condition opcode 'op' (e.g. DIF_OP_BE
 * for ==) to the code that sets 1, falling through to the code that sets 0.
 */
static void
dt_cg_compare_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
{
	uint_t lbl_true = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;
	uint_t opc;

	dt_cg_node(dnp->dn_left, dlp, drp);
	dt_cg_node(dnp->dn_right, dlp, drp);

	if (dt_node_is_string(dnp->dn_left) || dt_node_is_string(dnp->dn_right))
		opc = DIF_OP_SCMP;
	else
		opc = DIF_OP_CMP;

	instr = DIF_INSTR_CMP(opc, dnp->dn_left->dn_reg, dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;

	/* branch taken => condition true => result 1; else result 0 */
	instr = DIF_INSTR_BRANCH(op, lbl_true);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}
/*
 * Code generation for the ternary op requires some trickery with the assembler
 * in order to conserve registers.  We generate code for dn_expr and dn_left
 * and free their registers so they do not have be consumed across codegen for
 * dn_right.  We insert a dummy MOV at the end of dn_left into the destination
 * register, which is not yet known because we haven't done dn_right yet, and
 * save the pointer to this instruction node.  We then generate code for
 * dn_right and use its register as our output.  Finally, we reach back and
 * patch the instruction for dn_left to move its output into this register.
 */
static void
dt_cg_ternary_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_false = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;
	dt_irnode_t *dip;

	/* evaluate the condition and branch to the false arm if zero */
	dt_cg_node(dnp->dn_expr, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_expr->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_expr->dn_reg);

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_node(dnp->dn_left, dlp, drp);
	/* placeholder MOV; its destination is patched below */
	instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, DIF_REG_R0);
	dip = dt_cg_node_alloc(DT_LBL_NONE, instr); /* save dip for below */
	dt_irlist_append(dlp, dip);
	dt_regset_free(drp, dnp->dn_left->dn_reg);

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, DIF_INSTR_NOP));
	dt_cg_node(dnp->dn_right, dlp, drp);
	dnp->dn_reg = dnp->dn_right->dn_reg;

	/*
	 * Now that dn_reg is assigned, reach back and patch the correct MOV
	 * instruction into the tail of dn_left.  We know dn_reg was unused
	 * at that point because otherwise dn_right couldn't have allocated it.
	 */
	dip->di_instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}
/*
 * Generate code for "&&" with short-circuit evaluation: if the left operand
 * is zero, branch straight to the false epilogue without evaluating the
 * right operand.  The result register (the right operand's register) ends up
 * holding 1 if both operands were non-zero and 0 otherwise.
 */
static void
dt_cg_logical_and(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_false = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;

	dt_cg_node(dnp->dn_left, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_left->dn_reg);

	/* short-circuit: left == 0 => whole expression is 0 */
	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_node(dnp->dn_right, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dnp->dn_reg = dnp->dn_right->dn_reg;

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* both non-zero: result is 1 */
	dt_cg_setx(dlp, dnp->dn_reg, 1);

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* false epilogue: result is 0 */
	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr));

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}
/*
 * Generate code for "^^" (D's logical XOR).  Unlike && and || there is no
 * short-circuit: both operands are evaluated, each is normalized to 0 or 1
 * (non-zero values become 1 via the TST/branch/setx sequence), and the two
 * normalized values are XOR'd to form the result in the left register.
 */
static void
dt_cg_logical_xor(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	uint_t lbl_next = dt_irlist_label(dlp);
	uint_t lbl_tail = dt_irlist_label(dlp);

	dif_instr_t instr;

	/* normalize the left operand to 0/1 */
	dt_cg_node(dnp->dn_left, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_next);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_cg_setx(dlp, dnp->dn_left->dn_reg, 1);

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_next, DIF_INSTR_NOP));

	/* normalize the right operand to 0/1 */
	dt_cg_node(dnp->dn_right, dlp, drp);
	instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_tail);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_cg_setx(dlp, dnp->dn_right->dn_reg, 1);

	/* combine the two normalized truth values */
	instr = DIF_INSTR_FMT(DIF_OP_XOR, dnp->dn_left->dn_reg,
	    dnp->dn_right->dn_reg, dnp->dn_left->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_tail, instr));

	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;
}
  748. static void
  749. dt_cg_logical_or(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
  750. {
  751. uint_t lbl_true = dt_irlist_label(dlp);
  752. uint_t lbl_false = dt_irlist_label(dlp);
  753. uint_t lbl_post = dt_irlist_label(dlp);
  754. dif_instr_t instr;
  755. dt_cg_node(dnp->dn_left, dlp, drp);
  756. instr = DIF_INSTR_TST(dnp->dn_left->dn_reg);
  757. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  758. dt_regset_free(drp, dnp->dn_left->dn_reg);
  759. instr = DIF_INSTR_BRANCH(DIF_OP_BNE, lbl_true);
  760. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  761. dt_cg_node(dnp->dn_right, dlp, drp);
  762. instr = DIF_INSTR_TST(dnp->dn_right->dn_reg);
  763. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  764. dnp->dn_reg = dnp->dn_right->dn_reg;
  765. instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false);
  766. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  767. dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1);
  768. instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
  769. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  770. instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
  771. dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr));
  772. dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
  773. }
  774. static void
  775. dt_cg_logical_neg(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
  776. {
  777. uint_t lbl_zero = dt_irlist_label(dlp);
  778. uint_t lbl_post = dt_irlist_label(dlp);
  779. dif_instr_t instr;
  780. dt_cg_node(dnp->dn_child, dlp, drp);
  781. dnp->dn_reg = dnp->dn_child->dn_reg;
  782. instr = DIF_INSTR_TST(dnp->dn_reg);
  783. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  784. instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_zero);
  785. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  786. instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
  787. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  788. instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
  789. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  790. dt_cg_xsetx(dlp, NULL, lbl_zero, dnp->dn_reg, 1);
  791. dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
  792. }
/*
 * Generate code to store the value already computed for dnp->dn_right (and
 * recorded in dnp->dn_reg by our caller) into the lvalue dnp->dn_left.
 * Handles three cases: structure assignment through a translator, stores to
 * D variables, and stores through a memory address.
 */
static void
dt_cg_asgn_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dif_instr_t instr;
	dt_ident_t *idp;

	/*
	 * If we are performing a structure assignment of a translated type,
	 * we must instantiate all members and create a snapshot of the object
	 * in scratch space. We allocs a chunk of memory, generate code for
	 * each member, and then set dnp->dn_reg to the scratch object address.
	 */
	if ((idp = dt_node_resolve(dnp->dn_right, DT_IDENT_XLSOU)) != NULL) {
		ctf_membinfo_t ctm;
		dt_xlator_t *dxp = idp->di_data;
		dt_node_t *mnp, dn, mn;
		int r1, r2;

		/*
		 * Create two fake dt_node_t's representing operator "." and a
		 * right-hand identifier child node. These will be repeatedly
		 * modified according to each instantiated member so that we
		 * can pass them to dt_cg_store() and effect a member store.
		 */
		bzero(&dn, sizeof (dt_node_t));
		dn.dn_kind = DT_NODE_OP2;
		dn.dn_op = DT_TOK_DOT;
		dn.dn_left = dnp;
		dn.dn_right = &mn;
		bzero(&mn, sizeof (dt_node_t));
		mn.dn_kind = DT_NODE_IDENT;
		mn.dn_op = DT_TOK_IDENT;

		/*
		 * Allocate a register for our scratch data pointer. First we
		 * set it to the size of our data structure, and then replace
		 * it with the result of an allocs of the specified size.
		 */
		if ((r1 = dt_regset_alloc(drp)) == -1)
			longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);

		dt_cg_setx(dlp, r1,
		    ctf_type_size(dxp->dx_dst_ctfp, dxp->dx_dst_base));
		instr = DIF_INSTR_ALLOCS(r1, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		/*
		 * When dt_cg_asgn_op() is called, we have already generated
		 * code for dnp->dn_right, which is the translator input. We
		 * now associate this register with the translator's input
		 * identifier so it can be referenced during our member loop.
		 */
		dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = dnp->dn_right->dn_reg;

		for (mnp = dxp->dx_members; mnp != NULL; mnp = mnp->dn_list) {
			/*
			 * Generate code for the translator member expression,
			 * and then cast the result to the member type.
			 */
			dt_cg_node(mnp->dn_membexpr, dlp, drp);
			mnp->dn_reg = mnp->dn_membexpr->dn_reg;
			dt_cg_typecast(mnp->dn_membexpr, mnp, dlp, drp);

			/*
			 * Ask CTF for the offset of the member so we can store
			 * to the appropriate offset. This call has already
			 * been done once by the parser, so it should succeed.
			 */
			if (ctf_member_info(dxp->dx_dst_ctfp, dxp->dx_dst_base,
			    mnp->dn_membname, &ctm) == CTF_ERR) {
				yypcb->pcb_hdl->dt_ctferr =
				    ctf_errno(dxp->dx_dst_ctfp);
				longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
			}

			/*
			 * If the destination member is at offset 0, store the
			 * result directly to r1 (the scratch buffer address).
			 * Otherwise allocate another temporary for the offset
			 * and add r1 to it before storing the result.
			 */
			if (ctm.ctm_offset != 0) {
				if ((r2 = dt_regset_alloc(drp)) == -1)
					longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);

				/*
				 * Add the member offset rounded down to the
				 * nearest byte. If the offset was not aligned
				 * on a byte boundary, this member is a bit-
				 * field and dt_cg_store() will handle masking.
				 */
				dt_cg_setx(dlp, r2, ctm.ctm_offset / NBBY);
				instr = DIF_INSTR_FMT(DIF_OP_ADD, r1, r2, r2);
				dt_irlist_append(dlp,
				    dt_cg_node_alloc(DT_LBL_NONE, instr));

				dt_node_type_propagate(mnp, &dn);
				dn.dn_right->dn_string = mnp->dn_membname;
				dn.dn_reg = r2;

				dt_cg_store(mnp, dlp, drp, &dn);
				dt_regset_free(drp, r2);

			} else {
				dt_node_type_propagate(mnp, &dn);
				dn.dn_right->dn_string = mnp->dn_membname;
				dn.dn_reg = r1;

				dt_cg_store(mnp, dlp, drp, &dn);
			}

			/* member value has been stored; release its register */
			dt_regset_free(drp, mnp->dn_reg);
		}

		/* detach the translator input register from its identifier */
		dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = 0;

		/* dn_reg can be -1 if the input came from a dynamic xlator */
		if (dnp->dn_right->dn_reg != -1)
			dt_regset_free(drp, dnp->dn_right->dn_reg);

		/* the result of the assignment is now the scratch object */
		assert(dnp->dn_reg == dnp->dn_right->dn_reg);
		dnp->dn_reg = r1;
	}

	/*
	 * If we are storing to a variable, generate an stv instruction from
	 * the variable specified by the identifier. If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we assume dnp->dn_reg already has the new value.
	 */
	if (dnp->dn_left->dn_kind == DT_NODE_VAR) {
		idp = dt_ident_resolve(dnp->dn_left->dn_ident);

		/* associative arrays need their key tuple pushed first */
		if (idp->di_kind == DT_IDENT_ARRAY)
			dt_cg_arglist(idp, dnp->dn_left->dn_args, dlp, drp);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp),
		    idp->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_left->dn_flags & DT_NF_REF;

		assert(dnp->dn_left->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_left->dn_flags & DT_NF_LVALUE);

		dnp->dn_left->dn_flags |= DT_NF_REF; /* force pass-by-ref */

		dt_cg_node(dnp->dn_left, dlp, drp);
		dt_cg_store(dnp, dlp, drp, dnp->dn_left);
		dt_regset_free(drp, dnp->dn_left->dn_reg);

		/* restore the left-hand side's original DT_NF_REF setting */
		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}
}
  927. static void
  928. dt_cg_assoc_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
  929. {
  930. dif_instr_t instr;
  931. uint_t op;
  932. assert(dnp->dn_kind == DT_NODE_VAR);
  933. assert(!(dnp->dn_ident->di_flags & DT_IDFLG_LOCAL));
  934. assert(dnp->dn_args != NULL);
  935. dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);
  936. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  937. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  938. if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
  939. op = DIF_OP_LDTAA;
  940. else
  941. op = DIF_OP_LDGAA;
  942. dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;
  943. instr = DIF_INSTR_LDV(op, dnp->dn_ident->di_id, dnp->dn_reg);
  944. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  945. /*
  946. * If the associative array is a pass-by-reference type, then we are
  947. * loading its value as a pointer to either load or store through it.
  948. * The array element in question may not have been faulted in yet, in
  949. * which case DIF_OP_LD*AA will return zero. We append an epilogue
  950. * of instructions similar to the following:
  951. *
  952. * ld?aa id, %r1 ! base ld?aa instruction above
  953. * tst %r1 ! start of epilogue
  954. * +--- bne label
  955. * | setx size, %r1
  956. * | allocs %r1, %r1
  957. * | st?aa id, %r1
  958. * | ld?aa id, %r1
  959. * v
  960. * label: < rest of code >
  961. *
  962. * The idea is that we allocs a zero-filled chunk of scratch space and
  963. * do a DIF_OP_ST*AA to fault in and initialize the array element, and
  964. * then reload it to get the faulted-in address of the new variable
  965. * storage. This isn't cheap, but pass-by-ref associative array values
  966. * are (thus far) uncommon and the allocs cost only occurs once. If
  967. * this path becomes important to DTrace users, we can improve things
  968. * by adding a new DIF opcode to fault in associative array elements.
  969. */
  970. if (dnp->dn_flags & DT_NF_REF) {
  971. uint_t stvop = op == DIF_OP_LDTAA ? DIF_OP_STTAA : DIF_OP_STGAA;
  972. uint_t label = dt_irlist_label(dlp);
  973. instr = DIF_INSTR_TST(dnp->dn_reg);
  974. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  975. instr = DIF_INSTR_BRANCH(DIF_OP_BNE, label);
  976. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  977. dt_cg_setx(dlp, dnp->dn_reg, dt_node_type_size(dnp));
  978. instr = DIF_INSTR_ALLOCS(dnp->dn_reg, dnp->dn_reg);
  979. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  980. dnp->dn_ident->di_flags |= DT_IDFLG_DIFW;
  981. instr = DIF_INSTR_STV(stvop, dnp->dn_ident->di_id, dnp->dn_reg);
  982. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  983. instr = DIF_INSTR_LDV(op, dnp->dn_ident->di_id, dnp->dn_reg);
  984. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  985. dt_irlist_append(dlp, dt_cg_node_alloc(label, DIF_INSTR_NOP));
  986. }
  987. }
  988. static void
  989. dt_cg_array_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
  990. {
  991. dt_probe_t *prp = yypcb->pcb_probe;
  992. uintmax_t saved = dnp->dn_args->dn_value;
  993. dt_ident_t *idp = dnp->dn_ident;
  994. dif_instr_t instr;
  995. uint_t op;
  996. size_t size;
  997. int reg, n;
  998. assert(dnp->dn_kind == DT_NODE_VAR);
  999. assert(!(idp->di_flags & DT_IDFLG_LOCAL));
  1000. assert(dnp->dn_args->dn_kind == DT_NODE_INT);
  1001. assert(dnp->dn_args->dn_list == NULL);
  1002. /*
  1003. * If this is a reference in the args[] array, temporarily modify the
  1004. * array index according to the static argument mapping (if any),
  1005. * unless the argument reference is provided by a dynamic translator.
  1006. * If we're using a dynamic translator for args[], then just set dn_reg
  1007. * to an invalid reg and return: DIF_OP_XLARG will fetch the arg later.
  1008. */
  1009. if (idp->di_id == DIF_VAR_ARGS) {
  1010. if ((idp->di_kind == DT_IDENT_XLPTR ||
  1011. idp->di_kind == DT_IDENT_XLSOU) &&
  1012. dt_xlator_dynamic(idp->di_data)) {
  1013. dnp->dn_reg = -1;
  1014. return;
  1015. }
  1016. dnp->dn_args->dn_value = prp->pr_mapping[saved];
  1017. }
  1018. dt_cg_node(dnp->dn_args, dlp, drp);
  1019. dnp->dn_args->dn_value = saved;
  1020. dnp->dn_reg = dnp->dn_args->dn_reg;
  1021. if (idp->di_flags & DT_IDFLG_TLS)
  1022. op = DIF_OP_LDTA;
  1023. else
  1024. op = DIF_OP_LDGA;
  1025. idp->di_flags |= DT_IDFLG_DIFR;
  1026. instr = DIF_INSTR_LDA(op, idp->di_id,
  1027. dnp->dn_args->dn_reg, dnp->dn_reg);
  1028. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  1029. /*
  1030. * If this is a reference to the args[] array, we need to take the
  1031. * additional step of explicitly eliminating any bits larger than the
  1032. * type size: the DIF interpreter in the kernel will always give us
  1033. * the raw (64-bit) argument value, and any bits larger than the type
  1034. * size may be junk. As a practical matter, this arises only on 64-bit
  1035. * architectures and only when the argument index is larger than the
  1036. * number of arguments passed directly to DTrace: if a 8-, 16- or
  1037. * 32-bit argument must be retrieved from the stack, it is possible
  1038. * (and it some cases, likely) that the upper bits will be garbage.
  1039. */
  1040. if (idp->di_id != DIF_VAR_ARGS || !dt_node_is_scalar(dnp))
  1041. return;
  1042. if ((size = dt_node_type_size(dnp)) == sizeof (uint64_t))
  1043. return;
  1044. if ((reg = dt_regset_alloc(drp)) == -1)
  1045. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1046. assert(size < sizeof (uint64_t));
  1047. n = sizeof (uint64_t) * NBBY - size * NBBY;
  1048. dt_cg_setx(dlp, reg, n);
  1049. instr = DIF_INSTR_FMT(DIF_OP_SLL, dnp->dn_reg, reg, dnp->dn_reg);
  1050. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  1051. instr = DIF_INSTR_FMT((dnp->dn_flags & DT_NF_SIGNED) ?
  1052. DIF_OP_SRA : DIF_OP_SRL, dnp->dn_reg, reg, dnp->dn_reg);
  1053. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  1054. dt_regset_free(drp, reg);
  1055. }
  1056. /*
  1057. * Generate code for an inlined variable reference. Inlines can be used to
  1058. * define either scalar or associative array substitutions. For scalars, we
  1059. * simply generate code for the parse tree saved in the identifier's din_root,
  1060. * and then cast the resulting expression to the inline's declaration type.
  1061. * For arrays, we take the input parameter subtrees from dnp->dn_args and
  1062. * temporarily store them in the din_root of each din_argv[i] identifier,
  1063. * which are themselves inlines and were set up for us by the parser. The
  1064. * result is that any reference to the inlined parameter inside the top-level
  1065. * din_root will turn into a recursive call to dt_cg_inline() for a scalar
  1066. * inline whose din_root will refer to the subtree pointed to by the argument.
  1067. */
  1068. static void
  1069. dt_cg_inline(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
  1070. {
  1071. dt_ident_t *idp = dnp->dn_ident;
  1072. dt_idnode_t *inp = idp->di_iarg;
  1073. dt_idnode_t *pinp;
  1074. dt_node_t *pnp;
  1075. int i;
  1076. assert(idp->di_flags & DT_IDFLG_INLINE);
  1077. assert(idp->di_ops == &dt_idops_inline);
  1078. if (idp->di_kind == DT_IDENT_ARRAY) {
  1079. for (i = 0, pnp = dnp->dn_args;
  1080. pnp != NULL; pnp = pnp->dn_list, i++) {
  1081. if (inp->din_argv[i] != NULL) {
  1082. pinp = inp->din_argv[i]->di_iarg;
  1083. pinp->din_root = pnp;
  1084. }
  1085. }
  1086. }
  1087. dt_cg_node(inp->din_root, dlp, drp);
  1088. dnp->dn_reg = inp->din_root->dn_reg;
  1089. dt_cg_typecast(inp->din_root, dnp, dlp, drp);
  1090. if (idp->di_kind == DT_IDENT_ARRAY) {
  1091. for (i = 0; i < inp->din_argc; i++) {
  1092. pinp = inp->din_argv[i]->di_iarg;
  1093. pinp->din_root = NULL;
  1094. }
  1095. }
  1096. }
  1097. static void
  1098. dt_cg_func_typeref(dtrace_hdl_t *dtp, dt_node_t *dnp)
  1099. {
  1100. dtrace_typeinfo_t dtt;
  1101. dt_node_t *addr = dnp->dn_args;
  1102. dt_node_t *nelm = addr->dn_list;
  1103. dt_node_t *strp = nelm->dn_list;
  1104. dt_node_t *typs = strp->dn_list;
  1105. char buf[DT_TYPE_NAMELEN];
  1106. char *p;
  1107. ctf_type_name(addr->dn_ctfp, addr->dn_type, buf, sizeof (buf));
  1108. /*
  1109. * XXX Hack alert! XXX
  1110. * The prototype has two dummy args that we munge to represent
  1111. * the type string and the type size.
  1112. *
  1113. * Yes, I hear your grumble, but it works for now. We'll come
  1114. * up with a more elegant implementation later. :-)
  1115. */
  1116. free(strp->dn_string);
  1117. if ((p = strchr(buf, '*')) != NULL)
  1118. *p = '\0';
  1119. strp->dn_string = strdup(buf);
  1120. if (dtrace_lookup_by_type(dtp, DTRACE_OBJ_EVERY, buf, &dtt) < 0)
  1121. return;
  1122. typs->dn_value = ctf_type_size(dtt.dtt_ctfp, dtt.dtt_type);
  1123. }
  1124. static void
  1125. dt_cg_node(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
  1126. {
  1127. ctf_file_t *ctfp = dnp->dn_ctfp;
  1128. ctf_file_t *octfp;
  1129. ctf_membinfo_t m;
  1130. ctf_id_t type;
  1131. dif_instr_t instr;
  1132. dt_ident_t *idp;
  1133. ssize_t stroff;
  1134. uint_t op;
  1135. int reg;
  1136. switch (dnp->dn_op) {
  1137. case DT_TOK_COMMA:
  1138. dt_cg_node(dnp->dn_left, dlp, drp);
  1139. dt_regset_free(drp, dnp->dn_left->dn_reg);
  1140. dt_cg_node(dnp->dn_right, dlp, drp);
  1141. dnp->dn_reg = dnp->dn_right->dn_reg;
  1142. break;
  1143. case DT_TOK_ASGN:
  1144. dt_cg_node(dnp->dn_right, dlp, drp);
  1145. dnp->dn_reg = dnp->dn_right->dn_reg;
  1146. dt_cg_asgn_op(dnp, dlp, drp);
  1147. break;
  1148. case DT_TOK_ADD_EQ:
  1149. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
  1150. dt_cg_asgn_op(dnp, dlp, drp);
  1151. break;
  1152. case DT_TOK_SUB_EQ:
  1153. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
  1154. dt_cg_asgn_op(dnp, dlp, drp);
  1155. break;
  1156. case DT_TOK_MUL_EQ:
  1157. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
  1158. dt_cg_asgn_op(dnp, dlp, drp);
  1159. break;
  1160. case DT_TOK_DIV_EQ:
  1161. dt_cg_arithmetic_op(dnp, dlp, drp,
  1162. (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
  1163. dt_cg_asgn_op(dnp, dlp, drp);
  1164. break;
  1165. case DT_TOK_MOD_EQ:
  1166. dt_cg_arithmetic_op(dnp, dlp, drp,
  1167. (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
  1168. dt_cg_asgn_op(dnp, dlp, drp);
  1169. break;
  1170. case DT_TOK_AND_EQ:
  1171. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
  1172. dt_cg_asgn_op(dnp, dlp, drp);
  1173. break;
  1174. case DT_TOK_XOR_EQ:
  1175. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
  1176. dt_cg_asgn_op(dnp, dlp, drp);
  1177. break;
  1178. case DT_TOK_OR_EQ:
  1179. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
  1180. dt_cg_asgn_op(dnp, dlp, drp);
  1181. break;
  1182. case DT_TOK_LSH_EQ:
  1183. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
  1184. dt_cg_asgn_op(dnp, dlp, drp);
  1185. break;
  1186. case DT_TOK_RSH_EQ:
  1187. dt_cg_arithmetic_op(dnp, dlp, drp,
  1188. (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
  1189. dt_cg_asgn_op(dnp, dlp, drp);
  1190. break;
  1191. case DT_TOK_QUESTION:
  1192. dt_cg_ternary_op(dnp, dlp, drp);
  1193. break;
  1194. case DT_TOK_LOR:
  1195. dt_cg_logical_or(dnp, dlp, drp);
  1196. break;
  1197. case DT_TOK_LXOR:
  1198. dt_cg_logical_xor(dnp, dlp, drp);
  1199. break;
  1200. case DT_TOK_LAND:
  1201. dt_cg_logical_and(dnp, dlp, drp);
  1202. break;
  1203. case DT_TOK_BOR:
  1204. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
  1205. break;
  1206. case DT_TOK_XOR:
  1207. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
  1208. break;
  1209. case DT_TOK_BAND:
  1210. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
  1211. break;
  1212. case DT_TOK_EQU:
  1213. dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BE);
  1214. break;
  1215. case DT_TOK_NEQ:
  1216. dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BNE);
  1217. break;
  1218. case DT_TOK_LT:
  1219. dt_cg_compare_op(dnp, dlp, drp,
  1220. dt_cg_compare_signed(dnp) ? DIF_OP_BL : DIF_OP_BLU);
  1221. break;
  1222. case DT_TOK_LE:
  1223. dt_cg_compare_op(dnp, dlp, drp,
  1224. dt_cg_compare_signed(dnp) ? DIF_OP_BLE : DIF_OP_BLEU);
  1225. break;
  1226. case DT_TOK_GT:
  1227. dt_cg_compare_op(dnp, dlp, drp,
  1228. dt_cg_compare_signed(dnp) ? DIF_OP_BG : DIF_OP_BGU);
  1229. break;
  1230. case DT_TOK_GE:
  1231. dt_cg_compare_op(dnp, dlp, drp,
  1232. dt_cg_compare_signed(dnp) ? DIF_OP_BGE : DIF_OP_BGEU);
  1233. break;
  1234. case DT_TOK_LSH:
  1235. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
  1236. break;
  1237. case DT_TOK_RSH:
  1238. dt_cg_arithmetic_op(dnp, dlp, drp,
  1239. (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
  1240. break;
  1241. case DT_TOK_ADD:
  1242. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
  1243. break;
  1244. case DT_TOK_SUB:
  1245. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
  1246. break;
  1247. case DT_TOK_MUL:
  1248. dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
  1249. break;
  1250. case DT_TOK_DIV:
  1251. dt_cg_arithmetic_op(dnp, dlp, drp,
  1252. (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
  1253. break;
  1254. case DT_TOK_MOD:
  1255. dt_cg_arithmetic_op(dnp, dlp, drp,
  1256. (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
  1257. break;
  1258. case DT_TOK_LNEG:
  1259. dt_cg_logical_neg(dnp, dlp, drp);
  1260. break;
  1261. case DT_TOK_BNEG:
  1262. dt_cg_node(dnp->dn_child, dlp, drp);
  1263. dnp->dn_reg = dnp->dn_child->dn_reg;
  1264. instr = DIF_INSTR_NOT(dnp->dn_reg, dnp->dn_reg);
  1265. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  1266. break;
  1267. case DT_TOK_PREINC:
  1268. dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_ADD);
  1269. break;
  1270. case DT_TOK_POSTINC:
  1271. dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_ADD);
  1272. break;
  1273. case DT_TOK_PREDEC:
  1274. dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_SUB);
  1275. break;
  1276. case DT_TOK_POSTDEC:
  1277. dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_SUB);
  1278. break;
  1279. case DT_TOK_IPOS:
  1280. dt_cg_node(dnp->dn_child, dlp, drp);
  1281. dnp->dn_reg = dnp->dn_child->dn_reg;
  1282. break;
  1283. case DT_TOK_INEG:
  1284. dt_cg_node(dnp->dn_child, dlp, drp);
  1285. dnp->dn_reg = dnp->dn_child->dn_reg;
  1286. instr = DIF_INSTR_FMT(DIF_OP_SUB, DIF_REG_R0,
  1287. dnp->dn_reg, dnp->dn_reg);
  1288. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  1289. break;
  1290. case DT_TOK_DEREF:
  1291. dt_cg_node(dnp->dn_child, dlp, drp);
  1292. dnp->dn_reg = dnp->dn_child->dn_reg;
  1293. if (!(dnp->dn_flags & DT_NF_REF)) {
  1294. uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;
  1295. /*
  1296. * Save and restore DT_NF_USERLAND across dt_cg_load():
  1297. * we need the sign bit from dnp and the user bit from
  1298. * dnp->dn_child in order to get the proper opcode.
  1299. */
  1300. dnp->dn_flags |=
  1301. (dnp->dn_child->dn_flags & DT_NF_USERLAND);
  1302. instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
  1303. dnp->dn_type), dnp->dn_reg, dnp->dn_reg);
  1304. dnp->dn_flags &= ~DT_NF_USERLAND;
  1305. dnp->dn_flags |= ubit;
  1306. dt_irlist_append(dlp,
  1307. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1308. }
  1309. break;
  1310. case DT_TOK_ADDROF: {
  1311. uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;
  1312. dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
  1313. dt_cg_node(dnp->dn_child, dlp, drp);
  1314. dnp->dn_reg = dnp->dn_child->dn_reg;
  1315. dnp->dn_child->dn_flags &= ~DT_NF_REF;
  1316. dnp->dn_child->dn_flags |= rbit;
  1317. break;
  1318. }
  1319. case DT_TOK_SIZEOF: {
  1320. size_t size = dt_node_sizeof(dnp->dn_child);
  1321. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1322. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1323. assert(size != 0);
  1324. dt_cg_setx(dlp, dnp->dn_reg, size);
  1325. break;
  1326. }
  1327. case DT_TOK_STRINGOF:
  1328. dt_cg_node(dnp->dn_child, dlp, drp);
  1329. dnp->dn_reg = dnp->dn_child->dn_reg;
  1330. break;
  1331. case DT_TOK_XLATE:
  1332. /*
  1333. * An xlate operator appears in either an XLATOR, indicating a
  1334. * reference to a dynamic translator, or an OP2, indicating
  1335. * use of the xlate operator in the user's program. For the
  1336. * dynamic case, generate an xlate opcode with a reference to
  1337. * the corresponding member, pre-computed for us in dn_members.
  1338. */
  1339. if (dnp->dn_kind == DT_NODE_XLATOR) {
  1340. dt_xlator_t *dxp = dnp->dn_xlator;
  1341. assert(dxp->dx_ident->di_flags & DT_IDFLG_CGREG);
  1342. assert(dxp->dx_ident->di_id != 0);
  1343. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1344. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1345. if (dxp->dx_arg == -1) {
  1346. instr = DIF_INSTR_MOV(
  1347. dxp->dx_ident->di_id, dnp->dn_reg);
  1348. dt_irlist_append(dlp,
  1349. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1350. op = DIF_OP_XLATE;
  1351. } else
  1352. op = DIF_OP_XLARG;
  1353. instr = DIF_INSTR_XLATE(op, 0, dnp->dn_reg);
  1354. dt_irlist_append(dlp,
  1355. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1356. dlp->dl_last->di_extern = dnp->dn_xmember;
  1357. break;
  1358. }
  1359. assert(dnp->dn_kind == DT_NODE_OP2);
  1360. dt_cg_node(dnp->dn_right, dlp, drp);
  1361. dnp->dn_reg = dnp->dn_right->dn_reg;
  1362. break;
  1363. case DT_TOK_LPAR:
  1364. dt_cg_node(dnp->dn_right, dlp, drp);
  1365. dnp->dn_reg = dnp->dn_right->dn_reg;
  1366. dt_cg_typecast(dnp->dn_right, dnp, dlp, drp);
  1367. break;
  1368. case DT_TOK_PTR:
  1369. case DT_TOK_DOT:
  1370. assert(dnp->dn_right->dn_kind == DT_NODE_IDENT);
  1371. dt_cg_node(dnp->dn_left, dlp, drp);
  1372. /*
  1373. * If the left-hand side of PTR or DOT is a dynamic variable,
  1374. * we expect it to be the output of a D translator. In this
  1375. * case, we look up the parse tree corresponding to the member
  1376. * that is being accessed and run the code generator over it.
  1377. * We then cast the result as if by the assignment operator.
  1378. */
  1379. if ((idp = dt_node_resolve(
  1380. dnp->dn_left, DT_IDENT_XLSOU)) != NULL ||
  1381. (idp = dt_node_resolve(
  1382. dnp->dn_left, DT_IDENT_XLPTR)) != NULL) {
  1383. dt_xlator_t *dxp;
  1384. dt_node_t *mnp;
  1385. dxp = idp->di_data;
  1386. mnp = dt_xlator_member(dxp, dnp->dn_right->dn_string);
  1387. assert(mnp != NULL);
  1388. dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
  1389. dxp->dx_ident->di_id = dnp->dn_left->dn_reg;
  1390. dt_cg_node(mnp->dn_membexpr, dlp, drp);
  1391. dnp->dn_reg = mnp->dn_membexpr->dn_reg;
  1392. dt_cg_typecast(mnp->dn_membexpr, dnp, dlp, drp);
  1393. dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
  1394. dxp->dx_ident->di_id = 0;
  1395. if (dnp->dn_left->dn_reg != -1)
  1396. dt_regset_free(drp, dnp->dn_left->dn_reg);
  1397. break;
  1398. }
  1399. ctfp = dnp->dn_left->dn_ctfp;
  1400. type = ctf_type_resolve(ctfp, dnp->dn_left->dn_type);
  1401. if (dnp->dn_op == DT_TOK_PTR) {
  1402. type = ctf_type_reference(ctfp, type);
  1403. type = ctf_type_resolve(ctfp, type);
  1404. }
  1405. if ((ctfp = dt_cg_membinfo(octfp = ctfp, type,
  1406. dnp->dn_right->dn_string, &m)) == NULL) {
  1407. yypcb->pcb_hdl->dt_ctferr = ctf_errno(octfp);
  1408. longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
  1409. }
  1410. if (m.ctm_offset != 0) {
  1411. if ((reg = dt_regset_alloc(drp)) == -1)
  1412. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1413. /*
  1414. * If the offset is not aligned on a byte boundary, it
  1415. * is a bit-field member and we will extract the value
  1416. * bits below after we generate the appropriate load.
  1417. */
  1418. dt_cg_setx(dlp, reg, m.ctm_offset / NBBY);
  1419. instr = DIF_INSTR_FMT(DIF_OP_ADD,
  1420. dnp->dn_left->dn_reg, reg, dnp->dn_left->dn_reg);
  1421. dt_irlist_append(dlp,
  1422. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1423. dt_regset_free(drp, reg);
  1424. }
  1425. if (!(dnp->dn_flags & DT_NF_REF)) {
  1426. uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;
  1427. /*
  1428. * Save and restore DT_NF_USERLAND across dt_cg_load():
  1429. * we need the sign bit from dnp and the user bit from
  1430. * dnp->dn_left in order to get the proper opcode.
  1431. */
  1432. dnp->dn_flags |=
  1433. (dnp->dn_left->dn_flags & DT_NF_USERLAND);
  1434. instr = DIF_INSTR_LOAD(dt_cg_load(dnp,
  1435. ctfp, m.ctm_type), dnp->dn_left->dn_reg,
  1436. dnp->dn_left->dn_reg);
  1437. dnp->dn_flags &= ~DT_NF_USERLAND;
  1438. dnp->dn_flags |= ubit;
  1439. dt_irlist_append(dlp,
  1440. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1441. if (dnp->dn_flags & DT_NF_BITFIELD)
  1442. dt_cg_field_get(dnp, dlp, drp, ctfp, &m);
  1443. }
  1444. dnp->dn_reg = dnp->dn_left->dn_reg;
  1445. break;
  1446. case DT_TOK_STRING:
  1447. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1448. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1449. assert(dnp->dn_kind == DT_NODE_STRING);
  1450. stroff = dt_strtab_insert(yypcb->pcb_strtab, dnp->dn_string);
  1451. if (stroff == -1L)
  1452. longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
  1453. if (stroff > DIF_STROFF_MAX)
  1454. longjmp(yypcb->pcb_jmpbuf, EDT_STR2BIG);
  1455. instr = DIF_INSTR_SETS((ulong_t)stroff, dnp->dn_reg);
  1456. dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
  1457. break;
  1458. case DT_TOK_IDENT:
  1459. /*
  1460. * If the specified identifier is a variable on which we have
  1461. * set the code generator register flag, then this variable
  1462. * has already had code generated for it and saved in di_id.
  1463. * Allocate a new register and copy the existing value to it.
  1464. */
  1465. if (dnp->dn_kind == DT_NODE_VAR &&
  1466. (dnp->dn_ident->di_flags & DT_IDFLG_CGREG)) {
  1467. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1468. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1469. instr = DIF_INSTR_MOV(dnp->dn_ident->di_id,
  1470. dnp->dn_reg);
  1471. dt_irlist_append(dlp,
  1472. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1473. break;
  1474. }
  1475. /*
  1476. * Identifiers can represent function calls, variable refs, or
  1477. * symbols. First we check for inlined variables, and handle
  1478. * them by generating code for the inline parse tree.
  1479. */
  1480. if (dnp->dn_kind == DT_NODE_VAR &&
  1481. (dnp->dn_ident->di_flags & DT_IDFLG_INLINE)) {
  1482. dt_cg_inline(dnp, dlp, drp);
  1483. break;
  1484. }
  1485. switch (dnp->dn_kind) {
  1486. case DT_NODE_FUNC: {
  1487. dtrace_hdl_t *dtp = yypcb->pcb_hdl;
  1488. if ((idp = dnp->dn_ident)->di_kind != DT_IDENT_FUNC) {
  1489. dnerror(dnp, D_CG_EXPR, "%s %s( ) may not be "
  1490. "called from a D expression (D program "
  1491. "context required)\n",
  1492. dt_idkind_name(idp->di_kind), idp->di_name);
  1493. }
  1494. switch (idp->di_id) {
  1495. case DIF_SUBR_TYPEREF:
  1496. dt_cg_func_typeref(dtp, dnp);
  1497. break;
  1498. default:
  1499. break;
  1500. }
  1501. dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);
  1502. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1503. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1504. instr = DIF_INSTR_CALL(
  1505. dnp->dn_ident->di_id, dnp->dn_reg);
  1506. dt_irlist_append(dlp,
  1507. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1508. break;
  1509. }
  1510. case DT_NODE_VAR:
  1511. if (dnp->dn_ident->di_kind == DT_IDENT_XLSOU ||
  1512. dnp->dn_ident->di_kind == DT_IDENT_XLPTR) {
  1513. /*
  1514. * This can only happen if we have translated
  1515. * args[]. See dt_idcook_args() for details.
  1516. */
  1517. assert(dnp->dn_ident->di_id == DIF_VAR_ARGS);
  1518. dt_cg_array_op(dnp, dlp, drp);
  1519. break;
  1520. }
  1521. if (dnp->dn_ident->di_kind == DT_IDENT_ARRAY) {
  1522. if (dnp->dn_ident->di_id > DIF_VAR_ARRAY_MAX)
  1523. dt_cg_assoc_op(dnp, dlp, drp);
  1524. else
  1525. dt_cg_array_op(dnp, dlp, drp);
  1526. break;
  1527. }
  1528. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1529. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1530. if (dnp->dn_ident->di_flags & DT_IDFLG_LOCAL)
  1531. op = DIF_OP_LDLS;
  1532. else if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
  1533. op = DIF_OP_LDTS;
  1534. else
  1535. op = DIF_OP_LDGS;
  1536. dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;
  1537. instr = DIF_INSTR_LDV(op,
  1538. dnp->dn_ident->di_id, dnp->dn_reg);
  1539. dt_irlist_append(dlp,
  1540. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1541. break;
  1542. case DT_NODE_SYM: {
  1543. dtrace_hdl_t *dtp = yypcb->pcb_hdl;
  1544. dtrace_syminfo_t *sip = dnp->dn_ident->di_data;
  1545. GElf_Sym sym;
  1546. if (dtrace_lookup_by_name(dtp,
  1547. sip->dts_object, sip->dts_name, &sym, NULL) == -1) {
  1548. xyerror(D_UNKNOWN, "cg failed for symbol %s`%s:"
  1549. " %s\n", sip->dts_object, sip->dts_name,
  1550. dtrace_errmsg(dtp, dtrace_errno(dtp)));
  1551. }
  1552. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1553. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1554. dt_cg_xsetx(dlp, dnp->dn_ident,
  1555. DT_LBL_NONE, dnp->dn_reg, sym.st_value);
  1556. if (!(dnp->dn_flags & DT_NF_REF)) {
  1557. instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
  1558. dnp->dn_type), dnp->dn_reg, dnp->dn_reg);
  1559. dt_irlist_append(dlp,
  1560. dt_cg_node_alloc(DT_LBL_NONE, instr));
  1561. }
  1562. break;
  1563. }
  1564. default:
  1565. xyerror(D_UNKNOWN, "internal error -- node type %u is "
  1566. "not valid for an identifier\n", dnp->dn_kind);
  1567. }
  1568. break;
  1569. case DT_TOK_INT:
  1570. if ((dnp->dn_reg = dt_regset_alloc(drp)) == -1)
  1571. longjmp(yypcb->pcb_jmpbuf, EDT_NOREG);
  1572. dt_cg_setx(dlp, dnp->dn_reg, dnp->dn_value);
  1573. break;
  1574. default:
  1575. xyerror(D_UNKNOWN, "internal error -- token type %u is not a "
  1576. "valid D compilation token\n", dnp->dn_op);
  1577. }
  1578. }
  1579. void
  1580. dt_cg(dt_pcb_t *pcb, dt_node_t *dnp)
  1581. {
  1582. dif_instr_t instr;
  1583. dt_xlator_t *dxp;
  1584. if (pcb->pcb_regs == NULL && (pcb->pcb_regs =
  1585. dt_regset_create(pcb->pcb_hdl->dt_conf.dtc_difintregs)) == NULL)
  1586. longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);
  1587. dt_regset_reset(pcb->pcb_regs);
  1588. (void) dt_regset_alloc(pcb->pcb_regs); /* allocate %r0 */
  1589. if (pcb->pcb_inttab != NULL)
  1590. dt_inttab_destroy(pcb->pcb_inttab);
  1591. if ((pcb->pcb_inttab = dt_inttab_create(yypcb->pcb_hdl)) == NULL)
  1592. longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);
  1593. if (pcb->pcb_strtab != NULL)
  1594. dt_strtab_destroy(pcb->pcb_strtab);
  1595. if ((pcb->pcb_strtab = dt_strtab_create(BUFSIZ)) == NULL)
  1596. longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);
  1597. dt_irlist_destroy(&pcb->pcb_ir);
  1598. dt_irlist_create(&pcb->pcb_ir);
  1599. assert(pcb->pcb_dret == NULL);
  1600. pcb->pcb_dret = dnp;
  1601. if (dt_node_is_dynamic(dnp)) {
  1602. dnerror(dnp, D_CG_DYN, "expression cannot evaluate to result "
  1603. "of dynamic type\n");
  1604. }
  1605. /*
  1606. * If we're generating code for a translator body, assign the input
  1607. * parameter to the first available register (i.e. caller passes %r1).
  1608. */
  1609. if (dnp->dn_kind == DT_NODE_MEMBER) {
  1610. dxp = dnp->dn_membxlator;
  1611. dnp = dnp->dn_membexpr;
  1612. dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
  1613. dxp->dx_ident->di_id = dt_regset_alloc(pcb->pcb_regs);
  1614. }
  1615. dt_cg_node(dnp, &pcb->pcb_ir, pcb->pcb_regs);
  1616. instr = DIF_INSTR_RET(dnp->dn_reg);
  1617. dt_regset_free(pcb->pcb_regs, dnp->dn_reg);
  1618. dt_irlist_append(&pcb->pcb_ir, dt_cg_node_alloc(DT_LBL_NONE, instr));
  1619. if (dnp->dn_kind == DT_NODE_MEMBER) {
  1620. dt_regset_free(pcb->pcb_regs, dxp->dx_ident->di_id);
  1621. dxp->dx_ident->di_id = 0;
  1622. dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
  1623. }
  1624. }