/js/lib/Socket.IO-node/support/expresso/deps/jscoverage/js/jsemit.cpp
Possible License(s): GPL-2.0, LGPL-2.1, MPL-2.0-no-copyleft-exception, BSD-3-Clause
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=99:
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

/*
 * JS bytecode generation.
 */
#include "jsstddef.h"
#ifdef HAVE_MEMORY_H
#include <memory.h>
#endif
#include <string.h>
#include "jstypes.h"
#include "jsarena.h" /* Added by JSIFY */
#include "jsutil.h" /* Added by JSIFY */
#include "jsbit.h"
#include "jsprf.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jsbool.h"
#include "jscntxt.h"
#include "jsversion.h"
#include "jsemit.h"
#include "jsfun.h"
#include "jsnum.h"
#include "jsopcode.h"
#include "jsparse.h"
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsautooplen.h"
#include "jsstaticcheck.h"

/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK  256     /* code allocation increment */
#define SRCNOTE_CHUNK   64      /* initial srcnote allocation increment */
#define TRYNOTE_CHUNK   64      /* trynote allocation increment */

/* Macros to compute byte sizes from typed element counts. */
#define BYTECODE_SIZE(n)        ((n) * sizeof(jsbytecode))
#define SRCNOTE_SIZE(n)         ((n) * sizeof(jssrcnote))
#define TRYNOTE_SIZE(n)         ((n) * sizeof(JSTryNote))

static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
           uintN stackDepth, size_t start, size_t end);

JS_FRIEND_API(void)
js_InitCodeGenerator(JSContext *cx, JSCodeGenerator *cg, JSParseContext *pc,
                     JSArenaPool *codePool, JSArenaPool *notePool,
                     uintN lineno)
{
    memset(cg, 0, sizeof *cg);
    TREE_CONTEXT_INIT(&cg->treeContext, pc);
    cg->codePool = codePool;
    cg->notePool = notePool;
    cg->codeMark = JS_ARENA_MARK(codePool);
    cg->noteMark = JS_ARENA_MARK(notePool);
    cg->current = &cg->main;
    cg->firstLine = cg->prolog.currentLine = cg->main.currentLine = lineno;
    ATOM_LIST_INIT(&cg->atomList);
    cg->prolog.noteMask = cg->main.noteMask = SRCNOTE_CHUNK - 1;
    ATOM_LIST_INIT(&cg->constList);
    ATOM_LIST_INIT(&cg->upvarList);
}

JS_FRIEND_API(void)
js_FinishCodeGenerator(JSContext *cx, JSCodeGenerator *cg)
{
    TREE_CONTEXT_FINISH(cx, &cg->treeContext);
    JS_ARENA_RELEASE(cg->codePool, cg->codeMark);
    JS_ARENA_RELEASE(cg->notePool, cg->noteMark);

    /* NB: non-null only after OOM. */
    if (cg->spanDeps)
        JS_free(cx, cg->spanDeps);

    if (cg->upvarMap.vector)
        JS_free(cx, cg->upvarMap.vector);
}

static ptrdiff_t
EmitCheck(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t delta)
{
    jsbytecode *base, *limit, *next;
    ptrdiff_t offset, length;
    size_t incr, size;

    base = CG_BASE(cg);
    next = CG_NEXT(cg);
    limit = CG_LIMIT(cg);
    offset = PTRDIFF(next, base, jsbytecode);
    if (next + delta > limit) {
        length = offset + delta;
        length = (length <= BYTECODE_CHUNK)
                 ? BYTECODE_CHUNK
                 : JS_BIT(JS_CeilingLog2(length));
        incr = BYTECODE_SIZE(length);
        if (!base) {
            JS_ARENA_ALLOCATE_CAST(base, jsbytecode *, cg->codePool, incr);
        } else {
            size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
            incr -= size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
        }
        if (!base) {
            js_ReportOutOfScriptQuota(cx);
            return -1;
        }
        CG_BASE(cg) = base;
        CG_LIMIT(cg) = base + length;
        CG_NEXT(cg) = base + offset;
    }
    return offset;
}
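/*
 * Illustrative sketch (not part of the original Mozilla source): EmitCheck
 * above grows the bytecode vector to BYTECODE_CHUNK for small scripts and
 * otherwise to the smallest power of two holding offset + delta, amortizing
 * arena growth. RoundUpCodeLength is a hypothetical helper, added here for
 * exposition only, that mirrors that rounding in isolation.
 */
#if 0
static size_t
RoundUpCodeLength(size_t length)
{
    /* Same policy as EmitCheck: chunk-sized floor, power-of-two ceiling. */
    return (length <= BYTECODE_CHUNK)
           ? BYTECODE_CHUNK
           : ((size_t)1 << JS_CeilingLog2(length));
}
/* RoundUpCodeLength(100) == 256; RoundUpCodeLength(300) == 512. */
#endif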
static void
UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
{
    jsbytecode *pc;
    JSOp op;
    const JSCodeSpec *cs;
    uintN depth;
    intN nuses, ndefs;

    pc = CG_CODE(cg, target);
    op = (JSOp) *pc;
    cs = &js_CodeSpec[op];
    if (cs->format & JOF_TMPSLOT_MASK) {
        depth = (uintN) cg->stackDepth +
                ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
        if (depth > cg->maxStackDepth)
            cg->maxStackDepth = depth;
    }
    nuses = cs->nuses;
    if (nuses < 0)
        nuses = js_GetVariableStackUseLength(op, pc);
    cg->stackDepth -= nuses;
    JS_ASSERT(cg->stackDepth >= 0);
    if (cg->stackDepth < 0) {
        char numBuf[12];
        JSTokenStream *ts;

        JS_snprintf(numBuf, sizeof numBuf, "%d", target);
        ts = &cg->treeContext.parseContext->tokenStream;
        JS_ReportErrorFlagsAndNumber(cx, JSREPORT_WARNING,
                                     js_GetErrorMessage, NULL,
                                     JSMSG_STACK_UNDERFLOW,
                                     ts->filename ? ts->filename : "stdin",
                                     numBuf);
    }
    ndefs = cs->ndefs;
    if (ndefs < 0) {
        JSObject *blockObj;

        /* We just executed IndexParsedObject */
        JS_ASSERT(op == JSOP_ENTERBLOCK);
        JS_ASSERT(nuses == 0);
        blockObj = cg->objectList.lastPob->object;
        JS_ASSERT(STOBJ_GET_CLASS(blockObj) == &js_BlockClass);
        JS_ASSERT(JSVAL_IS_VOID(blockObj->fslots[JSSLOT_BLOCK_DEPTH]));

        OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
        ndefs = OBJ_BLOCK_COUNT(cx, blockObj);
    }
    cg->stackDepth += ndefs;
    if ((uintN)cg->stackDepth > cg->maxStackDepth)
        cg->maxStackDepth = cg->stackDepth;
}

ptrdiff_t
js_Emit1(JSContext *cx, JSCodeGenerator *cg, JSOp op)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 1);

    if (offset >= 0) {
        *CG_NEXT(cg)++ = (jsbytecode)op;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit2(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 2);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        CG_NEXT(cg) = next + 2;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_Emit3(JSContext *cx, JSCodeGenerator *cg, JSOp op, jsbytecode op1,
         jsbytecode op2)
{
    ptrdiff_t offset = EmitCheck(cx, cg, op, 3);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        next[0] = (jsbytecode)op;
        next[1] = op1;
        next[2] = op2;
        CG_NEXT(cg) = next + 3;
        UpdateDepth(cx, cg, offset);
    }
    return offset;
}

ptrdiff_t
js_EmitN(JSContext *cx, JSCodeGenerator *cg, JSOp op, size_t extra)
{
    ptrdiff_t length = 1 + (ptrdiff_t)extra;
    ptrdiff_t offset = EmitCheck(cx, cg, op, length);

    if (offset >= 0) {
        jsbytecode *next = CG_NEXT(cg);
        *next = (jsbytecode)op;
        memset(next + 1, 0, BYTECODE_SIZE(extra));
        CG_NEXT(cg) = next + length;

        /*
         * Don't UpdateDepth if op's use-count comes from the immediate
         * operand yet to be stored in the extra bytes after op.
         */
        if (js_CodeSpec[op].nuses >= 0)
            UpdateDepth(cx, cg, offset);
    }
    return offset;
}

/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[] = "finally block";
const char js_script_str[]        = "script";

static const char *statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "while loop",            /* WHILE_LOOP */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);

static const char *
StatementName(JSCodeGenerator *cg)
{
    if (!cg->treeContext.topStmt)
        return js_script_str;
    return statementName[cg->treeContext.topStmt->type];
}

static void
ReportStatementTooLarge(JSContext *cx, JSCodeGenerator *cg)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NEED_DIET,
                         StatementName(cg));
}

/*
 * Span-dependent instructions in JS bytecode consist of the jump (JOF_JUMP)
 * and switch (JOF_LOOKUPSWITCH, JOF_TABLESWITCH) format opcodes, subdivided
 * into unconditional (gotos and gosubs), and conditional jumps or branches
 * (which pop a value, test it, and jump depending on its value). Most jumps
 * have just one immediate operand, a signed offset from the jump opcode's pc
 * to the target bytecode. The lookup and table switch opcodes may contain
 * many jump offsets.
 *
 * Mozilla bug #80981 (http://bugzilla.mozilla.org/show_bug.cgi?id=80981) was
 * fixed by adding extended "X" counterparts to the opcodes/formats (NB: X is
 * suffixed to prefer JSOP_ORX thereby avoiding a JSOP_XOR name collision for
 * the extended form of the JSOP_OR branch opcode). The unextended or short
 * formats have 16-bit signed immediate offset operands, the extended or long
 * formats have 32-bit signed immediates. The span-dependency problem consists
 * of selecting as few long instructions as possible, or about as few -- since
 * jumps can span other jumps, extending one jump may cause another to need to
 * be extended.
 *
 * Most JS scripts are short, so need no extended jumps. We optimize for this
 * case by generating short jumps until we know a long jump is needed. After
 * that point, we keep generating short jumps, but each jump's 16-bit immediate
 * offset operand is actually an unsigned index into cg->spanDeps, an array of
 * JSSpanDep structs. Each struct tells the top offset in the script of the
 * opcode, the "before" offset of the jump (which will be the same as top for
 * simplex jumps, but which will index further into the bytecode array for a
 * non-initial jump offset in a lookup or table switch), the after "offset"
 * adjusted during span-dependent instruction selection (initially the same
 * value as the "before" offset), and the jump target (more below).
 *
 * Since we generate cg->spanDeps lazily, from within js_SetJumpOffset, we must
 * ensure that all bytecode generated so far can be inspected to discover where
 * the jump offset immediate operands lie within CG_CODE(cg). But the bonus is
 * that we generate span-dependency records sorted by their offsets, so we can
 * binary-search when trying to find a JSSpanDep for a given bytecode offset,
 * or the nearest JSSpanDep at or above a given pc.
 *
 * To avoid limiting scripts to 64K jumps, if the cg->spanDeps index overflows
 * 65534, we store SPANDEP_INDEX_HUGE in the jump's immediate operand. This
 * tells us that we need to binary-search for the cg->spanDeps entry by the
 * jump opcode's bytecode offset (sd->before).
 *
 * Jump targets need to be maintained in a data structure that lets us look
 * up an already-known target by its address (jumps may have a common target),
 * and that also lets us update the addresses (script-relative, a.k.a. absolute
 * offsets) of targets that come after a jump target (for when a jump below
 * that target needs to be extended). We use an AVL tree, implemented using
 * recursion, but with some tricky optimizations to its height-balancing code
 * (see http://www.cmcrossroads.com/bradapp/ftp/src/libs/C++/AvlTrees.html).
 *
 * A final wrinkle: backpatch chains are linked by jump-to-jump offsets with
 * positive sign, even though they link "backward" (i.e., toward lower bytecode
 * address). We don't want to waste space and search time in the AVL tree for
 * such temporary backpatch deltas, so we use a single-bit wildcard scheme to
 * tag true JSJumpTarget pointers and encode untagged, signed (positive) deltas
 * in JSSpanDep.target pointers, depending on whether the JSSpanDep has a known
 * target, or is still awaiting backpatching.
 *
 * Note that backpatch chains would present a problem for BuildSpanDepTable,
 * which inspects bytecode to build cg->spanDeps on demand, when the first
 * short jump offset overflows. To solve this temporary problem, we emit a
 * proxy bytecode (JSOP_BACKPATCH; JSOP_BACKPATCH_POP for branch ops) whose
 * nuses/ndefs counts help keep the stack balanced, but whose opcode format
 * distinguishes its backpatch delta immediate operand from a normal jump
 * offset.
 */
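/*
 * Illustrative sketch (not part of the original Mozilla source): one
 * plausible encoding of the single-bit wildcard scheme described above. The
 * real JT_* and SD_* macros live in jsemit.h; the bit layout below is an
 * assumption made for exposition only, not the shipped definitions.
 */
#if 0
#define EXAMPLE_TAG_BIT        ((jsword) 1)

/* A tagged JSSpanDep.target low bit marks a real JSJumpTarget pointer. */
#define EXAMPLE_HAS_TAG(jt)    ((jsword)(jt) & EXAMPLE_TAG_BIT)
#define EXAMPLE_SET_TAG(jt)    ((JSJumpTarget *)((jsword)(jt) | EXAMPLE_TAG_BIT))
#define EXAMPLE_CLR_TAG(jt)    ((JSJumpTarget *)((jsword)(jt) & ~EXAMPLE_TAG_BIT))

/* An untagged value encodes a positive backpatch delta, shifted past the
   tag bit so the two cases can never collide. */
#define EXAMPLE_TO_BPDELTA(jt) ((ptrdiff_t)((jsword)(jt) >> 1))
#endif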
static int
BalanceJumpTargets(JSJumpTarget **jtp)
{
    JSJumpTarget *jt, *jt2, *root;
    int dir, otherDir, heightChanged;
    JSBool doubleRotate;

    jt = *jtp;
    JS_ASSERT(jt->balance != 0);

    if (jt->balance < -1) {
        dir = JT_RIGHT;
        doubleRotate = (jt->kids[JT_LEFT]->balance > 0);
    } else if (jt->balance > 1) {
        dir = JT_LEFT;
        doubleRotate = (jt->kids[JT_RIGHT]->balance < 0);
    } else {
        return 0;
    }

    otherDir = JT_OTHER_DIR(dir);
    if (doubleRotate) {
        jt2 = jt->kids[otherDir];
        *jtp = root = jt2->kids[dir];

        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        jt2->kids[dir] = root->kids[otherDir];
        root->kids[otherDir] = jt2;

        heightChanged = 1;
        root->kids[JT_LEFT]->balance = -JS_MAX(root->balance, 0);
        root->kids[JT_RIGHT]->balance = -JS_MIN(root->balance, 0);
        root->balance = 0;
    } else {
        *jtp = root = jt->kids[otherDir];
        jt->kids[otherDir] = root->kids[dir];
        root->kids[dir] = jt;

        heightChanged = (root->balance != 0);
        jt->balance = -((dir == JT_LEFT) ? --root->balance : ++root->balance);
    }

    return heightChanged;
}

typedef struct AddJumpTargetArgs {
    JSContext           *cx;
    JSCodeGenerator     *cg;
    ptrdiff_t           offset;
    JSJumpTarget        *node;
} AddJumpTargetArgs;

static int
AddJumpTarget(AddJumpTargetArgs *args, JSJumpTarget **jtp)
{
    JSJumpTarget *jt;
    int balanceDelta;

    jt = *jtp;
    if (!jt) {
        JSCodeGenerator *cg = args->cg;

        jt = cg->jtFreeList;
        if (jt) {
            cg->jtFreeList = jt->kids[JT_LEFT];
        } else {
            JS_ARENA_ALLOCATE_CAST(jt, JSJumpTarget *, &args->cx->tempPool,
                                   sizeof *jt);
            if (!jt) {
                js_ReportOutOfScriptQuota(args->cx);
                return 0;
            }
        }
        jt->offset = args->offset;
        jt->balance = 0;
        jt->kids[JT_LEFT] = jt->kids[JT_RIGHT] = NULL;
        cg->numJumpTargets++;
        args->node = jt;
        *jtp = jt;
        return 1;
    }

    if (jt->offset == args->offset) {
        args->node = jt;
        return 0;
    }

    if (args->offset < jt->offset)
        balanceDelta = -AddJumpTarget(args, &jt->kids[JT_LEFT]);
    else
        balanceDelta = AddJumpTarget(args, &jt->kids[JT_RIGHT]);
    if (!args->node)
        return 0;

    jt->balance += balanceDelta;
    return (balanceDelta && jt->balance)
           ? 1 - BalanceJumpTargets(jtp)
           : 0;
}

#ifdef DEBUG_brendan
static int AVLCheck(JSJumpTarget *jt)
{
    int lh, rh;

    if (!jt) return 0;
    JS_ASSERT(-1 <= jt->balance && jt->balance <= 1);
    lh = AVLCheck(jt->kids[JT_LEFT]);
    rh = AVLCheck(jt->kids[JT_RIGHT]);
    JS_ASSERT(jt->balance == rh - lh);
    return 1 + JS_MAX(lh, rh);
}
#endif

static JSBool
SetSpanDepTarget(JSContext *cx, JSCodeGenerator *cg, JSSpanDep *sd,
                 ptrdiff_t off)
{
    AddJumpTargetArgs args;

    if (off < JUMPX_OFFSET_MIN || JUMPX_OFFSET_MAX < off) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    args.cx = cx;
    args.cg = cg;
    args.offset = sd->top + off;
    args.node = NULL;
    AddJumpTarget(&args, &cg->jumpTargets);
    if (!args.node)
        return JS_FALSE;

#ifdef DEBUG_brendan
    AVLCheck(cg->jumpTargets);
#endif

    SD_SET_TARGET(sd, args.node);
    return JS_TRUE;
}

#define SPANDEPS_MIN            256
#define SPANDEPS_SIZE(n)        ((n) * sizeof(JSSpanDep))
#define SPANDEPS_SIZE_MIN       SPANDEPS_SIZE(SPANDEPS_MIN)
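/*
 * Illustrative sketch (not part of the original Mozilla source): AddSpanDep
 * below doubles cg->spanDeps whenever the next index is a power of two at or
 * above SPANDEPS_MIN, using the classic (index & (index - 1)) == 0 test. The
 * hypothetical helper here shows that test in isolation.
 */
#if 0
static JSBool
IsPowerOfTwo(uintN index)
{
    /* True for 1, 2, 4, 8, ...; also true for 0, which in AddSpanDep
       corresponds to the very first call and triggers the initial
       SPANDEPS_SIZE_MIN allocation. */
    return (index & (index - 1)) == 0;
}
#endif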
static JSBool
AddSpanDep(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc, jsbytecode *pc2,
           ptrdiff_t off)
{
    uintN index;
    JSSpanDep *sdbase, *sd;
    size_t size;

    index = cg->numSpanDeps;
    if (index + 1 == 0) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if ((index & (index - 1)) == 0 &&
        (!(sdbase = cg->spanDeps) || index >= SPANDEPS_MIN)) {
        size = sdbase ? SPANDEPS_SIZE(index) : SPANDEPS_SIZE_MIN / 2;
        sdbase = (JSSpanDep *) JS_realloc(cx, sdbase, size + size);
        if (!sdbase)
            return JS_FALSE;
        cg->spanDeps = sdbase;
    }

    cg->numSpanDeps = index + 1;
    sd = cg->spanDeps + index;
    sd->top = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
    sd->offset = sd->before = PTRDIFF(pc2, CG_BASE(cg), jsbytecode);

    if (js_CodeSpec[*pc].format & JOF_BACKPATCH) {
        /* Jump offset will be backpatched if off is a non-zero "bpdelta". */
        if (off != 0) {
            JS_ASSERT(off >= 1 + JUMP_OFFSET_LEN);
            if (off > BPDELTA_MAX) {
                ReportStatementTooLarge(cx, cg);
                return JS_FALSE;
            }
        }
        SD_SET_BPDELTA(sd, off);
    } else if (off == 0) {
        /* Jump offset will be patched directly, without backpatch chaining. */
        SD_SET_TARGET(sd, 0);
    } else {
        /* The jump offset in off is non-zero, therefore it's already known. */
        if (!SetSpanDepTarget(cx, cg, sd, off))
            return JS_FALSE;
    }

    if (index > SPANDEP_INDEX_MAX)
        index = SPANDEP_INDEX_HUGE;
    SET_SPANDEP_INDEX(pc2, index);
    return JS_TRUE;
}

static jsbytecode *
AddSwitchSpanDeps(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc)
{
    JSOp op;
    jsbytecode *pc2;
    ptrdiff_t off;
    jsint low, high;
    uintN njumps, indexlen;

    op = (JSOp) *pc;
    JS_ASSERT(op == JSOP_TABLESWITCH || op == JSOP_LOOKUPSWITCH);
    pc2 = pc;
    off = GET_JUMP_OFFSET(pc2);
    if (!AddSpanDep(cx, cg, pc, pc2, off))
        return NULL;
    pc2 += JUMP_OFFSET_LEN;
    if (op == JSOP_TABLESWITCH) {
        low = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        high = GET_JUMP_OFFSET(pc2);
        pc2 += JUMP_OFFSET_LEN;
        njumps = (uintN) (high - low + 1);
        indexlen = 0;
    } else {
        njumps = GET_UINT16(pc2);
        pc2 += UINT16_LEN;
        indexlen = INDEX_LEN;
    }
    while (njumps) {
        --njumps;
        pc2 += indexlen;
        off = GET_JUMP_OFFSET(pc2);
        if (!AddSpanDep(cx, cg, pc, pc2, off))
            return NULL;
        pc2 += JUMP_OFFSET_LEN;
    }
    return 1 + pc2;
}

static JSBool
BuildSpanDepTable(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *end;
    JSOp op;
    const JSCodeSpec *cs;
    ptrdiff_t off;

    pc = CG_BASE(cg) + cg->spanDepTodo;
    end = CG_NEXT(cg);
    while (pc != end) {
        JS_ASSERT(pc < end);
        op = (JSOp)*pc;
        cs = &js_CodeSpec[op];

        switch (JOF_TYPE(cs->format)) {
          case JOF_TABLESWITCH:
          case JOF_LOOKUPSWITCH:
            pc = AddSwitchSpanDeps(cx, cg, pc);
            if (!pc)
                return JS_FALSE;
            break;

          case JOF_JUMP:
            off = GET_JUMP_OFFSET(pc);
            if (!AddSpanDep(cx, cg, pc, pc, off))
                return JS_FALSE;
            /* FALL THROUGH */
          default:
            pc += cs->length;
            break;
        }
    }

    return JS_TRUE;
}

static JSSpanDep *
GetSpanDep(JSCodeGenerator *cg, jsbytecode *pc)
{
    uintN index;
    ptrdiff_t offset;
    int lo, hi, mid;
    JSSpanDep *sd;

    index = GET_SPANDEP_INDEX(pc);
    if (index != SPANDEP_INDEX_HUGE)
        return cg->spanDeps + index;

    offset = PTRDIFF(pc, CG_BASE(cg), jsbytecode);
    lo = 0;
    hi = cg->numSpanDeps - 1;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = cg->spanDeps + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }

    JS_ASSERT(0);
    return NULL;
}

static JSBool
SetBackPatchDelta(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                  ptrdiff_t delta)
{
    JSSpanDep *sd;

    JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);
    if (!cg->spanDeps && delta < JUMP_OFFSET_MAX) {
        SET_JUMP_OFFSET(pc, delta);
        return JS_TRUE;
    }

    if (delta > BPDELTA_MAX) {
        ReportStatementTooLarge(cx, cg);
        return JS_FALSE;
    }

    if (!cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return JS_FALSE;

    sd = GetSpanDep(cg, pc);
    JS_ASSERT(SD_GET_BPDELTA(sd) == 0);
    SD_SET_BPDELTA(sd, delta);
    return JS_TRUE;
}
static void
UpdateJumpTargets(JSJumpTarget *jt, ptrdiff_t pivot, ptrdiff_t delta)
{
    if (jt->offset > pivot) {
        jt->offset += delta;
        if (jt->kids[JT_LEFT])
            UpdateJumpTargets(jt->kids[JT_LEFT], pivot, delta);
    }
    if (jt->kids[JT_RIGHT])
        UpdateJumpTargets(jt->kids[JT_RIGHT], pivot, delta);
}

static JSSpanDep *
FindNearestSpanDep(JSCodeGenerator *cg, ptrdiff_t offset, int lo,
                   JSSpanDep *guard)
{
    int num, hi, mid;
    JSSpanDep *sdbase, *sd;

    num = cg->numSpanDeps;
    JS_ASSERT(num > 0);
    hi = num - 1;
    sdbase = cg->spanDeps;
    while (lo <= hi) {
        mid = (lo + hi) / 2;
        sd = sdbase + mid;
        if (sd->before == offset)
            return sd;
        if (sd->before < offset)
            lo = mid + 1;
        else
            hi = mid - 1;
    }
    if (lo == num)
        return guard;
    sd = sdbase + lo;
    JS_ASSERT(sd->before >= offset && (lo == 0 || sd[-1].before < offset));
    return sd;
}

static void
FreeJumpTargets(JSCodeGenerator *cg, JSJumpTarget *jt)
{
    if (jt->kids[JT_LEFT])
        FreeJumpTargets(cg, jt->kids[JT_LEFT]);
    if (jt->kids[JT_RIGHT])
        FreeJumpTargets(cg, jt->kids[JT_RIGHT]);
    jt->kids[JT_LEFT] = cg->jtFreeList;
    cg->jtFreeList = jt;
}

static JSBool
OptimizeSpanDeps(JSContext *cx, JSCodeGenerator *cg)
{
    jsbytecode *pc, *oldpc, *base, *limit, *next;
    JSSpanDep *sd, *sd2, *sdbase, *sdlimit, *sdtop, guard;
    ptrdiff_t offset, growth, delta, top, pivot, span, length, target;
    JSBool done;
    JSOp op;
    uint32 type;
    size_t size, incr;
    jssrcnote *sn, *snlimit;
    JSSrcNoteSpec *spec;
    uintN i, n, noteIndex;
    JSTryNode *tryNode;
#ifdef DEBUG_brendan
    int passes = 0;
#endif

    base = CG_BASE(cg);
    sdbase = cg->spanDeps;
    sdlimit = sdbase + cg->numSpanDeps;
    offset = CG_OFFSET(cg);
    growth = 0;

    do {
        done = JS_TRUE;
        delta = 0;
        top = pivot = -1;
        sdtop = NULL;
        pc = NULL;
        op = JSOP_NOP;
        type = 0;
#ifdef DEBUG_brendan
        passes++;
#endif

        for (sd = sdbase; sd < sdlimit; sd++) {
            JS_ASSERT(JT_HAS_TAG(sd->target));
            sd->offset += delta;

            if (sd->top != top) {
                sdtop = sd;
                top = sd->top;
                JS_ASSERT(top == sd->before);
                pivot = sd->offset;
                pc = base + top;
                op = (JSOp) *pc;
                type = JOF_OPTYPE(op);
                if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                    /*
                     * We already extended all the jump offset operands for
                     * the opcode at sd->top. Jumps and branches have only
                     * one jump offset operand, but switches have many, all
                     * of which are adjacent in cg->spanDeps.
                     */
                    continue;
                }

                JS_ASSERT(type == JOF_JUMP ||
                          type == JOF_TABLESWITCH ||
                          type == JOF_LOOKUPSWITCH);
            }

            if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
                span = SD_SPAN(sd, pivot);
                if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                    ptrdiff_t deltaFromTop = 0;

                    done = JS_FALSE;

                    switch (op) {
                      case JSOP_GOTO:         op = JSOP_GOTOX; break;
                      case JSOP_IFEQ:         op = JSOP_IFEQX; break;
                      case JSOP_IFNE:         op = JSOP_IFNEX; break;
                      case JSOP_OR:           op = JSOP_ORX; break;
                      case JSOP_AND:          op = JSOP_ANDX; break;
                      case JSOP_GOSUB:        op = JSOP_GOSUBX; break;
                      case JSOP_CASE:         op = JSOP_CASEX; break;
                      case JSOP_DEFAULT:      op = JSOP_DEFAULTX; break;
                      case JSOP_TABLESWITCH:  op = JSOP_TABLESWITCHX; break;
                      case JSOP_LOOKUPSWITCH: op = JSOP_LOOKUPSWITCHX; break;
                      default:
                        ReportStatementTooLarge(cx, cg);
                        return JS_FALSE;
                    }
                    *pc = (jsbytecode) op;

                    for (sd2 = sdtop; sd2 < sdlimit && sd2->top == top; sd2++) {
                        if (sd2 <= sd) {
                            /*
                             * sd2->offset already includes delta as it stood
                             * before we entered this loop, but it must also
                             * include the delta relative to top due to all the
                             * extended jump offset immediates for the opcode
                             * starting at top, which we extend in this loop.
                             *
                             * If there is only one extended jump offset, then
                             * sd2->offset won't change and this for loop will
                             * iterate once only.
                             */
                            sd2->offset += deltaFromTop;
                            deltaFromTop += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        } else {
                            /*
                             * sd2 comes after sd, and won't be revisited by
                             * the outer for loop, so we have to increase its
                             * offset by delta, not merely by deltaFromTop.
                             */
                            sd2->offset += delta;
                        }

                        delta += JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN;
                        UpdateJumpTargets(cg->jumpTargets, sd2->offset,
                                          JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    }
                    sd = sd2 - 1;
                }
            }
        }

        growth += delta;
    } while (!done);

    if (growth) {
#ifdef DEBUG_brendan
        JSTokenStream *ts = &cg->treeContext.parseContext->tokenStream;

        printf("%s:%u: %u/%u jumps extended in %d passes (%d=%d+%d)\n",
               ts->filename ? ts->filename : "stdin", cg->firstLine,
               growth / (JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN), cg->numSpanDeps,
               passes, offset + growth, offset, growth);
#endif

        /*
         * Ensure that we have room for the extended jumps, but don't round up
         * to a power of two -- we're done generating code, so we cut to fit.
         */
        limit = CG_LIMIT(cg);
        length = offset + growth;
        next = base + length;
        if (next > limit) {
            JS_ASSERT(length > BYTECODE_CHUNK);
            size = BYTECODE_SIZE(PTRDIFF(limit, base, jsbytecode));
            incr = BYTECODE_SIZE(length) - size;
            JS_ARENA_GROW_CAST(base, jsbytecode *, cg->codePool, size, incr);
            if (!base) {
                js_ReportOutOfScriptQuota(cx);
                return JS_FALSE;
            }
            CG_BASE(cg) = base;
            CG_LIMIT(cg) = next = base + length;
        }
        CG_NEXT(cg) = next;

        /*
         * Set up a fake span dependency record to guard the end of the code
         * being generated. This guard record is returned as a fencepost by
         * FindNearestSpanDep if there is no real spandep at or above a given
         * unextended code offset.
         */
        guard.top = -1;
        guard.offset = offset + growth;
        guard.before = offset;
        guard.target = NULL;
    }

    /*
     * Now work backwards through the span dependencies, copying chunks of
     * bytecode between each extended jump toward the end of the grown code
     * space, and restoring immediate offset operands for all jump bytecodes.
     * The first chunk of bytecodes, starting at base and ending at the first
     * extended jump offset (NB: this chunk includes the operation bytecode
     * just before that immediate jump offset), doesn't need to be copied.
     */
    JS_ASSERT(sd == sdlimit);
    top = -1;
    while (--sd >= sdbase) {
        if (sd->top != top) {
            top = sd->top;
            op = (JSOp) base[top];
            type = JOF_OPTYPE(op);

            for (sd2 = sd - 1; sd2 >= sdbase && sd2->top == top; sd2--)
                continue;
            sd2++;
            pivot = sd2->offset;
            JS_ASSERT(top == sd2->before);
        }

        oldpc = base + sd->before;
        span = SD_SPAN(sd, pivot);

        /*
         * If this jump didn't need to be extended, restore its span immediate
         * offset operand now, overwriting the index of sd within cg->spanDeps
         * that was stored temporarily after *pc when BuildSpanDepTable ran.
         *
         * Note that span might fit in 16 bits even for an extended jump op,
         * if the op has multiple span operands, not all of which overflowed
         * (e.g. JSOP_LOOKUPSWITCH or JSOP_TABLESWITCH where some cases are in
         * range for a short jump, but others are not).
         */
        if (!JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            JS_ASSERT(JUMP_OFFSET_MIN <= span && span <= JUMP_OFFSET_MAX);
            SET_JUMP_OFFSET(oldpc, span);
            continue;
        }

        /*
         * Set up parameters needed to copy the next run of bytecode starting
         * at offset (which is a cursor into the unextended, original bytecode
         * vector), down to sd->before (a cursor of the same scale as offset,
         * it's the index of the original jump pc). Reuse delta to count the
         * nominal number of bytes to copy.
         */
        pc = base + sd->offset;
        delta = offset - sd->before;
        JS_ASSERT(delta >= 1 + JUMP_OFFSET_LEN);

        /*
         * Don't bother copying the jump offset we're about to reset, but do
         * copy the bytecode at oldpc (which comes just before its immediate
         * jump offset operand), on the next iteration through the loop, by
         * including it in offset's new value.
         */
        offset = sd->before + 1;
        size = BYTECODE_SIZE(delta - (1 + JUMP_OFFSET_LEN));
        if (size) {
            memmove(pc + 1 + JUMPX_OFFSET_LEN,
                    oldpc + 1 + JUMP_OFFSET_LEN,
                    size);
        }

        SET_JUMPX_OFFSET(pc, span);
    }

    if (growth) {
        /*
         * Fix source note deltas. Don't hardwire the delta fixup adjustment,
         * even though currently it must be JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN
         * at each sd that moved. The future may bring different offset sizes
         * for span-dependent instruction operands. However, we fix only main
         * notes here, not prolog notes -- we know that prolog opcodes are not
         * span-dependent, and aren't likely ever to be.
         */
        offset = growth = 0;
        sd = sdbase;
        for (sn = cg->main.notes, snlimit = sn + cg->main.noteCount;
             sn < snlimit;
             sn = SN_NEXT(sn)) {
            /*
             * Recall that the offset of a given note includes its delta, and
             * tells the offset of the annotated bytecode from the main entry
             * point of the script.
             */
            offset += SN_DELTA(sn);
            while (sd < sdlimit && sd->before < offset) {
                /*
                 * To compute the delta to add to sn, we need to look at the
                 * spandep after sd, whose offset - (before + growth) tells by
                 * how many bytes sd's instruction grew.
                 */
                sd2 = sd + 1;
                if (sd2 == sdlimit)
                    sd2 = &guard;
                delta = sd2->offset - (sd2->before + growth);
                if (delta > 0) {
                    JS_ASSERT(delta == JUMPX_OFFSET_LEN - JUMP_OFFSET_LEN);
                    sn = js_AddToSrcNoteDelta(cx, cg, sn, delta);
                    if (!sn)
                        return JS_FALSE;
                    snlimit = cg->main.notes + cg->main.noteCount;
                    growth += delta;
                }
                sd++;
            }

            /*
             * If sn has span-dependent offset operands, check whether each
             * covers further span-dependencies, and increase those operands
             * accordingly. Some source notes measure offset not from the
             * annotated pc, but from that pc plus some small bias. NB: we
             * assume that spec->offsetBias can't itself span span-dependent
             * instructions!
             */
            spec = &js_SrcNoteSpec[SN_TYPE(sn)];
            if (spec->isSpanDep) {
                pivot = offset + spec->offsetBias;
                n = spec->arity;
                for (i = 0; i < n; i++) {
                    span = js_GetSrcNoteOffset(sn, i);
                    if (span == 0)
                        continue;
                    target = pivot + span * spec->isSpanDep;
                    sd2 = FindNearestSpanDep(cg, target,
                                             (target >= pivot)
                                             ? sd - sdbase
                                             : 0,
                                             &guard);

                    /*
                     * Increase target by sd2's before-vs-after offset delta,
                     * which is absolute (i.e., relative to start of script,
                     * as is target). Recompute the span by subtracting its
                     * adjusted pivot from target.
                     */
                    target += sd2->offset - sd2->before;
                    span = target - (pivot + growth);
                    span *= spec->isSpanDep;
                    noteIndex = sn - cg->main.notes;
                    if (!js_SetSrcNoteOffset(cx, cg, noteIndex, i, span))
                        return JS_FALSE;
                    sn = cg->main.notes + noteIndex;
                    snlimit = cg->main.notes + cg->main.noteCount;
                }
            }
        }
        cg->main.lastNoteOffset += growth;

        /*
         * Fix try/catch notes (O(numTryNotes * log2(numSpanDeps)), but it's
         * not clear how we can beat that).
         */
        for (tryNode = cg->lastTryNode; tryNode; tryNode = tryNode->prev) {
            /*
             * First, look for the nearest span dependency at/above tn->start.
             * There may not be any such spandep, in which case the guard will
             * be returned.
             */
            offset = tryNode->note.start;
            sd = FindNearestSpanDep(cg, offset, 0, &guard);
            delta = sd->offset - sd->before;
            tryNode->note.start = offset + delta;

            /*
             * Next, find the nearest spandep at/above tn->start + tn->length.
             * Use its delta minus tn->start's delta to increase tn->length.
             */
            length = tryNode->note.length;
            sd2 = FindNearestSpanDep(cg, offset + length, sd - sdbase, &guard);
            if (sd2 != sd) {
                tryNode->note.length =
                    length + sd2->offset - sd2->before - delta;
            }
        }
    }

#ifdef DEBUG_brendan
  {
    uintN bigspans = 0;
    top = -1;
    for (sd = sdbase; sd < sdlimit; sd++) {
        offset = sd->offset;

        /* NB: sd->top cursors into the original, unextended bytecode vector. */
        if (sd->top != top) {
            JS_ASSERT(top == -1 ||
                      !JOF_TYPE_IS_EXTENDED_JUMP(type) ||
                      bigspans != 0);
            bigspans = 0;
            top = sd->top;
            JS_ASSERT(top == sd->before);
            op = (JSOp) base[offset];
            type = JOF_OPTYPE(op);
            JS_ASSERT(type == JOF_JUMP ||
                      type == JOF_JUMPX ||
                      type == JOF_TABLESWITCH ||
                      type == JOF_TABLESWITCHX ||
                      type == JOF_LOOKUPSWITCH ||
                      type == JOF_LOOKUPSWITCHX);
            pivot = offset;
        }

        pc = base + offset;
        if (JOF_TYPE_IS_EXTENDED_JUMP(type)) {
            span = GET_JUMPX_OFFSET(pc);
            if (span < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < span) {
                bigspans++;
            } else {
                JS_ASSERT(type == JOF_TABLESWITCHX ||
                          type == JOF_LOOKUPSWITCHX);
            }
        } else {
            span = GET_JUMP_OFFSET(pc);
        }
        JS_ASSERT(SD_SPAN(sd, pivot) == span);
    }
    JS_ASSERT(!JOF_TYPE_IS_EXTENDED_JUMP(type) || bigspans != 0);
  }
#endif

    /*
     * Reset so we optimize at most once -- cg may be used for further code
     * generation of successive, independent, top-level statements. No jump
     * can span top-level statements, because JS lacks goto.
     */
    size = SPANDEPS_SIZE(JS_BIT(JS_CeilingLog2(cg->numSpanDeps)));
    JS_free(cx, cg->spanDeps);
    cg->spanDeps = NULL;
    FreeJumpTargets(cg, cg->jumpTargets);
    cg->jumpTargets = NULL;
    cg->numSpanDeps = cg->numJumpTargets = 0;
    cg->spanDepTodo = CG_OFFSET(cg);
    return JS_TRUE;
}

static ptrdiff_t
EmitJump(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t off)
{
    JSBool extend;
    ptrdiff_t jmp;
    jsbytecode *pc;

    extend = off < JUMP_OFFSET_MIN || JUMP_OFFSET_MAX < off;
    if (extend && !cg->spanDeps && !BuildSpanDepTable(cx, cg))
        return -1;

    jmp = js_Emit3(cx, cg, op, JUMP_OFFSET_HI(off), JUMP_OFFSET_LO(off));
    if (jmp >= 0 && (extend || cg->spanDeps)) {
        pc = CG_CODE(cg, jmp);
        if (!AddSpanDep(cx, cg, pc, pc, off))
            return -1;
    }
    return jmp;
}

static ptrdiff_t
GetJumpOffset(JSCodeGenerator *cg, jsbytecode *pc)
{
    JSSpanDep *sd;
    JSJumpTarget *jt;
    ptrdiff_t top;

    if (!cg->spanDeps)
        return GET_JUMP_OFFSET(pc);

    sd = GetSpanDep(cg, pc);
    jt = sd->target;
    if (!JT_HAS_TAG(jt))
        return JT_TO_BPDELTA(jt);

    top = sd->top;
    while (--sd >= cg->spanDeps && sd->top == top)
        continue;
    sd++;
    return JT_CLR_TAG(jt)->offset - sd->offset;
}

JSBool
js_SetJumpOffset(JSContext *cx, JSCodeGenerator *cg, jsbytecode *pc,
                 ptrdiff_t off)
{
    if (!cg->spanDeps) {
        if (JUMP_OFFSET_MIN <= off && off <= JUMP_OFFSET_MAX) {
            SET_JUMP_OFFSET(pc, off);
            return JS_TRUE;
        }

        if (!BuildSpanDepTable(cx, cg))
            return JS_FALSE;
    }

    return SetSpanDepTarget(cx, cg, GetSpanDep(cg, pc), off);
}
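/*
 * Illustrative sketch (not part of the original Mozilla source): the typical
 * forward-jump pattern in this file is to emit the jump with a placeholder
 * offset via EmitJump, then patch it via js_SetJumpOffset once the target is
 * known. EmitPatchedForwardJump is a hypothetical wrapper showing that flow.
 */
#if 0
static JSBool
EmitPatchedForwardJump(JSContext *cx, JSCodeGenerator *cg)
{
    ptrdiff_t jmp, target;

    /* Emit a goto whose 16-bit offset operand is a placeholder (0). */
    jmp = EmitJump(cx, cg, JSOP_GOTO, 0);
    if (jmp < 0)
        return JS_FALSE;

    /* ... emit the code being jumped over ... */

    /* Patch the jump to land at the current offset; js_SetJumpOffset
       switches to span-dependency records if the span overflows 16 bits. */
    target = CG_OFFSET(cg);
    return js_SetJumpOffset(cx, cg, CG_CODE(cg, jmp), target - jmp);
}
#endif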
JSBool
js_InStatement(JSTreeContext *tc, JSStmtType type)
{
    JSStmtInfo *stmt;

    for (stmt = tc->topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == type)
            return JS_TRUE;
    }
    return JS_FALSE;
}

void
js_PushStatement(JSTreeContext *tc, JSStmtInfo *stmt, JSStmtType type,
                 ptrdiff_t top)
{
    stmt->type = type;
    stmt->flags = 0;
    SET_STATEMENT_TOP(stmt, top);
    stmt->u.label = NULL;
    JS_ASSERT(!stmt->u.blockObj);
    stmt->down = tc->topStmt;
    tc->topStmt = stmt;
    if (STMT_LINKS_SCOPE(stmt)) {
        stmt->downScope = tc->topScopeStmt;
        tc->topScopeStmt = stmt;
    } else {
        stmt->downScope = NULL;
    }
}

void
js_PushBlockScope(JSTreeContext *tc, JSStmtInfo *stmt, JSObject *blockObj,
                  ptrdiff_t top)
{
    js_PushStatement(tc, stmt, STMT_BLOCK, top);
    stmt->flags |= SIF_SCOPE;
    STOBJ_SET_PARENT(blockObj, tc->blockChain);
    stmt->downScope = tc->topScopeStmt;
    tc->topScopeStmt = stmt;
    tc->blockChain = blockObj;
    stmt->u.blockObj = blockObj;
}

/*
 * Emit a backpatch op with offset pointing to the previous jump of this type,
 * so that we can walk back up the chain fixing up the op and jump offset.
 */
static ptrdiff_t
EmitBackPatchOp(JSContext *cx, JSCodeGenerator *cg, JSOp op, ptrdiff_t *lastp)
{
    ptrdiff_t offset, delta;

    offset = CG_OFFSET(cg);
    delta = offset - *lastp;
    *lastp = offset;
    JS_ASSERT(delta > 0);
    return EmitJump(cx, cg, op, delta);
}
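/*
 * Illustrative sketch (not part of the original Mozilla source): a backpatch
 * chain, as grown by EmitBackPatchOp above, threads each JSOP_BACKPATCH back
 * to the previous one via a positive delta, with *lastp holding the chain's
 * head offset. BackPatch (defined below) consumes such a chain for real; this
 * hypothetical walker shows just the traversal, assuming no span-dependency
 * table is in play (otherwise GetJumpOffset must be used instead of
 * GET_JUMP_OFFSET).
 */
#if 0
static void
WalkBackPatchChain(JSCodeGenerator *cg, ptrdiff_t last)
{
    jsbytecode *pc = CG_CODE(cg, last);
    jsbytecode *stop = CG_CODE(cg, -1);     /* chains start at offset -1 */

    while (pc != stop) {
        /* Each link's offset operand is the delta back to the prior link. */
        ptrdiff_t delta = GET_JUMP_OFFSET(pc);
        pc -= delta;
    }
}
#endif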
1366 */ 1367 npops += 2; 1368 break; 1369 1370 default:; 1371 } 1372 1373 if (stmt->flags & SIF_SCOPE) { 1374 uintN i; 1375 1376 /* There is a Block object with locals on the stack to pop. */ 1377 FLUSH_POPS(); 1378 if (js_NewSrcNote(cx, cg, SRC_HIDDEN) < 0) 1379 return JS_FALSE; 1380 i = OBJ_BLOCK_COUNT(cx, stmt->u.blockObj); 1381 EMIT_UINT16_IMM_OP(JSOP_LEAVEBLOCK, i); 1382 } 1383 } 1384 1385 FLUSH_POPS(); 1386 cg->stackDepth = depth; 1387 return JS_TRUE; 1388 1389#undef FLUSH_POPS 1390} 1391 1392static ptrdiff_t 1393EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt, 1394 ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType) 1395{ 1396 intN index; 1397 1398 if (!EmitNonLocalJumpFixup(cx, cg, toStmt)) 1399 return -1; 1400 1401 if (label) 1402 index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label)); 1403 else if (noteType != SRC_NULL) 1404 index = js_NewSrcNote(cx, cg, noteType); 1405 else 1406 index = 0; 1407 if (index < 0) 1408 return -1; 1409 1410 return EmitBackPatchOp(cx, cg, JSOP_BACKPATCH, lastp); 1411} 1412 1413static JSBool 1414BackPatch(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t last, 1415 jsbytecode *target, jsbytecode op) 1416{ 1417 jsbytecode *pc, *stop; 1418 ptrdiff_t delta, span; 1419 1420 pc = CG_CODE(cg, last); 1421 stop = CG_CODE(cg, -1); 1422 while (pc != stop) { 1423 delta = GetJumpOffset(cg, pc); 1424 span = PTRDIFF(target, pc, jsbytecode); 1425 CHECK_AND_SET_JUMP_OFFSET(cx, cg, pc, span); 1426 1427 /* 1428 * Set *pc after jump offset in case bpdelta didn't overflow, but span 1429 * does (if so, CHECK_AND_SET_JUMP_OFFSET might call BuildSpanDepTable 1430 * and need to see the JSOP_BACKPATCH* op at *pc). 1431 */ 1432 *pc = op; 1433 pc -= delta; 1434 } 1435 return JS_TRUE; 1436} 1437 1438void 1439js_PopStatement(JSTreeContext *tc) 1440{ 1441 JSStmtInfo *stmt; 1442 1443 stmt = tc->topStmt; 1444 tc->topStmt = stmt->down; 1445 if (STMT_LINKS_SCOPE(stmt)) { 1446 tc->topScopeStmt = stmt->downScope; 1447 if (stmt->flags & SIF_SCOPE) { 1448 tc->blockChain = STOBJ_GET_PARENT(stmt->u.blockObj); 1449 JS_SCOPE_DEPTH_METERING(--tc->scopeDepth); 1450 } 1451 } 1452} 1453 1454JSBool 1455js_PopStatementCG(JSContext *cx, JSCodeGenerator *cg) 1456{ 1457 JSStmtInfo *stmt; 1458 1459 stmt = cg->treeContext.topStmt; 1460 if (!STMT_IS_TRYING(stmt) && 1461 (!BackPatch(cx, cg, stmt->breaks, CG_NEXT(cg), JSOP_GOTO) || 1462 !BackPatch(cx, cg, stmt->continues, CG_CODE(cg, stmt->update), 1463 JSOP_GOTO))) { 1464 return JS_FALSE; 1465 } 1466 js_PopStatement(&cg->treeContext); 1467 return JS_TRUE; 1468} 1469 1470JSBool 1471js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom, 1472 JSParseNode *pn) 1473{ 1474 jsdouble dval; 1475 jsint ival; 1476 JSAtom *valueAtom; 1477 jsval v; 1478 JSAtomListElement *ale; 1479 1480 /* XXX just do numbers for now */ 1481 if (pn->pn_type == TOK_NUMBER) { 1482 dval = pn->pn_dval; 1483 if (JSDOUBLE_IS_INT(dval, ival) && INT_FITS_IN_JSVAL(ival)) { 1484 v = INT_TO_JSVAL(ival); 1485 } else { 1486 /* 1487 * We atomize double to root a jsdouble instance that we wrap as 1488 * jsval and store in cg->constList. This works because atoms are 1489 * protected from GC during compilation. 
            valueAtom = js_AtomizeDouble(cx, dval);
            if (!valueAtom)
                return JS_FALSE;
            v = ATOM_KEY(valueAtom);
        }
        ale = js_IndexAtom(cx, atom, &cg->constList);
        if (!ale)
            return JS_FALSE;
        ALE_SET_VALUE(ale, v);
    }
    return JS_TRUE;
}

JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp)
{
    JSStmtInfo *stmt;
    JSObject *obj;
    JSScope *scope;
    JSScopeProperty *sprop;

    for (stmt = tc->topScopeStmt; stmt; stmt = stmt->downScope) {
        if (stmt->type == STMT_WITH)
            break;

        /* Skip "maybe scope" statements that don't contain let bindings. */
        if (!(stmt->flags & SIF_SCOPE))
            continue;

        obj = stmt->u.blockObj;
        JS_ASSERT(LOCKED_OBJ_GET_CLASS(obj) == &js_BlockClass);
        scope = OBJ_SCOPE(obj);
        sprop = SCOPE_GET_PROPERTY(scope, ATOM_TO_JSID(atom));
        if (sprop) {
            JS_ASSERT(sprop->flags & SPROP_HAS_SHORTID);

            if (slotp) {
                JS_ASSERT(JSVAL_IS_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]));
                *slotp = JSVAL_TO_INT(obj->fslots[JSSLOT_BLOCK_DEPTH]) +
                         sprop->shortid;
            }
            re…
[Large file truncated in this listing; the remainder of jsemit.cpp is not shown.]