PageRenderTime 163ms CodeModel.GetById 19ms app.highlight 131ms RepoModel.GetById 1ms app.codeStats 1ms

/js/lib/Socket.IO-node/support/expresso/deps/jscoverage/js/jstracer.cpp

http://github.com/onedayitwillmake/RealtimeMultiplayerNodeJs
C++ | 1877 lines | 1476 code | 187 blank | 214 comment | 360 complexity | c3178098a8b605185c2e4b7b07c99e8c MD5 | raw file

Large files are truncated, but you can click here to view the full file

   1/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
   2 * vim: set ts=4 sw=4 et tw=99:
   3 *
   4 * ***** BEGIN LICENSE BLOCK *****
   5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
   6 *
   7 * The contents of this file are subject to the Mozilla Public License Version
   8 * 1.1 (the "License"); you may not use this file except in compliance with
   9 * the License. You may obtain a copy of the License at
  10 * http://www.mozilla.org/MPL/
  11 *
  12 * Software distributed under the License is distributed on an "AS IS" basis,
  13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
  14 * for the specific language governing rights and limitations under the
  15 * License.
  16 *
  17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
  18 * May 28, 2008.
  19 *
  20 * The Initial Developer of the Original Code is
  21 *   Brendan Eich <brendan@mozilla.org>
  22 *
  23 * Contributor(s):
  24 *   Andreas Gal <gal@mozilla.com>
  25 *   Mike Shaver <shaver@mozilla.org>
  26 *   David Anderson <danderson@mozilla.com>
  27 *
  28 * Alternatively, the contents of this file may be used under the terms of
  29 * either of the GNU General Public License Version 2 or later (the "GPL"),
  30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
  31 * in which case the provisions of the GPL or the LGPL are applicable instead
  32 * of those above. If you wish to allow use of your version of this file only
  33 * under the terms of either the GPL or the LGPL, and not to allow others to
  34 * use your version of this file under the terms of the MPL, indicate your
  35 * decision by deleting the provisions above and replace them with the notice
  36 * and other provisions required by the GPL or the LGPL. If you do not delete
  37 * the provisions above, a recipient may use your version of this file under
  38 * the terms of any one of the MPL, the GPL or the LGPL.
  39 *
  40 * ***** END LICENSE BLOCK ***** */
  41
  42#include "jsstddef.h"           // always first
  43#include "jsbit.h"              // low-level (NSPR-based) headers next
  44#include "jsprf.h"
  45#include <math.h>               // standard headers next
  46#ifdef _MSC_VER
  47#include <malloc.h>
  48#define alloca _alloca
  49#endif
  50#ifdef SOLARIS
  51#include <alloca.h>
  52#endif
  53
  54#include "nanojit/nanojit.h"
  55#include "jsarray.h"            // higher-level library and API headers
  56#include "jsbool.h"
  57#include "jscntxt.h"
  58#include "jsdbgapi.h"
  59#include "jsemit.h"
  60#include "jsfun.h"
  61#include "jsinterp.h"
  62#include "jsiter.h"
  63#include "jsobj.h"
  64#include "jsopcode.h"
  65#include "jsregexp.h"
  66#include "jsscope.h"
  67#include "jsscript.h"
  68#include "jsdate.h"
  69#include "jsstaticcheck.h"
  70#include "jstracer.h"
  71
  72#include "jsautooplen.h"        // generated headers last
  73
  74/* Never use JSVAL_IS_BOOLEAN because it restricts the value (true, false) and 
  75   the type. What you want to use is JSVAL_TAG(x) == JSVAL_BOOLEAN and then 
  76   handle the undefined case properly (bug 457363). */
  77#undef JSVAL_IS_BOOLEAN
  78#define JSVAL_IS_BOOLEAN(x) JS_STATIC_ASSERT(0) 
  79
  80/* Use a fake tag to represent boxed values, borrowing from the integer tag
  81   range since we only use JSVAL_INT to indicate integers. */
  82#define JSVAL_BOXED 3
  83
  84/* Map to translate a type tag into a printable representation. */
  85static const char typeChar[] = "OIDVS?B?";
  86
  87/* Number of iterations of a loop where we start tracing.  That is, we don't
  88   start tracing until the beginning of the HOTLOOP-th iteration. */
  89#define HOTLOOP 2
  90
  91/* Number of times we wait to exit on a side exit before we try to extend the tree. */
  92#define HOTEXIT 1
  93
  94/* Max call depths for inlining. */
  95#define MAX_CALLDEPTH 10
  96
/* Max number of type mismatches before we trash the tree. */
  98#define MAX_MISMATCH 20
  99
 100/* Max blacklist level of inner tree immediate recompiling  */
 101#define MAX_INNER_RECORD_BLACKLIST  -16
 102
 103/* Max native stack size. */
 104#define MAX_NATIVE_STACK_SLOTS 1024
 105
 106/* Max call stack size. */
 107#define MAX_CALL_STACK_ENTRIES 64
 108
 109/* Max number of branches per tree. */
 110#define MAX_BRANCHES 16
 111
 112/* Macros for demote slot lists */
 113#define ALLOCA_UNDEMOTE_SLOTLIST(num)     (unsigned*)alloca(((num) + 1) * sizeof(unsigned))
 114#define ADD_UNDEMOTE_SLOT(list, slot)     list[++list[0]] = slot
 115#define NUM_UNDEMOTE_SLOTS(list)          list[0]
 116#define CLEAR_UNDEMOTE_SLOTLIST(list)     list[0] = 0
 117
 118#ifdef JS_JIT_SPEW
 119#define ABORT_TRACE(msg)   do { debug_only_v(fprintf(stdout, "abort: %d: %s\n", __LINE__, msg);)  return false; } while (0)
 120#else
 121#define ABORT_TRACE(msg)   return false
 122#endif
 123
 124#ifdef JS_JIT_SPEW
 125struct __jitstats {
 126#define JITSTAT(x) uint64 x;
 127#include "jitstats.tbl"
 128#undef JITSTAT
 129} jitstats = { 0LL, };
 130
 131JS_STATIC_ASSERT(sizeof(jitstats) % sizeof(uint64) == 0);
 132
 133enum jitstat_ids {
 134#define JITSTAT(x) STAT ## x ## ID,
 135#include "jitstats.tbl"
 136#undef JITSTAT
 137    STAT_IDS_TOTAL
 138};
 139
 140static JSPropertySpec jitstats_props[] = {
 141#define JITSTAT(x) { #x, STAT ## x ## ID, JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT },
 142#include "jitstats.tbl"
 143#undef JITSTAT
 144    { 0 }
 145};
 146
/* Getter for the jitstats object: maps a property id to the matching
   counter in the global `jitstats` struct (case labels are generated from
   jitstats.tbl via the JITSTAT macro). The string property "HOTLOOP" is
   special-cased to expose the tracing-threshold constant to script. */
static JSBool
jitstats_getProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
{
    int index = -1;

    if (JSVAL_IS_STRING(id)) {
        JSString* str = JSVAL_TO_STRING(id);
        if (strcmp(JS_GetStringBytes(str), "HOTLOOP") == 0) {
            *vp = INT_TO_JSVAL(HOTLOOP);
            return JS_TRUE;
        }
    }

    if (JSVAL_IS_INT(id))
        index = JSVAL_TO_INT(id);

    uint64 result = 0;
    switch (index) {
#define JITSTAT(x) case STAT ## x ## ID: result = jitstats.x; break;
#include "jitstats.tbl"
#undef JITSTAT
      default:
        /* Unknown ids report undefined rather than failing the get. */
        *vp = JSVAL_VOID;
        return JS_TRUE;
    }

    /* Small counters fit in a tagged-int jsval; larger ones are returned
       as a decimal string since they exceed the int jsval range. */
    if (result < JSVAL_INT_MAX) {
        *vp = INT_TO_JSVAL(result);
        return JS_TRUE;
    }
    char retstr[64];
    JS_snprintf(retstr, sizeof retstr, "%llu", result);
    *vp = STRING_TO_JSVAL(JS_NewStringCopyZ(cx, retstr));
    return JS_TRUE;
}
 182
/* Class for the object exposing the JIT statistics counters; all property
   reads funnel through jitstats_getProperty above. */
JSClass jitstats_class = {
    "jitstats",
    JSCLASS_HAS_PRIVATE,
    JS_PropertyStub,       JS_PropertyStub,
    jitstats_getProperty,  JS_PropertyStub,
    JS_EnumerateStub,      JS_ResolveStub,
    JS_ConvertStub,        JS_FinalizeStub,
    JSCLASS_NO_OPTIONAL_MEMBERS
};
 192
/* Install the "jitstats" class on the global object so scripts can read
   the JIT counters (available in JS_JIT_SPEW builds only). */
void
js_InitJITStatsClass(JSContext *cx, JSObject *glob)
{
    JS_InitClass(cx, glob, NULL, &jitstats_class, NULL, 0, jitstats_props, NULL, NULL, NULL);
}
 198
 199#define AUDIT(x) (jitstats.x++)
 200#else
 201#define AUDIT(x) ((void)0)
 202#endif /* JS_JIT_SPEW */
 203
 204#define INS_CONST(c)    addName(lir->insImm(c), #c)
 205#define INS_CONSTPTR(p) addName(lir->insImmPtr((void*) (p)), #p)
 206
 207using namespace avmplus;
 208using namespace nanojit;
 209
 210static GC gc = GC();
 211static avmplus::AvmCore s_core = avmplus::AvmCore();
 212static avmplus::AvmCore* core = &s_core;
 213
 214#ifdef JS_JIT_SPEW
 215void
 216js_DumpPeerStability(Fragmento* frago, const void* ip);
 217#endif
 218
 219/* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
 220static bool nesting_enabled = true;
 221#if defined(NANOJIT_IA32)
 222static bool did_we_check_sse2 = false;
 223#endif
 224
 225#ifdef JS_JIT_SPEW
 226static bool verbose_debug = getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
 227#define debug_only_v(x) if (verbose_debug) { x; }
 228#else
 229#define debug_only_v(x)
 230#endif
 231
 232/* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
 233   case cause performance regressions. */
 234static Oracle oracle;
 235
 236/* Blacklists the root peer fragment at a fragment's PC.  This is so blacklisting stays at the 
 237   top of the peer list and not scattered around. */
 238void
 239js_BlacklistPC(Fragmento* frago, Fragment* frag);
 240
 241Tracker::Tracker()
 242{
 243    pagelist = 0;
 244}
 245
/* Release every page this tracker allocated. */
Tracker::~Tracker()
{
    clear();
}
 250
 251jsuword
 252Tracker::getPageBase(const void* v) const
 253{
 254    return jsuword(v) & ~jsuword(NJ_PAGE_SIZE-1);
 255}
 256
 257struct Tracker::Page*
 258Tracker::findPage(const void* v) const
 259{
 260    jsuword base = getPageBase(v);
 261    struct Tracker::Page* p = pagelist;
 262    while (p) {
 263        if (p->base == base) {
 264            return p;
 265        }
 266        p = p->next;
 267    }
 268    return 0;
 269}
 270
/* Allocate and link a new page covering address v, sized for one LIns*
   slot per 4-byte word of the page (NJ_PAGE_SIZE >> 2 map entries; the
   declared map[] member is over-allocated in place).
   NOTE(review): the map entries are not explicitly initialized here --
   presumably GC::Alloc returns zeroed memory so get() sees NULL; confirm. */
struct Tracker::Page*
Tracker::addPage(const void* v) {
    jsuword base = getPageBase(v);
    struct Tracker::Page* p = (struct Tracker::Page*)
        GC::Alloc(sizeof(*p) - sizeof(p->map) + (NJ_PAGE_SIZE >> 2) * sizeof(LIns*));
    p->base = base;
    p->next = pagelist;
    pagelist = p;
    return p;
}
 281
 282void
 283Tracker::clear()
 284{
 285    while (pagelist) {
 286        Page* p = pagelist;
 287        pagelist = pagelist->next;
 288        GC::Free(p);
 289    }
 290}
 291
 292bool
 293Tracker::has(const void *v) const
 294{
 295    return get(v) != NULL;
 296}
 297
 298#if defined NANOJIT_64BIT
 299#define PAGEMASK	0x7ff
 300#else
 301#define PAGEMASK	0xfff
 302#endif
 303
 304LIns*
 305Tracker::get(const void* v) const
 306{
 307    struct Tracker::Page* p = findPage(v);
 308    if (!p)
 309        return NULL;
 310    return p->map[(jsuword(v) & PAGEMASK) >> 2];
 311}
 312
 313void
 314Tracker::set(const void* v, LIns* i)
 315{
 316    struct Tracker::Page* p = findPage(v);
 317    if (!p)
 318        p = addPage(v);
 319    p->map[(jsuword(v) & PAGEMASK) >> 2] = i;
 320}
 321
 322static inline bool isNumber(jsval v)
 323{
 324    return JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v);
 325}
 326
 327static inline jsdouble asNumber(jsval v)
 328{
 329    JS_ASSERT(isNumber(v));
 330    if (JSVAL_IS_DOUBLE(v))
 331        return *JSVAL_TO_DOUBLE(v);
 332    return (jsdouble)JSVAL_TO_INT(v);
 333}
 334
 335static inline bool isInt32(jsval v)
 336{
 337    if (!isNumber(v))
 338        return false;
 339    jsdouble d = asNumber(v);
 340    jsint i;
 341    return JSDOUBLE_IS_INT(d, i);
 342}
 343
 344/* Return JSVAL_DOUBLE for all numbers (int and double) and the tag otherwise. */
 345static inline uint8 getPromotedType(jsval v) 
 346{
 347    return JSVAL_IS_INT(v) ? JSVAL_DOUBLE : uint8(JSVAL_TAG(v));
 348}
 349
 350/* Return JSVAL_INT for all whole numbers that fit into signed 32-bit and the tag otherwise. */
 351static inline uint8 getCoercedType(jsval v)
 352{
 353    return isInt32(v) ? JSVAL_INT : (uint8) JSVAL_TAG(v);
 354}
 355
/* Tell the oracle that a certain global variable should not be demoted.
   Note: `script` is unused -- global slots hash by slot number alone, so
   different scripts can collide (tolerated, see the oracle comment above). */
void
Oracle::markGlobalSlotUndemotable(JSScript* script, unsigned slot)
{
    _dontDemote.set(&gc, (slot % ORACLE_SIZE));
}
 362
/* Consult with the oracle whether we shouldn't demote a certain global
   variable. `script` is unused; must hash identically to the marker above. */
bool
Oracle::isGlobalSlotUndemotable(JSScript* script, unsigned slot) const
{
    return _dontDemote.get(slot % ORACLE_SIZE);
}
 369
 370/* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
 371void
 372Oracle::markStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot)
 373{
 374    uint32 hash = uint32(intptr_t(ip)) + (slot << 5);
 375    hash %= ORACLE_SIZE;
 376    _dontDemote.set(&gc, hash);
 377}
 378
 379/* Consult with the oracle whether we shouldn't demote a certain slot. */
 380bool
 381Oracle::isStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot) const
 382{
 383    uint32 hash = uint32(intptr_t(ip)) + (slot << 5);
 384    hash %= ORACLE_SIZE;
 385    return _dontDemote.get(hash);
 386}
 387
/* Clear the oracle: forget every undemotable-slot marking. */
void
Oracle::clear()
{
    _dontDemote.reset();
}
 394
 395#if defined(NJ_SOFTFLOAT)
 396JS_DEFINE_CALLINFO_1(static, DOUBLE,    i2f, INT32,                 1, 1)
 397JS_DEFINE_CALLINFO_1(static, DOUBLE,    u2f, UINT32,                1, 1)
 398#endif
 399
/* True if `i` converts an int32 to a double: either a direct LIR_i2f or,
   on soft-float targets, the qjoin(call, callh) form the SoftFloatFilter
   emits for the i2f helper. */
static bool isi2f(LInsp i)
{
    if (i->isop(LIR_i2f))
        return true;

#if defined(NJ_SOFTFLOAT)
    if (i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_call) &&
        i->oprnd2()->isop(LIR_callh))
    {
        if (i->oprnd1()->callInfo() == &i2f_ci)
            return true;
    }
#endif

    return false;
}
 417
/* True if `i` converts a uint32 to a double: either a direct LIR_u2f or,
   on soft-float targets, the qjoin(call, callh) form emitted for the
   u2f helper. */
static bool isu2f(LInsp i)
{
    if (i->isop(LIR_u2f))
        return true;

#if defined(NJ_SOFTFLOAT)
    if (i->isop(LIR_qjoin) &&
        i->oprnd1()->isop(LIR_call) &&
        i->oprnd2()->isop(LIR_callh))
    {
        if (i->oprnd1()->callInfo() == &u2f_ci)
            return true;
    }
#endif

    return false;
}
 435
/* Return the integer operand of an i2f/u2f conversion; handles both the
   direct-opcode form and (on soft-float targets) the qjoin'd call form
   matched by isi2f/isu2f. Caller must have verified one of those holds. */
static LInsp iu2fArg(LInsp i)
{
#if defined(NJ_SOFTFLOAT)
    if (i->isop(LIR_qjoin))
        return i->oprnd1()->arg(0);
#endif

    return i->oprnd1();
}
 445
 446
/* Strip a promotion from `i`, yielding the underlying 32-bit integer
   value: the argument of a conversion call, the operand of an i2f/u2f,
   the int constant itself, or a constant double re-emitted as an integer
   immediate. Caller must ensure the value is actually promotable
   (see isPromoteInt/isPromoteUint). */
static LIns* demote(LirWriter *out, LInsp i)
{
    if (i->isCall())
        return callArgN(i, 0);
    if (isi2f(i) || isu2f(i))
        return iu2fArg(i);
    if (i->isconst())
        return i;
    AvmAssert(i->isconstq());
    double cf = i->constvalf();
    /* Values above INT32_MAX round-trip through uint32_t so that unsigned
       promotions keep their bit pattern in the 32-bit immediate. */
    int32_t ci = cf > 0x7fffffff ? uint32_t(cf) : int32_t(cf);
    return out->insImm(ci);
}
 460
/* True if `i` is a double known to be exactly representable as a signed
   32-bit int: an i2f promotion, an int constant, or a double constant that
   round-trips through jsint (negative zero excluded, since -0.0 is not an
   int value). Note the deliberate assignment to `d` inside the condition. */
static bool isPromoteInt(LIns* i)
{
    jsdouble d;
    return isi2f(i) || i->isconst() ||
        (i->isconstq() && (d = i->constvalf()) == jsdouble(jsint(d)) && !JSDOUBLE_IS_NEGZERO(d));
}
 467
/* Unsigned counterpart of isPromoteInt: true if `i` is a double known to
   be exactly representable as a uint32 (u2f promotion, int constant, or a
   double constant that round-trips through jsuint; -0.0 excluded). */
static bool isPromoteUint(LIns* i)
{
    jsdouble d;
    return isu2f(i) || i->isconst() ||
        (i->isconstq() && (d = i->constvalf()) == (jsdouble)(jsuint)d && !JSDOUBLE_IS_NEGZERO(d));
}
 474
 475static bool isPromote(LIns* i)
 476{
 477    return isPromoteInt(i) || isPromoteUint(i);
 478}
 479
 480static bool isconst(LIns* i, int32_t c)
 481{
 482    return i->isconst() && i->constval() == c;
 483}
 484
/* Conservatively true if `i` cannot overflow 32-bit signed arithmetic:
   either it is masked by a constant with the top two bits clear, or it is
   right-shifted by a positive constant amount. */
static bool overflowSafe(LIns* i)
{
    LIns* c;
    return (i->isop(LIR_and) && ((c = i->oprnd2())->isconst()) &&
            ((c->constval() & 0xc0000000) == 0)) ||
           (i->isop(LIR_rsh) && ((c = i->oprnd2())->isconst()) &&
            ((c->constval() > 0)));
}
 493
 494#if defined(NJ_SOFTFLOAT)
 495/* soft float */
 496
 497JS_DEFINE_CALLINFO_1(static, DOUBLE,    fneg, DOUBLE,               1, 1)
 498JS_DEFINE_CALLINFO_2(static, INT32,     fcmpeq, DOUBLE, DOUBLE,     1, 1)
 499JS_DEFINE_CALLINFO_2(static, INT32,     fcmplt, DOUBLE, DOUBLE,     1, 1)
 500JS_DEFINE_CALLINFO_2(static, INT32,     fcmple, DOUBLE, DOUBLE,     1, 1)
 501JS_DEFINE_CALLINFO_2(static, INT32,     fcmpgt, DOUBLE, DOUBLE,     1, 1)
 502JS_DEFINE_CALLINFO_2(static, INT32,     fcmpge, DOUBLE, DOUBLE,     1, 1)
 503JS_DEFINE_CALLINFO_2(static, DOUBLE,    fmul, DOUBLE, DOUBLE,       1, 1)
 504JS_DEFINE_CALLINFO_2(static, DOUBLE,    fadd, DOUBLE, DOUBLE,       1, 1)
 505JS_DEFINE_CALLINFO_2(static, DOUBLE,    fdiv, DOUBLE, DOUBLE,       1, 1)
 506JS_DEFINE_CALLINFO_2(static, DOUBLE,    fsub, DOUBLE, DOUBLE,       1, 1)
 507
/* Out-of-line helpers invoked by SoftFloatFilter in place of hardware
   float LIR ops; each mirrors the corresponding LIR opcode exactly. The
   fcmp* helpers return an int32 boolean (0/1). */
jsdouble FASTCALL
fneg(jsdouble x)
{
    return -x;
}

jsdouble FASTCALL
i2f(int32 i)
{
    return i;
}

jsdouble FASTCALL
u2f(jsuint u)
{
    return u;
}

int32 FASTCALL
fcmpeq(jsdouble x, jsdouble y)
{
    return x==y;
}

int32 FASTCALL
fcmplt(jsdouble x, jsdouble y)
{
    return x < y;
}

int32 FASTCALL
fcmple(jsdouble x, jsdouble y)
{
    return x <= y;
}

int32 FASTCALL
fcmpgt(jsdouble x, jsdouble y)
{
    return x > y;
}

int32 FASTCALL
fcmpge(jsdouble x, jsdouble y)
{
    return x >= y;
}

jsdouble FASTCALL
fmul(jsdouble x, jsdouble y)
{
    return x * y;
}

jsdouble FASTCALL
fadd(jsdouble x, jsdouble y)
{
    return x + y;
}

jsdouble FASTCALL
fdiv(jsdouble x, jsdouble y)
{
    return x / y;
}

jsdouble FASTCALL
fsub(jsdouble x, jsdouble y)
{
    return x - y;
}
 579
/* LIR writer for targets without hardware floating point: rewrites float
   LIR ops into calls to the f* helpers above. A double result is carried
   as a qjoin of the call (low 32 bits) and a LIR_callh (high 32 bits). */
class SoftFloatFilter: public LirWriter
{
public:
    SoftFloatFilter(LirWriter* out):
        LirWriter(out)
    {
    }

    /* Emit a helper call returning a double: join the low/high halves. */
    LInsp quadCall(const CallInfo *ci, LInsp args[]) {
        LInsp qlo, qhi;

        qlo = out->insCall(ci, args);
        qhi = out->ins1(LIR_callh, qlo);
        return out->qjoin(qlo, qhi);
    }

    /* Rewrite unary float ops (fneg, i2f, u2f) into helper calls;
       everything else passes through unchanged. */
    LInsp ins1(LOpcode v, LInsp s0)
    {
        if (v == LIR_fneg)
            return quadCall(&fneg_ci, &s0);

        if (v == LIR_i2f)
            return quadCall(&i2f_ci, &s0);

        if (v == LIR_u2f)
            return quadCall(&u2f_ci, &s0);

        return out->ins1(v, s0);
    }

    /* Rewrite binary float arithmetic and comparisons into helper calls.
       args[] is filled in reverse (args[0] = s1) to match the call ABI. */
    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
    {
        LInsp args[2];
        LInsp bv;

        // change the numeric value and order of these LIR opcodes and die
        if (LIR_fadd <= v && v <= LIR_fdiv) {
            static const CallInfo *fmap[] = { &fadd_ci, &fsub_ci, &fmul_ci, &fdiv_ci };

            args[0] = s1;
            args[1] = s0;

            return quadCall(fmap[v - LIR_fadd], args);
        }

        if (LIR_feq <= v && v <= LIR_fge) {
            static const CallInfo *fmap[] = { &fcmpeq_ci, &fcmplt_ci, &fcmpgt_ci, &fcmple_ci, &fcmpge_ci };

            args[0] = s1;
            args[1] = s0;

            /* Normalize the helper's 0/1 result into a LIR condition. */
            bv = out->insCall(fmap[v - LIR_feq], args);
            return out->ins2(LIR_eq, bv, out->insImm(1));
        }

        return out->ins2(v, s0, s1);
    }

    LInsp insCall(const CallInfo *ci, LInsp args[])
    {
        // if the return type is ARGSIZE_F, we have
        // to do a quadCall ( qjoin(call,callh) )
        if ((ci->_argtypes & 3) == ARGSIZE_F)
            return quadCall(ci, args);

        return out->insCall(ci, args);
    }
};
 648
 649#endif // NJ_SOFTFLOAT
 650
/* LIR writer that peephole-optimizes instructions as they are emitted:
   folds always-true float self-compares, demotes float compares on
   promoted ints/uints to integer compares, collapses a 16-bit halfword
   addition idiom back to a plain add, masks shift counts on ARM, and
   specializes several builtin calls (double<->int conversions, boxing). */
class FuncFilter: public LirWriter
{
public:
    FuncFilter(LirWriter* out):
        LirWriter(out)
    {
    }

    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
    {
        if (s0 == s1 && v == LIR_feq) {
            /* x == x is only false for NaN; fold when x can't be NaN. */
            if (isPromote(s0)) {
                // double(int) and double(uint) cannot be nan
                return insImm(1);
            }
            if (s0->isop(LIR_fmul) || s0->isop(LIR_fsub) || s0->isop(LIR_fadd)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    // add/sub/mul promoted ints can't be nan
                    return insImm(1);
                }
            }
        } else if (LIR_feq <= v && v <= LIR_fge) {
            if (isPromoteInt(s0) && isPromoteInt(s1)) {
                // demote fcmp to cmp
                v = LOpcode(v + (LIR_eq - LIR_feq));
                return out->ins2(v, demote(out, s0), demote(out, s1));
            } else if (isPromoteUint(s0) && isPromoteUint(s1)) {
                // uint compare
                v = LOpcode(v + (LIR_eq - LIR_feq));
                if (v != LIR_eq)
                    v = LOpcode(v + (LIR_ult - LIR_lt)); // cmp -> ucmp
                return out->ins2(v, demote(out, s0), demote(out, s1));
            }
        } else if (v == LIR_or &&
                   s0->isop(LIR_lsh) && isconst(s0->oprnd2(), 16) &&
                   s1->isop(LIR_and) && isconst(s1->oprnd2(), 0xffff)) {
            /* Match the split 16-bit halfword addition idiom: low halves
               added and masked, high halves added with the carry shifted
               in, then recombined with (msw << 16) | (lsw & 0xffff).
               When the operands line up, fold it back to one 32-bit add. */
            LIns* msw = s0->oprnd1();
            LIns* lsw = s1->oprnd1();
            LIns* x;
            LIns* y;
            if (lsw->isop(LIR_add) &&
                lsw->oprnd1()->isop(LIR_and) &&
                lsw->oprnd2()->isop(LIR_and) &&
                isconst(lsw->oprnd1()->oprnd2(), 0xffff) &&
                isconst(lsw->oprnd2()->oprnd2(), 0xffff) &&
                msw->isop(LIR_add) &&
                msw->oprnd1()->isop(LIR_add) &&
                msw->oprnd2()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd1()->isop(LIR_rsh) &&
                msw->oprnd1()->oprnd2()->isop(LIR_rsh) &&
                isconst(msw->oprnd2()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd1()->oprnd2(), 16) &&
                isconst(msw->oprnd1()->oprnd2()->oprnd2(), 16) &&
                (x = lsw->oprnd1()->oprnd1()) == msw->oprnd1()->oprnd1()->oprnd1() &&
                (y = lsw->oprnd2()->oprnd1()) == msw->oprnd1()->oprnd2()->oprnd1() &&
                lsw == msw->oprnd2()->oprnd1()) {
                return out->ins2(LIR_add, x, y);
            }
        }
#ifdef NANOJIT_ARM
        else if (v == LIR_lsh ||
                 v == LIR_rsh ||
                 v == LIR_ush)
        {
            // needed on ARM -- arm doesn't mask shifts to 31 like x86 does
            if (s1->isconst())
                s1->setimm16(s1->constval() & 31);
            else
                s1 = out->ins2(LIR_and, s1, out->insImm(31));
            return out->ins2(v, s0, s1);
        }
#endif

        return out->ins2(v, s0, s1);
    }

    LInsp insCall(const CallInfo *ci, LInsp args[])
    {
        LInsp s0 = args[0];
        if (ci == &js_DoubleToUint32_ci) {
            /* Constant-fold, or strip a preceding int->double conversion. */
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAUint32(s0->constvalf()));
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);
        } else if (ci == &js_DoubleToInt32_ci) {
            if (s0->isconstq())
                return out->insImm(js_DoubleToECMAInt32(s0->constvalf()));
            /* Demote a float add/sub of promoted ints to the int op. */
            if (s0->isop(LIR_fadd) || s0->isop(LIR_fsub)) {
                LInsp lhs = s0->oprnd1();
                LInsp rhs = s0->oprnd2();
                if (isPromote(lhs) && isPromote(rhs)) {
                    LOpcode op = LOpcode(s0->opcode() & ~LIR64);
                    return out->ins2(op, demote(out, lhs), demote(out, rhs));
                }
            }
            if (isi2f(s0) || isu2f(s0))
                return iu2fArg(s0);
            // XXX ARM -- check for qjoin(call(UnboxDouble),call(UnboxDouble))
            if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci) {
                LIns* args2[] = { callArgN(s0, 0) };
                return out->insCall(&js_UnboxInt32_ci, args2);
            }
            if (s0->isCall() && s0->callInfo() == &js_StringToNumber_ci) {
                // callArgN's ordering is that as seen by the builtin, not as stored in args here.
                // True story!
                LIns* args2[] = { callArgN(s0, 1), callArgN(s0, 0) };
                return out->insCall(&js_StringToInt32_ci, args2);
            }
        } else if (ci == &js_BoxDouble_ci) {
            /* Boxing a value that is really an int: box it as an int. */
            JS_ASSERT(s0->isQuad());
            if (s0->isop(LIR_i2f)) {
                LIns* args2[] = { s0->oprnd1(), args[1] };
                return out->insCall(&js_BoxInt32_ci, args2);
            }
            /* box(unbox(x)) == x. */
            if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)
                return callArgN(s0, 0);
        }
        return out->insCall(ci, args);
    }
};
 773
/* In debug mode vpname contains a textual description of the type of the
   slot during the forall iteration over all slots. */
 776#ifdef JS_JIT_SPEW
 777#define DEF_VPNAME          const char* vpname; unsigned vpnum
 778#define SET_VPNAME(name)    do { vpname = name; vpnum = 0; } while(0)
 779#define INC_VPNUM()         do { ++vpnum; } while(0)
 780#else
 781#define DEF_VPNAME          do {} while (0)
 782#define vpname ""
 783#define vpnum 0
 784#define SET_VPNAME(name)    ((void)0)
 785#define INC_VPNUM()         ((void)0)
 786#endif
 787
 788/* Iterate over all interned global variables. */
 789#define FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code)                        \
 790    JS_BEGIN_MACRO                                                            \
 791        DEF_VPNAME;                                                           \
 792        JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);  \
 793        unsigned n;                                                           \
 794        jsval* vp;                                                            \
 795        SET_VPNAME("global");                                                 \
 796        for (n = 0; n < ngslots; ++n) {                                       \
 797            vp = &STOBJ_GET_SLOT(globalObj, gslots[n]);                       \
 798            { code; }                                                         \
 799            INC_VPNUM();                                                      \
 800        }                                                                     \
 801    JS_END_MACRO
 802
/* Iterate over all slots in the frame, consisting of args, vars, and stack
   (except for the top-level frame, which does not have args or vars). */
 805#define FORALL_FRAME_SLOTS(fp, depth, code)                                   \
 806    JS_BEGIN_MACRO                                                            \
 807        jsval* vp;                                                            \
 808        jsval* vpstop;                                                        \
 809        if (fp->callee) {                                                     \
 810            if (depth == 0) {                                                 \
 811                SET_VPNAME("callee");                                         \
 812                vp = &fp->argv[-2];                                           \
 813                { code; }                                                     \
 814                SET_VPNAME("this");                                           \
 815                vp = &fp->argv[-1];                                           \
 816                { code; }                                                     \
 817                SET_VPNAME("argv");                                           \
 818                vp = &fp->argv[0]; vpstop = &fp->argv[fp->fun->nargs];        \
 819                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
 820            }                                                                 \
 821            SET_VPNAME("vars");                                               \
 822            vp = fp->slots; vpstop = &fp->slots[fp->script->nfixed];          \
 823            while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                  \
 824        }                                                                     \
 825        SET_VPNAME("stack");                                                  \
 826        vp = StackBase(fp); vpstop = fp->regs->sp;                            \
 827        while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                      \
 828        if (fsp < fspstop - 1) {                                              \
 829            JSStackFrame* fp2 = fsp[1];                                       \
 830            int missing = fp2->fun->nargs - fp2->argc;                        \
 831            if (missing > 0) {                                                \
 832                SET_VPNAME("missing");                                        \
 833                vp = fp->regs->sp;                                            \
 834                vpstop = vp + missing;                                        \
 835                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
 836            }                                                                 \
 837        }                                                                     \
 838    JS_END_MACRO
 839
 840/* Iterate over all slots in each pending frame. */
 841#define FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code)                   \
 842    JS_BEGIN_MACRO                                                            \
 843        DEF_VPNAME;                                                           \
 844        unsigned n;                                                           \
 845        JSStackFrame* currentFrame = cx->fp;                                  \
 846        JSStackFrame* entryFrame;                                             \
 847        JSStackFrame* fp = currentFrame;                                      \
 848        for (n = 0; n < callDepth; ++n) { fp = fp->down; }                    \
 849        entryFrame = fp;                                                      \
 850        unsigned frames = callDepth+1;                                        \
 851        JSStackFrame** fstack =                                               \
 852            (JSStackFrame**) alloca(frames * sizeof (JSStackFrame*));         \
 853        JSStackFrame** fspstop = &fstack[frames];                             \
 854        JSStackFrame** fsp = fspstop-1;                                       \
 855        fp = currentFrame;                                                    \
 856        for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }  \
 857        unsigned depth;                                                       \
 858        for (depth = 0, fsp = fstack; fsp < fspstop; ++fsp, ++depth) {        \
 859            fp = *fsp;                                                        \
 860            FORALL_FRAME_SLOTS(fp, depth, code);                              \
 861        }                                                                     \
 862    JS_END_MACRO
 863
 864#define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code)                    \
 865    JS_BEGIN_MACRO                                                            \
 866        FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code);                       \
 867        FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code);                  \
 868    JS_END_MACRO
 869
/* Calculate the total number of native frame slots we need from this frame
   all the way back to the entry frame, including the current stack usage.
   Must count slots in exactly the same order/quantity as the
   FORALL_SLOTS_IN_PENDING_FRAMES macro (asserted in debug builds). */
unsigned
js_NativeStackSlots(JSContext *cx, unsigned callDepth)
{
    JSStackFrame* fp = cx->fp;
    unsigned slots = 0;
#if defined _DEBUG
    unsigned int origCallDepth = callDepth;
#endif
    for (;;) {
        /* Operands currently pushed on this frame's expression stack. */
        unsigned operands = fp->regs->sp - StackBase(fp);
        slots += operands;
        if (fp->callee)
            slots += fp->script->nfixed;
        if (callDepth-- == 0) {
            /* Entry frame: also count callee, this, and the formal args. */
            if (fp->callee)
                slots += 2/*callee,this*/ + fp->fun->nargs;
#if defined _DEBUG
            /* Cross-check against the slot-iteration macro. */
            unsigned int m = 0;
            FORALL_SLOTS_IN_PENDING_FRAMES(cx, origCallDepth, m++);
            JS_ASSERT(m == slots);
#endif
            return slots;
        }
        /* Count formal arguments the caller did not actually supply. */
        JSStackFrame* fp2 = fp;
        fp = fp->down;
        int missing = fp2->fun->nargs - fp2->argc;
        if (missing > 0)
            slots += missing;
    }
    JS_NOT_REACHED("js_NativeStackSlots");
}
 903
/* Capture the type map for the selected slots of the global object. */
void
TypeMap::captureGlobalTypes(JSContext* cx, SlotList& slots)
{
    unsigned ngslots = slots.length();
    uint16* gslots = slots.data();
    setLength(ngslots);
    uint8* map = data();
    uint8* m = map;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        uint8 type = getCoercedType(*vp);
        /* If the oracle marks this global slot undemotable, record it as a
           double rather than an int even though its current value fits an int. */
        if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx->fp->script, gslots[n]))
            type = JSVAL_DOUBLE;
        JS_ASSERT(type != JSVAL_BOXED);
        *m++ = type;
    );
}
 921
/* Capture the type map for the currently pending stack frames. */
void
TypeMap::captureStackTypes(JSContext* cx, unsigned callDepth)
{
    /* Size the map to match the native stack layout for callDepth frames. */
    setLength(js_NativeStackSlots(cx, callDepth));
    uint8* map = data();
    uint8* m = map;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        uint8 type = getCoercedType(*vp);
        /* Stack slots the oracle marks undemotable are recorded as double,
           keyed by script, pc and the slot's index (m - map) in this map. */
        if ((type == JSVAL_INT) &&
            oracle.isStackSlotUndemotable(cx->fp->script, cx->fp->regs->pc, unsigned(m - map))) {
            type = JSVAL_DOUBLE;
        }
        debug_only_v(printf("capture %s%d: %d\n", vpname, vpnum, type);)
        *m++ = type;
    );
}
 939
 940/* Compare this type map to another one and see whether they match. */
 941bool
 942TypeMap::matches(TypeMap& other) const
 943{
 944    if (length() != other.length())
 945        return false;
 946    return !memcmp(data(), other.data(), length());
 947}
 948
 949/* Use the provided storage area to create a new type map that contains the partial type map
 950   with the rest of it filled up from the complete type map. */
 951static void
 952mergeTypeMaps(uint8** partial, unsigned* plength, uint8* complete, unsigned clength, uint8* mem)
 953{
 954    unsigned l = *plength;
 955    JS_ASSERT(l < clength);
 956    memcpy(mem, *partial, l * sizeof(uint8));
 957    memcpy(mem + l, complete + l, (clength - l) * sizeof(uint8));
 958    *partial = mem;
 959    *plength = clength;
 960}
 961
 962static void
 963js_TrashTree(JSContext* cx, Fragment* f);
 964
/* Construct a recorder for a new trace. Initializes recorder state from the
   anchor/fragment, builds the LIR writer pipeline, loads the interpreter
   state registers, and imports all known stack and global slots. */
TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
        TreeInfo* ti, unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
        VMSideExit* innermostNestedGuard, Fragment* outerToBlacklist)
{
    JS_ASSERT(!_fragment->vmprivate && ti);

    this->cx = cx;
    this->traceMonitor = &JS_TRACE_MONITOR(cx);
    this->globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
    this->anchor = _anchor;
    this->fragment = _fragment;
    this->lirbuf = _fragment->lirbuf;
    this->treeInfo = ti;
    /* When extending from a side exit, start at that exit's call depth. */
    this->callDepth = _anchor ? _anchor->calldepth : 0;
    this->atoms = cx->fp->script->atomMap.vector;
    this->deepAborted = false;
    this->applyingArguments = false;
    this->trashTree = false;
    this->whichTreeToTrash = _fragment->root;
    this->global_dslots = this->globalObj->dslots;
    this->terminate = false;
    this->outerToBlacklist = outerToBlacklist;
    this->wasRootFragment = _fragment == _fragment->root;

    debug_only_v(printf("recording starting from %s:%u@%u\n",
                        cx->fp->script->filename,
                        js_FramePCToLineNumber(cx, cx->fp),
                        FramePCOffset(cx->fp));)
    debug_only_v(printf("globalObj=%p, shape=%d\n", this->globalObj, OBJ_SHAPE(this->globalObj));)

    /* Chain up the LIR writer pipeline; each filter wraps the previous one,
       so instructions flow func -> expr -> cse -> (softfloat) -> (verbose)
       -> buffer writer. */
    lir = lir_buf_writer = new (&gc) LirBufWriter(lirbuf);
#ifdef DEBUG
    if (verbose_debug)
        lir = verbose_filter = new (&gc) VerboseWriter(&gc, lir, lirbuf->names);
#endif
#ifdef NJ_SOFTFLOAT
    lir = float_filter = new (&gc) SoftFloatFilter(lir);
#endif
    lir = cse_filter = new (&gc) CseFilter(lir, &gc);
    lir = expr_filter = new (&gc) ExprFilter(lir);
    lir = func_filter = new (&gc) FuncFilter(lir);
    lir->ins0(LIR_start);

    if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment) 
        lirbuf->state = addName(lir->insParam(0, 0), "state");

    /* Load the InterpState fields into named LIR values. */
    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
    lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
    cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
    gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp");
    eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
    eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");

    /* read into registers all values on the stack and all globals we know so far */
    import(treeInfo, lirbuf->sp, ngslots, callDepth, globalTypeMap, stackTypeMap);

    /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
       is what we expect it to be. */
    if (_anchor && _anchor->exitType == NESTED_EXIT) {
        LIns* nested_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, 
                                                offsetof(InterpState, lastTreeExitGuard)), 
                                                "lastTreeExitGuard");
        guard(true, lir->ins2(LIR_eq, nested_ins, INS_CONSTPTR(innermostNestedGuard)), NESTED_EXIT);
    }
}
1030
1031TreeInfo::~TreeInfo()
1032{
1033    UnstableExit* temp;
1034
1035    while (unstableExits) {
1036        temp = unstableExits->next;
1037        delete unstableExits;
1038        unstableExits = temp;
1039    }
1040}
1041
/* Tear down the recorder: release tree info we own, trash trees flagged for
   trashing, and delete the LIR writer pipeline built in the constructor. */
TraceRecorder::~TraceRecorder()
{
    JS_ASSERT(nextRecorderToAbort == NULL);
    JS_ASSERT(treeInfo && (fragment || wasDeepAborted()));
#ifdef DEBUG
    /* This recorder must not still be linked on the abort stack. */
    TraceRecorder* tr = JS_TRACE_MONITOR(cx).abortStack;
    while (tr != NULL)
    {
        JS_ASSERT(this != tr);
        tr = tr->nextRecorderToAbort;
    }
#endif
    if (fragment) {
        /* We own treeInfo only if we created the root and it never compiled
           (no code attached); otherwise the compiled tree owns it. */
        if (wasRootFragment && !fragment->root->code()) {
            JS_ASSERT(!fragment->root->vmprivate);
            delete treeInfo;
        }
        if (trashTree)
            js_TrashTree(cx, whichTreeToTrash);
    } else if (wasRootFragment) {
        /* Fragment was cleared (see removeFragmentoReferences); we still own
           the tree info for a root fragment. */
        delete treeInfo;
    }
#ifdef DEBUG
    delete verbose_filter;
#endif
    /* Delete the pipeline stages in reverse construction order. */
    delete cse_filter;
    delete expr_filter;
    delete func_filter;
#ifdef NJ_SOFTFLOAT
    delete float_filter;
#endif
    delete lir_buf_writer;
}
1075
/* Clear this recorder's fragment pointer so the destructor takes the
   fragment-less path. NOTE(review): presumably called when the Fragmento's
   fragments are being invalidated -- confirm against callers. */
void TraceRecorder::removeFragmentoReferences()
{
    fragment = NULL;
}
1080
/* Add debug information to a LIR instruction as we emit it. */
inline LIns*
TraceRecorder::addName(LIns* ins, const char* name)
{
#ifdef DEBUG
    /* Only DEBUG builds carry a name table; release builds just pass through. */
    lirbuf->names->addName(ins, name);
#endif
    return ins;
}
1090
1091/* Determine the current call depth (starting with the entry frame.) */
1092unsigned
1093TraceRecorder::getCallDepth() const
1094{
1095    return callDepth;
1096}
1097
/* Determine the offset in the native global frame for a jsval we track */
ptrdiff_t
TraceRecorder::nativeGlobalOffset(jsval* p) const
{
    JS_ASSERT(isGlobal(p));
    /* Fixed slots (fslots) occupy the first JS_INITIAL_NSLOTS entries of the
       native global frame; each native slot is sizeof(double) wide. */
    if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
        return size_t(p - globalObj->fslots) * sizeof(double);
    /* Dynamic slots (dslots) follow, biased by JS_INITIAL_NSLOTS. */
    return ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
}
1107
/* Determine whether a value is a global stack slot */
bool
TraceRecorder::isGlobal(jsval* p) const
{
    /* The size_t casts make these single unsigned range checks: a pointer
       below the start of fslots/dslots yields a huge unsigned difference,
       which fails the comparison. */
    return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
            (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
}
1115
/* Determine the offset in the native stack for a jsval we track.
   Walks the pending frames oldest-first, accumulating the native layout
   (argv incl. callee/this, fixed slots, operand stack, missing args) until
   p is located. In DEBUG builds a slow macro-driven walk computes the same
   offset and every RETURN asserts agreement. */
ptrdiff_t
TraceRecorder::nativeStackOffset(jsval* p) const
{
#ifdef DEBUG
    size_t slow_offset = 0;
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        if (vp == p) goto done;
        slow_offset += sizeof(double)
    );

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
    slow_offset += size_t(p - cx->fp->regs->sp) * sizeof(double);

done:
#define RETURN(offset) { JS_ASSERT((offset) == slow_offset); return offset; }
#else
#define RETURN(offset) { return offset; }
#endif
    size_t offset = 0;
    JSStackFrame* currentFrame = cx->fp;
    JSStackFrame* entryFrame;
    JSStackFrame* fp = currentFrame;
    /* Find the entry frame, callDepth frames below the current one. */
    for (unsigned n = 0; n < callDepth; ++n) { fp = fp->down; }
    entryFrame = fp;
    /* Collect the pending frames into fstack, oldest (entry) first. */
    unsigned frames = callDepth+1;
    JSStackFrame** fstack = (JSStackFrame **)alloca(frames * sizeof (JSStackFrame *));
    JSStackFrame** fspstop = &fstack[frames];
    JSStackFrame** fsp = fspstop-1;
    fp = currentFrame;
    for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }
    for (fsp = fstack; fsp < fspstop; ++fsp) {
        fp = *fsp;
        if (fp->callee) {
            /* Only the entry frame contributes callee/this/args to the layout;
               inner frames' args live in their caller's operand area. */
            if (fsp == fstack) {
                if (size_t(p - &fp->argv[-2]) < size_t(2/*callee,this*/ + fp->fun->nargs))
                    RETURN(offset + size_t(p - &fp->argv[-2]) * sizeof(double));
                offset += (2/*callee,this*/ + fp->fun->nargs) * sizeof(double);
            }
            if (size_t(p - &fp->slots[0]) < fp->script->nfixed)
                RETURN(offset + size_t(p - &fp->slots[0]) * sizeof(double));
            offset += fp->script->nfixed * sizeof(double);
        }
        jsval* spbase = StackBase(fp);
        if (size_t(p - spbase) < size_t(fp->regs->sp - spbase))
            RETURN(offset + size_t(p - spbase) * sizeof(double));
        offset += size_t(fp->regs->sp - spbase) * sizeof(double);
        if (fsp < fspstop - 1) {
            /* Account for args the next (younger) frame expects but its
               caller did not push. */
            JSStackFrame* fp2 = fsp[1];
            int missing = fp2->fun->nargs - fp2->argc;
            if (missing > 0) {
                if (size_t(p - fp->regs->sp) < size_t(missing))
                    RETURN(offset + size_t(p - fp->regs->sp) * sizeof(double));
                offset += size_t(missing) * sizeof(double);
            }
        }
    }

    /*
     * If it's not in a pending frame, it must be on the stack of the current frame above
     * sp but below fp->slots + script->nslots.
     */
    JS_ASSERT(size_t(p - currentFrame->slots) < currentFrame->script->nslots);
    offset += size_t(p - currentFrame->regs->sp) * sizeof(double);
    RETURN(offset);
#undef RETURN
}
1187
1188/* Track the maximum number of native frame slots we need during
1189   execution. */
1190void
1191TraceRecorder::trackNativeStackUse(unsigned slots)
1192{
1193    if (slots > treeInfo->maxNativeStackSlots)
1194        treeInfo->maxNativeStackSlots = slots;
1195}
1196
/* Unbox a jsval into a slot. Slots are wide enough to hold double values directly (instead of 
   storing a pointer to them). We now assert instead of type checking, the caller must ensure the 
   types are compatible. */
static void
ValueToNative(JSContext* cx, jsval v, uint8 type, double* slot)
{
    unsigned tag = JSVAL_TAG(v);
    switch (type) {
      case JSVAL_INT:
        jsint i;
        if (JSVAL_IS_INT(v))
            *(jsint*)slot = JSVAL_TO_INT(v);
        /* A heap double holding an integral value can still be demoted. */
        else if ((tag == JSVAL_DOUBLE) && JSDOUBLE_IS_INT(*JSVAL_TO_DOUBLE(v), i))
            *(jsint*)slot = i;
        else
            JS_ASSERT(JSVAL_IS_INT(v)); /* type map promised an int; fails in DEBUG */
        debug_only_v(printf("int<%d> ", *(jsint*)slot);)
        return;
      case JSVAL_DOUBLE:
        jsdouble d;
        /* Both ints and heap doubles widen to a native double slot. */
        if (JSVAL_IS_INT(v))
            d = JSVAL_TO_INT(v);
        else
            d = *JSVAL_TO_DOUBLE(v);
        JS_ASSERT(JSVAL_IS_INT(v) || JSVAL_IS_DOUBLE(v));
        *(jsdouble*)slot = d;
        debug_only_v(printf("double<%g> ", d);)
        return;
      case JSVAL_BOOLEAN:
        JS_ASSERT(tag == JSVAL_BOOLEAN);
        *(JSBool*)slot = JSVAL_TO_BOOLEAN(v);
        debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
        return;
      case JSVAL_STRING:
        JS_ASSERT(tag == JSVAL_STRING);
        *(JSString**)slot = JSVAL_TO_STRING(v);
        debug_only_v(printf("string<%p> ", *(JSString**)slot);)
        return;
      default:
        /* Note: we should never see JSVAL_BOXED in an entry type map. */
        JS_ASSERT(type == JSVAL_OBJECT);
        JS_ASSERT(tag == JSVAL_OBJECT);
        *(JSObject**)slot = JSVAL_TO_OBJECT(v);
        debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        return;
    }
}
1247
1248/* We maintain an emergency recovery pool of doubles so we can recover safely if a trace runs
1249   out of memory (doubles or objects). */
1250static jsval
1251AllocateDoubleFromRecoveryPool(JSContext* cx)
1252{
1253    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
1254    JS_ASSERT(tm->recoveryDoublePoolPtr > tm->recoveryDoublePool);
1255    return *--tm->recoveryDoublePoolPtr;
1256}
1257
/* Refill the emergency double pool up to MAX_NATIVE_STACK_SLOTS entries.
   Returns false if we hit OOM (or a second GC) while refilling. */
static bool
js_ReplenishRecoveryPool(JSContext* cx, JSTraceMonitor* tm)
{
    /* We should not be called with a full pool. */
    JS_ASSERT((size_t) (tm->recoveryDoublePoolPtr - tm->recoveryDoublePool) <
              MAX_NATIVE_STACK_SLOTS);

    /*
     * When the GC runs in js_NewDoubleInRootedValue, it resets
     * tm->recoveryDoublePoolPtr back to tm->recoveryDoublePool. 
     */
    JSRuntime* rt = cx->runtime;
    uintN gcNumber = rt->gcNumber;
    jsval* ptr = tm->recoveryDoublePoolPtr; 
    while (ptr < tm->recoveryDoublePool + MAX_NATIVE_STACK_SLOTS) {
        if (!js_NewDoubleInRootedValue(cx, 0.0, ptr)) 
            goto oom;
        /* A GC ran during allocation: the pool was reset, so start the fill
           over. More than one GC while refilling means severe pressure. */
        if (rt->gcNumber != gcNumber) {
            JS_ASSERT(tm->recoveryDoublePoolPtr == tm->recoveryDoublePool);
            ptr = tm->recoveryDoublePool;
            if (uintN(rt->gcNumber - gcNumber) > uintN(1))
                goto oom;
            continue;
        }
        ++ptr;
    }
    tm->recoveryDoublePoolPtr = ptr;
    return true;

oom:
    /*
     * Already massive GC pressure, no need to hold doubles back.
     * We won't run any native code anyway.
     */
    tm->recoveryDoublePoolPtr = tm->recoveryDoublePool;
    return false;
}
1295
/* Box a value from the native stack back into the jsval format. Integers
   that are too large to fit into a jsval are automatically boxed into
   heap-allocated doubles. */
static bool
NativeToValue(JSContext* cx, jsval& v, uint8 type, double* slot)
{
    jsint i;
    jsdouble d;
    switch (type) {
      case JSVAL_BOOLEAN:
        v = BOOLEAN_TO_JSVAL(*(JSBool*)slot);
        debug_only_v(printf("boolean<%d> ", *(JSBool*)slot);)
        break;
      case JSVAL_INT:
        i = *(jsint*)slot;
        debug_only_v(printf("int<%d> ", i);)
      store_int:
        /* Tagged ints lose bits to the tag; overly large ints go to heap doubles. */
        if (INT_FITS_IN_JSVAL(i)) {
            v = INT_TO_JSVAL(i);
            break;
        }
        d = (jsdouble)i;
        goto store_double;
      case JSVAL_DOUBLE:
        d = *slot;
        debug_only_v(printf("double<%g> ", d);)
        /* Prefer the compact int representation when the double is integral. */
        if (JSDOUBLE_IS_INT(d, i))
            goto store_int;
      store_double: {
        /* Its not safe to trigger the GC here, so use an emergency heap if we are out of
           double boxes. */
        if (cx->doubleFreeList) {
#ifdef DEBUG
            bool ok =
#endif
                js_NewDoubleInRootedValue(cx, d, &v);
            JS_ASSERT(ok);
            return true;
        }
        /* Free list exhausted: take a prebox from the recovery pool instead. */
        v = AllocateDoubleFromRecoveryPool(cx);
        JS_ASSERT(JSVAL_IS_DOUBLE(v) && *JSVAL_TO_DOUBLE(v) == 0.0);
        *JSVAL_TO_DOUBLE(v) = d;
        return true;
      }
      case JSVAL_STRING:
        v = STRING_TO_JSVAL(*(JSString**)slot);
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_STRING); /* if this fails the pointer was not aligned */
        debug_only_v(printf("string<%p> ", *(JSString**)slot);)
        break;
      case JSVAL_BOXED:
        /* Boxed slots already hold a complete jsval; copy it through. */
        v = *(jsval*)slot;
        debug_only_v(printf("box<%lx> ", v));
        break;
      default:
        JS_ASSERT(type == JSVAL_OBJECT);
        v = OBJECT_TO_JSVAL(*(JSObject**)slot);
        JS_ASSERT(JSVAL_TAG(v) == JSVAL_OBJECT); /* if this fails the pointer was not aligned */
        debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v),
                            JSVAL_IS_NULL(v)
                            ? "null"
                            : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
        break;
    }
    return true;
}
1361
/* Attempt to unbox the given list of interned globals onto the native global frame.
   np is indexed by the interned slot number (gslots[n]), not densely. */
static void
BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    debug_only_v(printf("global: ");)
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        ValueToNative(cx, *vp, *mp, np + gslots[n]);
        ++mp;
    );
    debug_only_v(printf("\n");)
}
1373
/* Attempt to unbox the given JS frame onto a native frame. Unlike the global
   case, stack slots are stored densely: mp and np advance together. */
static void
BuildNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np)
{
    debug_only_v(printf("stack: ");)
    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
        debug_only_v(printf("%s%u=", vpname, vpnum);)
        ValueToNative(cx, *vp, *mp, np);
        ++mp; ++np;
    );
    debug_only_v(printf("\n");)
}
1386
/* Box the given native frame into a JS frame. This only fails due to a hard error
   (out of memory for example). Returns the number of slots processed, or -1
   on failure. */
static int
FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
{
    uint8* mp_base = mp;
    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
        /* NativeToValue fails only on a hard error (e.g. OOM). */
        if (!NativeToValue(cx, *vp, *mp, np + gslots[n]))
            return -1;
        ++mp;
    );
    debug_only_v(printf("\n");)
    return mp - mp_base;
}
1401
1402/**
1403 * Box the given native stack frame into the virtual machine stack. This fails
1404 * only due to a hard error (out of memory for example).
1405 *
1406 * @param callDepth the distance between the entry frame into our trace and
1407 *                  cx->fp when we make this call.  If this is not called as a
1408 *                  result of a nested exit, callDepth is 0.
1409 * @param mp pointer to an array of type tags (JSVAL_INT, etc.) that indicate
1410 *           what the types of the things on the stack are.
1411 * @param np pointer to the native stack.  We want to copy values from here to
1412 *           the JS stack as needed.
1413 * @param stopFrame if non-null, this frame and everything above it should not
1414 *                  be restored.
1415 * @return the number of things we popped off of np.
1416 */
1417static int
1418FlushNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np,
1419                      JSStackFrame* stopFrame)
1420{
1421    jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
1422    uint8* mp_base = mp;
1423    /* Root all string and object references first (we don't need to call the GC for this). */
1424    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
1425        if (vp == stopAt) goto skip;
1426        debug_only_v(printf("%s%u=", vpname, vpnum);)
1427        if (!NativeToValue(cx, *vp, *mp, np))
1428            return -1;
1429        ++mp; ++np
1430    );
1431skip:
1432    // Restore thisp from the now-restored argv[-1] in each pending frame.
1433    // Keep in mind that we didn't restore frames at stopFrame and above!
1434    // Scope to keep |fp| from leaking into the macros we're using.
1435    {
1436        unsigned n = callDepth+1; // +1 to make sure we restore the entry frame
1437        JSStackFrame* fp = cx->fp;
1438        if (stopFrame) {
1439            for (; fp != stopFrame; fp = fp->down) {
1440                JS_ASSERT(n != 0);
1441                --n;
1442            }
1443            // Skip over stopFrame itself.
1444            JS_ASSERT(n != 0);
1445            --n;
1446            fp = fp->down;
1447        }
1448        for (; n != 0; fp = fp->down) {
1449            --n;
1450            if (fp->callee) { // might not have it if the entry frame is global
1451                JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));
1452                fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
1453            }
1454        }
1455    }
1456    debug_only_v(printf("\n");)
1457    return mp - mp_base;
1458}
1459
1460/* Emit load instructions onto the trace that read the initial stack state. */
1461void
1462TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
1463                      const char *prefix, uintN index, JSStackFrame *fp)
1464{
1465    LIns* ins;
1466    if (t == JSVAL_INT) { /* demoted */
1467        JS_ASSERT(isInt32(*p));
1468        /* Ok, we have a valid demotion attempt pending, so insert an integer
1469           read and promote it to double since all arithmetic operations expect
1470           to see doubles on entry. The first op to use this slot will emit a
1471           f2i cast which will cancel out the i2f we insert here. */
1472        ins = lir->insLoadi(base, offset);
1473        ins = lir->ins1(LIR_i2f, ins);
1474    } else {
1475        JS_ASSERT(t == JSVAL_BOXED || isNumber(*p) == (t == JSVAL_DOUBLE));
1476        if (t == JSVAL_DOUBLE) {
1477            ins = lir->insLoad(LIR_ldq, base, offset);
1478        } else if (t == JSVAL_BOOLEAN) {
1479            ins = lir->insLoad(LIR_ld, base, offset);
1480        } else {
1481            ins = lir->insLoad(LIR_ldp, base, offset);
1482        }
1483    }
1484    tracker.set(p, ins);
1485#ifdef DEBUG
1486    char name[64];
1487    JS_ASSERT(strlen(prefix) < 10);
1488    void* mark = NULL;
1489    jsuword* localNames = NULL;
1490    const char* funName = NULL;
1491    if (*prefix == 'a' || *prefix == 'v') {
1492        mark = JS_ARENA_MARK(&cx->tempPool);
1493        if (JS_GET_LOCAL_NAME_COUNT(fp->fun) != 0)
1494            localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
1495        funName = fp->fun->atom ? js_AtomToPrintableString(cx, fp->fun->atom) : "<anonymous>";
1496    }
1497    if (!strcmp(prefix, "argv")) {
1498        if (index < fp->fun->nargs) {
1499            JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
1500            JS_snprintf(name, sizeof name, "$%s.%s", funName, js_AtomToPrintableString(cx, atom));
1501        } else {
1502            JS_snprintf(name, sizeof name, "$%s…

Large files files are truncated, but you can click here to view the full file