PageRenderTime 62ms CodeModel.GetById 17ms app.highlight 39ms RepoModel.GetById 1ms app.codeStats 0ms

/js/lib/Socket.IO-node/support/expresso/deps/jscoverage/js/jstracer.h

http://github.com/onedayitwillmake/RealtimeMultiplayerNodeJs
C++ Header | 552 lines | 416 code | 74 blank | 62 comment | 16 complexity | 497b4e28ac1290750e0c64d5d8aaa80d MD5 | raw file
  1/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  2 * vim: set ts=8 sw=4 et tw=99 ft=cpp:
  3 *
  4 * ***** BEGIN LICENSE BLOCK *****
  5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  6 *
  7 * The contents of this file are subject to the Mozilla Public License Version
  8 * 1.1 (the "License"); you may not use this file except in compliance with
  9 * the License. You may obtain a copy of the License at
 10 * http://www.mozilla.org/MPL/
 11 *
 12 * Software distributed under the License is distributed on an "AS IS" basis,
 13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 14 * for the specific language governing rights and limitations under the
 15 * License.
 16 *
 17 * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
 18 * May 28, 2008.
 19 *
 20 * The Initial Developer of the Original Code is
 21 *   Brendan Eich <brendan@mozilla.org>
 22 *
 23 * Contributor(s):
 24 *   Andreas Gal <gal@mozilla.com>
 25 *   Mike Shaver <shaver@mozilla.org>
 26 *   David Anderson <danderson@mozilla.com>
 27 *
 28 * Alternatively, the contents of this file may be used under the terms of
 29 * either of the GNU General Public License Version 2 or later (the "GPL"),
 30 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 31 * in which case the provisions of the GPL or the LGPL are applicable instead
 32 * of those above. If you wish to allow use of your version of this file only
 33 * under the terms of either the GPL or the LGPL, and not to allow others to
 34 * use your version of this file under the terms of the MPL, indicate your
 35 * decision by deleting the provisions above and replace them with the notice
 36 * and other provisions required by the GPL or the LGPL. If you do not delete
 37 * the provisions above, a recipient may use your version of this file under
 38 * the terms of any one of the MPL, the GPL or the LGPL.
 39 *
 40 * ***** END LICENSE BLOCK ***** */
 41
 42#ifndef jstracer_h___
 43#define jstracer_h___
 44
 45#ifdef JS_TRACER
 46
 47#include "jscntxt.h"
 48#include "jsstddef.h"
 49#include "jstypes.h"
 50#include "jslock.h"
 51#include "jsnum.h"
 52#include "jsinterp.h"
 53#include "jsbuiltins.h"
 54
 55#if defined(DEBUG) && !defined(JS_JIT_SPEW)
 56#define JS_JIT_SPEW
 57#endif
 58
 59template <typename T>
 60class Queue : public avmplus::GCObject {
 61    T* _data;
 62    unsigned _len;
 63    unsigned _max;
 64
 65    void ensure(unsigned size) {
 66        while (_max < size)
 67            _max <<= 1;
 68        _data = (T*)realloc(_data, _max * sizeof(T));
 69    }
 70public:
 71    Queue(unsigned max = 16) {
 72        this->_max = max;
 73        this->_len = 0;
 74        this->_data = (T*)malloc(max * sizeof(T));
 75    }
 76
 77    ~Queue() {
 78        free(_data);
 79    }
 80
 81    bool contains(T a) {
 82        for (unsigned n = 0; n < _len; ++n)
 83            if (_data[n] == a)
 84                return true;
 85        return false;
 86    }
 87
 88    void add(T a) {
 89        ensure(_len + 1);
 90        JS_ASSERT(_len <= _max);
 91        _data[_len++] = a;
 92    }
 93
 94    void add(T* chunk, unsigned size) {
 95        ensure(_len + size);
 96        JS_ASSERT(_len <= _max);
 97        memcpy(&_data[_len], chunk, size * sizeof(T));
 98        _len += size;
 99    }
100
101    void addUnique(T a) {
102        if (!contains(a))
103            add(a);
104    }
105
106    void setLength(unsigned len) {
107        ensure(len + 1);
108        _len = len;
109    }
110
111    void clear() {
112        _len = 0;
113    }
114
115    unsigned length() const {
116        return _len;
117    }
118
119    T* data() const {
120        return _data;
121    }
122};
123
/*
 * Tracker is used to keep track of values being manipulated by the interpreter
 * during trace recording.  It maps interpreter addresses to the LIR
 * instruction currently holding that value, using a linked list of pages,
 * each covering one region of addresses (map[1] is the C-style
 * variable-length-array idiom; pages are over-allocated).
 */
class Tracker {
    struct Page {
        struct Page*    next;   /* next page on the pagelist */
        jsuword         base;   /* base address this page covers */
        nanojit::LIns*  map[1]; /* per-slot instruction map (variable length) */
    };
    struct Page* pagelist;      /* head of the page list */

    jsuword         getPageBase(const void* v) const; /* page base containing v */
    struct Page*    findPage(const void* v) const;    /* page covering v, if any */
    struct Page*    addPage(const void* v);           /* allocate+link page for v */
public:
    Tracker();
    ~Tracker();

    /* Is an instruction recorded for address v? */
    bool            has(const void* v) const;
    /* Fetch the instruction recorded for v. */
    nanojit::LIns*  get(const void* v) const;
    /* Record ins as the current value of v. */
    void            set(const void* v, nanojit::LIns* ins);
    /* Drop all mappings. */
    void            clear();
};
148
/*
 * The oracle keeps track of slots that should not be demoted to int because we know them
 * to overflow or they result in type-unstable traces. We are using a simple hash table.
 * Collisions lead to loss of optimization (demotable slots are not demoted) but have no
 * correctness implications.
 */
#define ORACLE_SIZE 4096

class Oracle {
    avmplus::BitSet _dontDemote;  /* hash-indexed bits; set bit == don't demote */
public:
    /* Mark/query a global slot of |script| as unsafe to demote to int. */
    void markGlobalSlotUndemotable(JSScript* script, unsigned slot);
    bool isGlobalSlotUndemotable(JSScript* script, unsigned slot) const;
    /* Mark/query a stack slot at bytecode |ip| as unsafe to demote to int. */
    void markStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot);
    bool isStackSlotUndemotable(JSScript* script, jsbytecode* ip, unsigned slot) const;
    /* Forget everything the oracle has learned. */
    void clear();
};
166
/* A list of global-object slot numbers tracked by a tree. */
typedef Queue<uint16> SlotList;

/*
 * A TypeMap records one type tag (uint8) per tracked slot; used to describe
 * the types of globals and stack slots at a trace entry or side exit.
 */
class TypeMap : public Queue<uint8> {
public:
    /* Capture the current types of the global slots listed in |slots|. */
    void captureGlobalTypes(JSContext* cx, SlotList& slots);
    /* Capture the current types of the stack slots down to |callDepth|. */
    void captureStackTypes(JSContext* cx, unsigned callDepth);
    /* Entry-wise comparison against another map. */
    bool matches(TypeMap& other) const;
};
175
/*
 * Reason a guard may transfer control back to the interpreter.  Stored in
 * VMSideExit::exitType; the names mirror the condition being guarded
 * (branch taken, loop done, nested-tree exit, type/shape mismatch, OOM,
 * arithmetic overflow, type-unstable loop edge, interpreter timeout).
 */
enum ExitType {
    BRANCH_EXIT,
    LOOP_EXIT,
    NESTED_EXIT,
    MISMATCH_EXIT,
    OOM_EXIT,
    OVERFLOW_EXIT,
    UNSTABLE_LOOP_EXIT,
    TIMEOUT_EXIT
};
186
/*
 * VM-level side-exit record layered on nanojit's SideExit.  The *_adj
 * fields hold adjusted (relative) values for the interpreter pc, stack
 * pointer, and call-stack pointer at the exit.  The per-slot type map for
 * the exit is stored immediately after this struct (see getTypeMap).
 */
struct VMSideExit : public nanojit::SideExit
{
    intptr_t ip_adj;   /* adjusted interpreter pc at the exit */
    intptr_t sp_adj;   /* adjusted native stack pointer */
    intptr_t rp_adj;   /* adjusted call-stack pointer */
    int32_t calldepth; /* depth of inlined frames at the exit */
    uint32 numGlobalSlots;                  /* global slots in the type map */
    uint32 numStackSlots;                   /* stack slots in the type map */
    uint32 numStackSlotsBelowCurrentFrame;  /* slots belonging to outer frames */
    ExitType exitType; /* why this guard exits (see ExitType) */
};
198
199static inline uint8* getTypeMap(nanojit::SideExit* exit) 
200{ 
201    return (uint8*)(((VMSideExit*)exit) + 1); 
202}
203
/*
 * Register-like state handed to a compiled trace when it is entered and
 * read back when it exits.  Pointers reference the native stack, the call
 * (frame) stack, and the global frame used while on trace.
 */
struct InterpState
{
    void* sp; /* native stack pointer, stack[0] is spbase[0] */
    void* rp; /* call stack pointer */
    void* gp; /* global frame pointer */
    JSContext *cx; /* current VM context handle */
    void* eos; /* first unusable word after the native stack */
    void* eor; /* first unusable word after the call stack */
    VMSideExit* lastTreeExitGuard; /* guard we exited on during a tree call */
    VMSideExit* lastTreeCallGuard; /* guard we want to grow from if the tree
                                      call exit guard mismatched */
    void* rpAtLastTreeCall; /* value of rp at innermost tree call guard */
};
217
/*
 * Singly-linked list node recording a type-unstable side exit and the
 * fragment it belongs to (list head: TreeInfo::unstableExits).
 */
struct UnstableExit
{
    nanojit::Fragment* fragment; /* fragment containing the unstable exit */
    VMSideExit* exit;            /* the unstable exit itself */
    UnstableExit* next;          /* next node in the list */
};
224
225class TreeInfo MMGC_SUBCLASS_DECL {
226    nanojit::Fragment*      fragment;
227public:
228    JSScript*               script;
229    unsigned                maxNativeStackSlots;
230    ptrdiff_t               nativeStackBase;
231    unsigned                maxCallDepth;
232    TypeMap                 stackTypeMap;
233    Queue<nanojit::Fragment*> dependentTrees;
234    unsigned                branchCount;
235    Queue<VMSideExit*>      sideExits;
236    UnstableExit*           unstableExits;
237
238    TreeInfo(nanojit::Fragment* _fragment) : unstableExits(NULL) {
239        fragment = _fragment;
240    }
241    ~TreeInfo();
242};
243
/*
 * Snapshot of an interpreted call frame taken while recording, pushed on
 * the tracer's call stack so the frame can be reconstructed on exit.
 */
struct FrameInfo {
    JSObject*       callee;     // callee function object
    intptr_t        ip_adj;     // callee script-based pc index and imacro pc
    uint8*          typemap;    // typemap for the stack frame
    union {
        struct {
            uint16  spdist;     // distance from fp->slots to fp->regs->sp at JSOP_CALL
            uint16  argc;       // actual argument count, may be < fun->nargs
        } s;
        uint32      word;       // for spdist/argc LIR store in record_JSOP_CALL
    };
};
256
/*
 * TraceRecorder translates one interpreter code path into LIR: while the
 * interpreter runs, a record_JSOP_* hook (declared en masse via the OPDEF
 * expansion at the bottom of this class) is invoked for each bytecode and
 * emits equivalent LIR through the nanojit writer chain.  A record_* method
 * returning false aborts the recording (see TRACE_ARGS_ below).
 */
class TraceRecorder : public avmplus::GCObject {
    /* Recording context and value tracking. */
    JSContext*              cx;                 /* context being recorded */
    JSTraceMonitor*         traceMonitor;
    JSObject*               globalObj;
    Tracker                 tracker;            /* jsval address -> LIns */
    Tracker                 nativeFrameTracker;
    char*                   entryTypeMap;
    unsigned                callDepth;
    JSAtom**                atoms;
    VMSideExit*             anchor;             /* side exit being extended, if any */
    nanojit::Fragment*      fragment;           /* fragment being written */
    TreeInfo*               treeInfo;

    /* nanojit LIR buffer and the chain of writer filters feeding it. */
    nanojit::LirBuffer*     lirbuf;
    nanojit::LirWriter*     lir;                /* head of the writer chain */
    nanojit::LirBufWriter*  lir_buf_writer;
    nanojit::LirWriter*     verbose_filter;
    nanojit::LirWriter*     cse_filter;
    nanojit::LirWriter*     expr_filter;
    nanojit::LirWriter*     func_filter;
#ifdef NJ_SOFTFLOAT
    nanojit::LirWriter*     float_filter;
#endif

    /* Frequently referenced LIR values (context, global frame, stack limits). */
    nanojit::LIns*          cx_ins;
    nanojit::LIns*          gp_ins;
    nanojit::LIns*          eos_ins;
    nanojit::LIns*          eor_ins;
    nanojit::LIns*          rval_ins;
    nanojit::LIns*          inner_sp_ins;

    /* Abort / trash / termination bookkeeping. */
    bool                    deepAborted;
    bool                    applyingArguments;
    bool                    trashTree;
    nanojit::Fragment*      whichTreeToTrash;
    Queue<jsbytecode*>      cfgMerges;
    jsval*                  global_dslots;
    JSTraceableNative*      pendingTraceableNative;
    bool                    terminate;
    intptr_t                terminate_ip_adj;
    nanojit::Fragment*      outerToBlacklist;
    nanojit::Fragment*      promotedPeer;
    TraceRecorder*          nextRecorderToAbort;
    bool                    wasRootFragment;

    /* Native stack / global slot layout helpers. */
    bool isGlobal(jsval* p) const;
    ptrdiff_t nativeGlobalOffset(jsval* p) const;
    ptrdiff_t nativeStackOffset(jsval* p) const;
    void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
                const char *prefix, uintN index, JSStackFrame *fp);
    void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned ngslots, unsigned callDepth,
                uint8* globalTypeMap, uint8* stackTypeMap);
    void trackNativeStackUse(unsigned slots);

    bool lazilyImportGlobalSlot(unsigned slot);

    /* Emit a guard that exits the trace if |cond| != |expected| at runtime. */
    nanojit::LIns* guard(bool expected, nanojit::LIns* cond, ExitType exitType);
    nanojit::LIns* guard(bool expected, nanojit::LIns* cond, nanojit::LIns* exit);
    nanojit::LIns* addName(nanojit::LIns* ins, const char* name);

    /* Read/write the LIR instruction currently bound to a jsval slot. */
    nanojit::LIns* get(jsval* p) const;
    nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
    void set(jsval* p, nanojit::LIns* l, bool initializing = false);

    bool checkType(jsval& v, uint8 t, jsval*& stage_val, nanojit::LIns*& stage_ins,
                   unsigned& stage_count);
    bool deduceTypeStability(nanojit::Fragment* root_peer, nanojit::Fragment** stable_peer,
                             unsigned* demotes);

    /* Access interpreter slots: arguments, locals, operand stack. */
    jsval& argval(unsigned n) const;
    jsval& varval(unsigned n) const;
    jsval& stackval(int n) const;

    nanojit::LIns* scopeChain() const;
    bool activeCallOrGlobalSlot(JSObject* obj, jsval*& vp);

    /* Getter/setter pairs for argument, local and stack slots (LIR side). */
    nanojit::LIns* arg(unsigned n);
    void arg(unsigned n, nanojit::LIns* i);
    nanojit::LIns* var(unsigned n);
    void var(unsigned n, nanojit::LIns* i);
    nanojit::LIns* stack(int n);
    void stack(int n, nanojit::LIns* i);

    /* Arithmetic and conversion helpers. */
    nanojit::LIns* alu(nanojit::LOpcode op, jsdouble v0, jsdouble v1,
                       nanojit::LIns* s0, nanojit::LIns* s1);
    nanojit::LIns* f2i(nanojit::LIns* f);
    nanojit::LIns* makeNumberInt32(nanojit::LIns* f);
    nanojit::LIns* stringify(jsval& v);

    bool call_imacro(jsbytecode* imacro);

    /* Recording helpers shared by families of bytecodes. */
    bool ifop();
    bool switchop();
    bool inc(jsval& v, jsint incr, bool pre = true);
    bool inc(jsval& v, nanojit::LIns*& v_ins, jsint incr, bool pre = true);
    bool incProp(jsint incr, bool pre = true);
    bool incElem(jsint incr, bool pre = true);
    bool incName(jsint incr, bool pre = true);

    /* Bit flags modifying how cmp() records a comparison. */
    enum { CMP_NEGATE = 1, CMP_TRY_BRANCH_AFTER_COND = 2, CMP_CASE = 4, CMP_STRICT = 8 };
    bool cmp(nanojit::LOpcode op, int flags = 0);

    bool unary(nanojit::LOpcode op);
    bool binary(nanojit::LOpcode op);

    bool ibinary(nanojit::LOpcode op);
    bool iunary(nanojit::LOpcode op);
    bool bbinary(nanojit::LOpcode op);
    void demote(jsval& v, jsdouble result);

    /* Object / property access recording. */
    bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins, nanojit::LIns*& ops_ins,
                       size_t op_offset = 0);
    bool test_property_cache(JSObject* obj, nanojit::LIns* obj_ins, JSObject*& obj2,
                             jsuword& pcval);
    bool test_property_cache_direct_slot(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot);
    void stobj_set_slot(nanojit::LIns* obj_ins, unsigned slot,
                        nanojit::LIns*& dslots_ins, nanojit::LIns* v_ins);
    nanojit::LIns* stobj_get_fslot(nanojit::LIns* obj_ins, unsigned slot);
    nanojit::LIns* stobj_get_slot(nanojit::LIns* obj_ins, unsigned slot,
                                  nanojit::LIns*& dslots_ins);
    bool native_set(nanojit::LIns* obj_ins, JSScopeProperty* sprop,
                    nanojit::LIns*& dslots_ins, nanojit::LIns* v_ins);
    bool native_get(nanojit::LIns* obj_ins, nanojit::LIns* pobj_ins, JSScopeProperty* sprop,
                    nanojit::LIns*& dslots_ins, nanojit::LIns*& v_ins);

    bool name(jsval*& vp);
    bool prop(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot, nanojit::LIns*& v_ins);
    bool elem(jsval& oval, jsval& idx, jsval*& vp, nanojit::LIns*& v_ins, nanojit::LIns*& addr_ins);

    bool getProp(JSObject* obj, nanojit::LIns* obj_ins);
    bool getProp(jsval& v);
    bool getThis(nanojit::LIns*& this_ins);

    /* jsval boxing/unboxing and runtime class/shape guards. */
    bool box_jsval(jsval v, nanojit::LIns*& v_ins);
    bool unbox_jsval(jsval v, nanojit::LIns*& v_ins);
    bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp,
                    ExitType exitType = MISMATCH_EXIT);
    bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins,
                         ExitType exitType = MISMATCH_EXIT);
    bool guardDenseArrayIndex(JSObject* obj, jsint idx, nanojit::LIns* obj_ins,
                              nanojit::LIns* dslots_ins, nanojit::LIns* idx_ins,
                              ExitType exitType);
    bool guardElemOp(JSObject* obj, nanojit::LIns* obj_ins, jsid id, size_t op_offset, jsval* vp);
    void clearFrameSlotsFromCache();
    bool guardShapelessCallee(jsval& callee);
    bool interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing);
    bool functionCall(bool constructing);

    /* Control-flow-graph merge tracking and branch fusion. */
    void trackCfgMerges(jsbytecode* pc);
    void flipIf(jsbytecode* pc, bool& cond);
    void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);

    /* Method-presence queries; jsval overloads assert the value is an object. */
    bool hasMethod(JSObject* obj, jsid id);
    bool hasToStringMethod(JSObject* obj);
    bool hasToStringMethod(jsval v) {
        JS_ASSERT(JSVAL_IS_OBJECT(v));
        return hasToStringMethod(JSVAL_TO_OBJECT(v));
    }
    bool hasValueOfMethod(JSObject* obj);
    bool hasValueOfMethod(jsval v) {
        JS_ASSERT(JSVAL_IS_OBJECT(v));
        return hasValueOfMethod(JSVAL_TO_OBJECT(v));
    }
    bool hasIteratorMethod(JSObject* obj);
    bool hasIteratorMethod(jsval v) {
        JS_ASSERT(JSVAL_IS_OBJECT(v));
        return hasIteratorMethod(JSVAL_TO_OBJECT(v));
    }

public:
    friend bool js_MonitorRecording(TraceRecorder* tr);

    TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*,
                  unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
                  VMSideExit* expectedInnerExit, nanojit::Fragment* outerToBlacklist);
    ~TraceRecorder();

    /* Trace lifecycle: snapshotting, compiling, and closing/ending loops. */
    uint8 determineSlotType(jsval* vp) const;
    nanojit::LIns* snapshot(ExitType exitType);
    nanojit::Fragment* getFragment() const { return fragment; }
    bool isLoopHeader(JSContext* cx) const;
    void compile(nanojit::Fragmento* fragmento);
    bool closeLoop(nanojit::Fragmento* fragmento, bool& demote, unsigned *demotes);
    void endLoop(nanojit::Fragmento* fragmento);
    void joinEdgesToEntry(nanojit::Fragmento* fragmento, nanojit::Fragment* peer_root);
    void blacklist() { fragment->blacklist(); }
    bool adjustCallerTypes(nanojit::Fragment* f, unsigned* demote_slots, bool& trash);
    nanojit::Fragment* findNestedCompatiblePeer(nanojit::Fragment* f, nanojit::Fragment** empty);
    void prepareTreeCall(nanojit::Fragment* inner);
    void emitTreeCall(nanojit::Fragment* inner, VMSideExit* exit);
    unsigned getCallDepth() const;
    void pushAbortStack();
    void popAbortStack();
    void removeFragmentoReferences();

    /* Hooks invoked by the interpreter at specific recording events. */
    bool record_EnterFrame();
    bool record_LeaveFrame();
    bool record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop);
    bool record_SetPropMiss(JSPropCacheEntry* entry);
    bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
    bool record_FastNativeCallComplete();
    bool record_IteratorNextComplete();

    /* Small state accessors. */
    nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; }
    void deepAbort() { deepAborted = true; }
    bool wasDeepAborted() { return deepAborted; }
    bool walkedOutOfLoop() { return terminate; }
    void setPromotedPeer(nanojit::Fragment* peer) { promotedPeer = peer; }
    TreeInfo* getTreeInfo() { return treeInfo; }

    /* Declare one bool record_JSOP_*() hook per bytecode in jsopcode.tbl. */
#define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)               \
    bool record_##op();
# include "jsopcode.tbl"
#undef OPDEF
};
469
/* Whether the JIT option is enabled on this context. */
#define TRACING_ENABLED(cx)       JS_HAS_OPTION(cx, JSOPTION_JIT)
/* Access/replace the recorder stored in the context's trace monitor. */
#define TRACE_RECORDER(cx)        (JS_TRACE_MONITOR(cx).recorder)
#define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))

/* True iff op lies in the contiguous binary-operator range [JSOP_BITOR, JSOP_MOD]. */
#define JSOP_IS_BINARY(op) ((uintN)((op) - JSOP_BITOR) <= (uintN)(JSOP_MOD - JSOP_BITOR))

/*
 * See jsinterp.cpp for the ENABLE_TRACER definition. Also note how comparing x
 * to JSOP_* constants specializes trace-recording code at compile time either
 * to include imacro support, or exclude it altogether for this particular x.
 *
 * We save macro-generated code size also via bool TraceRecorder::record_JSOP_*
 * return type, instead of a three-state: OK, ABORTED, IMACRO_STARTED. But the
 * price of this is the JSFRAME_IMACRO_START frame flag. We need one more bit
 * to detect that TraceRecorder::call_imacro was invoked by the record_JSOP_*
 * method invoked by TRACE_ARGS_.
 */
#define RECORD_ARGS(x,args)                                                   \
    JS_BEGIN_MACRO                                                            \
        if (!js_MonitorRecording(TRACE_RECORDER(cx))) {                       \
            ENABLE_TRACER(0);                                                 \
        } else {                                                              \
            TRACE_ARGS_(x, args,                                              \
                if ((fp->flags & JSFRAME_IMACRO_START) &&                     \
                    (x == JSOP_ITER || x == JSOP_NEXTITER ||                  \
                    JSOP_IS_BINARY(x))) {                                     \
                    fp->flags &= ~JSFRAME_IMACRO_START;                       \
                    atoms = COMMON_ATOMS_START(&rt->atomState);               \
                    op = JSOp(*regs.pc);                                      \
                    DO_OP();                                                  \
                }                                                             \
            );                                                                \
         }                                                                    \
    JS_END_MACRO

/*
 * Invoke the recorder's record_##x hook; if it returns false, run |onfalse|,
 * abort the recording (passing the opcode name as the reason), and re-enable
 * the tracer.
 */
#define TRACE_ARGS_(x,args,onfalse)                                           \
    JS_BEGIN_MACRO                                                            \
        TraceRecorder* tr_ = TRACE_RECORDER(cx);                              \
        if (tr_ && !tr_->record_##x args) {                                   \
            onfalse                                                           \
            js_AbortRecording(cx, #x);                                        \
            ENABLE_TRACER(0);                                                 \
        }                                                                     \
    JS_END_MACRO

#define TRACE_ARGS(x,args)      TRACE_ARGS_(x, args, )

/* Arity-specific conveniences used at each bytecode's case in the interpreter. */
#define RECORD(x)               RECORD_ARGS(x, ())
#define TRACE_0(x)              TRACE_ARGS(x, ())
#define TRACE_1(x,a)            TRACE_ARGS(x, (a))
#define TRACE_2(x,a,b)          TRACE_ARGS(x, (a, b))
521
/* Called by the interpreter on each loop edge (entry point to the tracer). */
extern bool
js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount);

/* Per-bytecode monitoring hook; see RECORD_ARGS above for its use. */
extern bool
js_MonitorRecording(TraceRecorder *tr);

/* Abort the active recording on |cx|; |reason| is a human-readable tag. */
extern void
js_AbortRecording(JSContext* cx, const char* reason);

/* Set up / tear down JIT state attached to a trace monitor. */
extern void
js_InitJIT(JSTraceMonitor *tm);

extern void
js_FinishJIT(JSTraceMonitor *tm);

/* Discard compiled-trace state / oracle state for this context's runtime. */
extern void
js_FlushJITCache(JSContext* cx);

extern void
js_FlushJITOracle(JSContext* cx);
542
#else  /* !JS_TRACER */

/* With the tracer compiled out, the interpreter's recording hooks are no-ops. */
#define RECORD(x)               ((void)0)
#define TRACE_0(x)              ((void)0)
#define TRACE_1(x,a)            ((void)0)
#define TRACE_2(x,a,b)          ((void)0)

#endif /* !JS_TRACER */
551
552#endif /* jstracer_h___ */