diff --git a/js/src/jsbuiltins.cpp b/js/src/jsbuiltins.cpp index d0abff31f9e0..466fc64538fb 100644 --- a/js/src/jsbuiltins.cpp +++ b/js/src/jsbuiltins.cpp @@ -386,10 +386,7 @@ js_PopInterpFrame(JSContext* cx, TracerState* state) cx->display[fp->script->staticLevel] = fp->displaySave; /* Pop the frame and its memory. */ - JSStackFrame *down = fp->down; - cx->stack().popInlineFrame(cx, fp, down); - JS_ASSERT(cx->fp == down && cx->fp->regs == &fp->callerRegs); - down->regs = fp->regs; + cx->stack().popInlineFrame(cx, fp, fp->down); /* Update the inline call count. */ *state->inlineCallCountp = *state->inlineCallCountp - 1; diff --git a/js/src/jscntxt.cpp b/js/src/jscntxt.cpp index 9c69982ed721..653d4068cd5e 100644 --- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -277,7 +277,7 @@ StackSpace::getInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag, assertIsCurrent(cx); JS_ASSERT(currentCallStack->isActive()); - jsval *start = cx->fp->regs->sp; + jsval *start = cx->regs->sp; ptrdiff_t nvals = nmissing + VALUES_PER_CALL_STACK + VALUES_PER_STACK_FRAME + nfixed; if (!ensureSpace(cx, start, nvals)) return false; @@ -288,7 +288,7 @@ StackSpace::getInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag, JS_REQUIRES_STACK void StackSpace::pushInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag, - InvokeFrameGuard &fg) + InvokeFrameGuard &fg, JSFrameRegs ®s) { JS_ASSERT(!!ag.cs ^ !!fg.cs); JS_ASSERT_IF(ag.cs, ag.cs == currentCallStack && !ag.cs->inContext()); @@ -298,7 +298,7 @@ StackSpace::pushInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag, } JSStackFrame *fp = fg.fp; fp->down = cx->fp; - cx->pushCallStackAndFrame(currentCallStack, fp); + cx->pushCallStackAndFrame(currentCallStack, fp, regs); currentCallStack->setInitialVarObj(NULL); fg.cx = cx; } @@ -361,13 +361,13 @@ StackSpace::getExecuteFrame(JSContext *cx, JSStackFrame *down, JS_REQUIRES_STACK void StackSpace::pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg, - JSObject *initialVarObj) + JSFrameRegs ®s, JSObject *initialVarObj) { fg.fp->down = fg.down; CallStack *cs = fg.cs; cs->setPreviousInThread(currentCallStack); currentCallStack = cs; - cx->pushCallStackAndFrame(cs, fg.fp); + cx->pushCallStackAndFrame(cs, fg.fp, regs); cs->setInitialVarObj(initialVarObj); fg.cx = cx; } @@ -391,13 +391,14 @@ StackSpace::getSynthesizedSlowNativeFrame(JSContext *cx, CallStack *&cs, JSStack } JS_REQUIRES_STACK void -StackSpace::pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp) +StackSpace::pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp, + JSFrameRegs ®s) { JS_ASSERT(!fp->script && FUN_SLOW_NATIVE(fp->fun)); fp->down = cx->fp; cs->setPreviousInThread(currentCallStack); currentCallStack = cs; - cx->pushCallStackAndFrame(cs, fp); + cx->pushCallStackAndFrame(cs, fp, regs); cs->setInitialVarObj(NULL); } @@ -412,6 +413,84 @@ StackSpace::popSynthesizedSlowNativeFrame(JSContext *cx) currentCallStack = currentCallStack->getPreviousInThread(); } +/* + * When a pair of down-linked stack frames are in the same callstack, the + * up-frame's address is the top of the down-frame's stack, modulo missing + * arguments. + */ +static inline jsval * +InlineDownFrameSP(JSStackFrame *up) +{ + JS_ASSERT(up->fun && up->script); + jsval *sp = up->argv + up->argc; +#ifdef DEBUG + uint16 nargs = up->fun->nargs; + uintN argc = up->argc; + uintN missing = argc < nargs ? 
nargs - argc : 0;
+    JS_ASSERT(sp == (jsval *)up - missing);
+#endif
+    return sp;
+}
+
+JS_REQUIRES_STACK
+FrameRegsIter::FrameRegsIter(JSContext *cx)
+{
+    curcs = cx->getCurrentCallStack();
+    if (!curcs) {
+        curfp = NULL;
+        return;
+    }
+    if (curcs->isSuspended()) {
+        curfp = curcs->getSuspendedFrame();
+        cursp = curcs->getSuspendedRegs()->sp;
+        curpc = curcs->getSuspendedRegs()->pc;
+        return;
+    }
+    JS_ASSERT(cx->fp);
+    curfp = cx->fp;
+    cursp = cx->regs->sp;
+    curpc = cx->regs->pc;
+}
+
+FrameRegsIter &
+FrameRegsIter::operator++()
+{
+    JSStackFrame *up = curfp;
+    JSStackFrame *down = curfp = curfp->down;
+    if (!down)
+        return *this;
+
+    curpc = down->savedPC;
+
+    /* For a contiguous down and up, compute sp from up. */
+    if (up != curcs->getInitialFrame()) {
+        cursp = InlineDownFrameSP(up);
+        return *this;
+    }
+
+    /*
+     * If the up-frame is in csup and the down-frame is in csdown, it is not
+     * necessarily the case that |csup->getPreviousInContext == csdown| or that
+     * |csdown->getSuspendedFrame == down| (because of indirect eval and
+     * JS_EvaluateInStackFrame). To compute down's sp, we need to do a linear
+     * scan, keeping track of what is immediately after down in memory.
+     */
+    curcs = curcs->getPreviousInContext();
+    cursp = curcs->getSuspendedSP();
+    JSStackFrame *f = curcs->getSuspendedFrame();
+    while (f != down) {
+        if (f == curcs->getInitialFrame()) {
+            curcs = curcs->getPreviousInContext();
+            cursp = curcs->getSuspendedSP();
+            f = curcs->getSuspendedFrame();
+        } else {
+            cursp = InlineDownFrameSP(f);
+            f = f->down;
+        }
+    }
+    return *this;
+}
+
 bool
 JSThreadData::init()
 {
@@ -1577,14 +1656,12 @@ ReportError(JSContext *cx, const char *message, JSErrorReport *reportp,
 static void
 PopulateReportBlame(JSContext *cx, JSErrorReport *report)
 {
-    JSStackFrame *fp;
-
     /*
      * Walk stack until we find a frame that is associated with some script
      * rather than a native frame.
     */
-    for (fp = js_GetTopStackFrame(cx); fp; fp = fp->down) {
-        if (fp->regs) {
+    for (JSStackFrame *fp = js_GetTopStackFrame(cx); fp; fp = fp->down) {
+        if (fp->pc(cx)) {
             report->filename = fp->script->filename;
             report->lineno = js_FramePCToLineNumber(cx, fp);
             break;
@@ -2163,13 +2240,10 @@ js_GetCurrentBytecodePC(JSContext* cx)
 #endif
     {
         JS_ASSERT_NOT_ON_TRACE(cx);  /* for static analysis */
-        JSStackFrame* fp = cx->fp;
-        if (fp && fp->regs) {
-            pc = fp->regs->pc;
-            imacpc = fp->imacpc;
-        } else {
+        pc = cx->regs ? 
cx->regs->pc : NULL; + if (!pc) return NULL; - } + imacpc = cx->fp->imacpc; } /* @@ -2194,18 +2268,27 @@ js_CurrentPCIsInImacro(JSContext *cx) JSContext::JSContext(JSRuntime *rt) : runtime(rt), fp(NULL), + regs(NULL), regExpStatics(this), busyArrays(this) {} void -JSContext::pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp) +JSContext::pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp, + JSFrameRegs &newregs) { - if (hasActiveCallStack()) - currentCallStack->suspend(fp); + if (hasActiveCallStack()) { + JS_ASSERT(fp->savedPC == JSStackFrame::sInvalidPC); + fp->savedPC = regs->pc; + currentCallStack->suspend(fp, regs); + } newcs->setPreviousInContext(currentCallStack); currentCallStack = newcs; +#ifdef DEBUG + newfp->savedPC = JSStackFrame::sInvalidPC; +#endif setCurrentFrame(newfp); + setCurrentRegs(&newregs); newcs->joinContext(this, newfp); } @@ -2214,18 +2297,25 @@ JSContext::popCallStackAndFrame() { JS_ASSERT(currentCallStack->maybeContext() == this); JS_ASSERT(currentCallStack->getInitialFrame() == fp); + JS_ASSERT(fp->savedPC == JSStackFrame::sInvalidPC); currentCallStack->leaveContext(); currentCallStack = currentCallStack->getPreviousInContext(); if (currentCallStack) { if (currentCallStack->isSaved()) { setCurrentFrame(NULL); + setCurrentRegs(NULL); } else { setCurrentFrame(currentCallStack->getSuspendedFrame()); + setCurrentRegs(currentCallStack->getSuspendedRegs()); currentCallStack->resume(); +#ifdef DEBUG + fp->savedPC = JSStackFrame::sInvalidPC; +#endif } } else { JS_ASSERT(fp->down == NULL); setCurrentFrame(NULL); + setCurrentRegs(NULL); } } @@ -2233,16 +2323,23 @@ void JSContext::saveActiveCallStack() { JS_ASSERT(hasActiveCallStack()); - currentCallStack->save(fp); + currentCallStack->save(fp, regs); + JS_ASSERT(fp->savedPC == JSStackFrame::sInvalidPC); + fp->savedPC = regs->pc; setCurrentFrame(NULL); + setCurrentRegs(NULL); } void JSContext::restoreCallStack() { - JS_ASSERT(!hasActiveCallStack()); - setCurrentFrame(currentCallStack->getSuspendedFrame()); - currentCallStack->restore(); + js::CallStack *ccs = currentCallStack; + setCurrentFrame(ccs->getSuspendedFrame()); + setCurrentRegs(ccs->getSuspendedRegs()); + ccs->restore(); +#ifdef DEBUG + fp->savedPC = JSStackFrame::sInvalidPC; +#endif } JSGenerator * @@ -2265,7 +2362,7 @@ JSContext::generatorFor(JSStackFrame *fp) const } CallStack * -JSContext::containingCallStack(JSStackFrame *target) +JSContext::containingCallStack(const JSStackFrame *target) { /* The context may have nothing running. */ CallStack *cs = currentCallStack; diff --git a/js/src/jscntxt.h b/js/src/jscntxt.h index 1221222aa05e..b5d73811b78d 100644 --- a/js/src/jscntxt.h +++ b/js/src/jscntxt.h @@ -200,6 +200,10 @@ struct TracerState uintN nativeVpLen; jsval* nativeVp; + // The regs pointed to by cx->regs while a deep-bailed slow native + // completes execution. + JSFrameRegs bailedSlowNativeRegs; + TracerState(JSContext *cx, TraceMonitor *tm, TreeFragment *ti, uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp); ~TracerState(); @@ -251,10 +255,11 @@ struct GlobalState { * * A callstack in a context may additionally be "active" or "suspended". A * suspended callstack |cs| has a "suspended frame" which serves as the current - * frame of |cs|. There is at most one active callstack in a given context. - * Callstacks in a context execute LIFO and are maintained in a stack. The top - * of this stack is the context's "current callstack". If a context |cx| has an - * active callstack |cs|, then: + * frame of |cs|. 
Additionally, a suspended callstack has "suspended regs", + * which is a snapshot of |cx->regs| when |cs| was suspended. There is at most + * one active callstack in a given context. Callstacks in a context execute + * LIFO and are maintained in a stack. The top of this stack is the context's + * "current callstack". If a context |cx| has an active callstack |cs|, then: * 1. |cs| is |cx|'s current callstack, * 2. |cx->fp != NULL|, and * 3. |cs|'s current frame is |cx->fp|. @@ -284,6 +289,9 @@ class CallStack /* If this callstack is suspended, the top of the callstack. */ JSStackFrame *suspendedFrame; + /* If this callstack is suspended, |cx->regs| when it was suspended. */ + JSFrameRegs *suspendedRegs; + /* This callstack was suspended by JS_SaveFrameChain. */ bool saved; @@ -366,11 +374,12 @@ class CallStack /* Transitioning between isActive <--> isSuspended */ - void suspend(JSStackFrame *fp) { + void suspend(JSStackFrame *fp, JSFrameRegs *regs) { JS_ASSERT(isActive()); JS_ASSERT(fp && contains(fp)); suspendedFrame = fp; JS_ASSERT(isSuspended()); + suspendedRegs = regs; } void resume() { @@ -381,9 +390,9 @@ class CallStack /* When isSuspended, transitioning isSaved <--> !isSaved */ - void save(JSStackFrame *fp) { + void save(JSStackFrame *fp, JSFrameRegs *regs) { JS_ASSERT(!isSaved()); - suspend(fp); + suspend(fp, regs); saved = true; JS_ASSERT(isSaved()); } @@ -423,6 +432,16 @@ class CallStack return suspendedFrame; } + JSFrameRegs *getSuspendedRegs() const { + JS_ASSERT(isSuspended()); + return suspendedRegs; + } + + jsval *getSuspendedSP() const { + JS_ASSERT(isSuspended()); + return suspendedRegs->sp; + } + /* JSContext / js::StackSpace bookkeeping. */ void setPreviousInContext(CallStack *cs) { @@ -682,7 +701,7 @@ class StackSpace JS_REQUIRES_STACK void pushInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag, - InvokeFrameGuard &fg); + InvokeFrameGuard &fg, JSFrameRegs ®s); /* * For the simpler case when arguments are allocated at the same time as @@ -695,7 +714,7 @@ class StackSpace ExecuteFrameGuard &fg) const; JS_REQUIRES_STACK void pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg, - JSObject *initialVarObj); + JSFrameRegs ®s, JSObject *initialVarObj); /* * Since RAII cannot be used for inline frames, callers must manually @@ -706,7 +725,8 @@ class StackSpace uintN nmissing, uintN nfixed) const; JS_REQUIRES_STACK - inline void pushInlineFrame(JSContext *cx, JSStackFrame *fp, JSStackFrame *newfp); + inline void pushInlineFrame(JSContext *cx, JSStackFrame *fp, jsbytecode *pc, + JSStackFrame *newfp); JS_REQUIRES_STACK inline void popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down); @@ -719,7 +739,8 @@ class StackSpace void getSynthesizedSlowNativeFrame(JSContext *cx, CallStack *&cs, JSStackFrame *&fp); JS_REQUIRES_STACK - void pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp); + void pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp, + JSFrameRegs ®s); JS_REQUIRES_STACK void popSynthesizedSlowNativeFrame(JSContext *cx); @@ -731,6 +752,34 @@ class StackSpace JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0); +/* + * While |cx->fp|'s pc/sp are available in |cx->regs|, to compute the saved + * value of pc/sp for any other frame, it is necessary to know about that + * frame's up-frame. This iterator maintains this information when walking down + * a chain of stack frames starting at |cx->fp|. + * + * Usage: + * for (FrameRegsIter i(cx); !i.done(); ++i) + * ... i.fp() ... i.sp() ... 
i.pc() + */ +class FrameRegsIter +{ + CallStack *curcs; + JSStackFrame *curfp; + jsval *cursp; + jsbytecode *curpc; + + public: + JS_REQUIRES_STACK FrameRegsIter(JSContext *cx); + + bool done() const { return curfp == NULL; } + FrameRegsIter &operator++(); + + JSStackFrame *fp() const { return curfp; } + jsval *sp() const { return cursp; } + jsbytecode *pc() const { return curpc; } +}; + /* Holds the number of recording attemps for an address. */ typedef HashMappc can be NULL) + */ + JS_REQUIRES_STACK + JSFrameRegs *regs; + private: friend class js::StackSpace; + friend JSBool js_Interpret(JSContext *); - /* 'fp' must only be changed by calling this function. */ + /* 'fp' and 'regs' must only be changed by calling these functions. */ void setCurrentFrame(JSStackFrame *fp) { this->fp = fp; } + void setCurrentRegs(JSFrameRegs *regs) { + this->regs = regs; + } + public: /* Temporary arena pool used while compiling and decompiling. */ JSArenaPool tempPool; @@ -1721,7 +1782,8 @@ struct JSContext } /* Add the given callstack to the list as the new active callstack. */ - void pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp); + void pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp, + JSFrameRegs ®s); /* Remove the active callstack and make the next callstack active. */ void popCallStackAndFrame(); @@ -1736,7 +1798,7 @@ struct JSContext * Perform a linear search of all frames in all callstacks in the given context * for the given frame, returning the callstack, if found, and null otherwise. */ - js::CallStack *containingCallStack(JSStackFrame *target); + js::CallStack *containingCallStack(const JSStackFrame *target); #ifdef JS_THREADSAFE JSThread *thread; @@ -1820,9 +1882,7 @@ struct JSContext JSGenerator *generatorFor(JSStackFrame *fp) const; /* Early OOM-check. */ - bool ensureGeneratorStackSpace() { - return genStack.reserve(genStack.length() + 1); - } + inline bool ensureGeneratorStackSpace(); bool enterGenerator(JSGenerator *gen) { return genStack.append(gen); @@ -1968,6 +2028,15 @@ struct JSContext return JS_THREAD_DATA(this)->stackSpace; } +#ifdef DEBUG + void assertValidStackDepth(uintN depth) { + JS_ASSERT(0 <= regs->sp - StackBase(fp)); + JS_ASSERT(depth <= uintptr_t(regs->sp - StackBase(fp))); + } +#else + void assertValidStackDepth(uintN /*depth*/) {} +#endif + private: /* @@ -1993,6 +2062,13 @@ JSStackFrame::varobj(JSContext *cx) const return fun ? callobj : cx->activeCallStack()->getInitialVarObj(); } +JS_ALWAYS_INLINE jsbytecode * +JSStackFrame::pc(JSContext *cx) const +{ + JS_ASSERT(cx->containingCallStack(this) != NULL); + return cx->fp == this ? cx->regs->pc : savedPC; +} + /* * InvokeArgsGuard is used outside the JS engine (where jscntxtinlines.h is * not included). To avoid visibility issues, force members inline. @@ -2031,7 +2107,11 @@ InvokeArgsGuard::~InvokeArgsGuard() # define JS_THREAD_ID(cx) ((cx)->thread ? (cx)->thread->id : 0) #endif -#ifdef __cplusplus +static inline uintN +FramePCOffset(JSContext *cx, JSStackFrame* fp) +{ + return uintN((fp->imacpc ? fp->imacpc : fp->pc(cx)) - fp->script->code); +} static inline JSAtom ** FrameAtomBase(JSContext *cx, JSStackFrame *fp) @@ -2435,8 +2515,6 @@ class JSAutoResolveFlags JS_DECL_USE_GUARD_OBJECT_NOTIFIER }; -#endif /* __cpluscplus */ - /* * Slightly more readable macros for testing per-context option settings (also * to hide bitset implementation detail). 
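
Not part of the patch: a minimal sketch of the FrameRegsIter usage pattern documented in the jscntxt.h comment above, walking every scripted frame through the iterator instead of reading the removed fp->regs. It assumes this tree's internal headers and helpers already used elsewhere in the diff (StackBase, js_PCToLineNumber); the function name SketchDumpScriptedFrames is invented for illustration.

#include <stdio.h>

#include "jscntxt.h"
#include "jsinterp.h"
#include "jsscript.h"

using namespace js;

/* Walk every scripted frame via FrameRegsIter; each frame's pc/sp come from the iterator. */
static void
SketchDumpScriptedFrames(JSContext *cx)
{
    /* Callers must be off trace (cf. LeaveTrace in js_ReportIsNotFunction) so cx->regs is valid. */
    for (FrameRegsIter i(cx); !i.done(); ++i) {
        JSStackFrame *fp = i.fp();
        jsbytecode *pc = i.pc();
        if (!fp->script || !pc)
            continue;   /* skip native frames, as PopulateReportBlame does */
        fprintf(stderr, "%s:%u depth=%u\n",
                fp->script->filename,
                js_PCToLineNumber(cx, fp->script, pc),
                unsigned(i.sp() - StackBase(fp)));
    }
}

This mirrors what PopulateReportBlame and js_DumpStackFrame do after this patch, but through the iterator, so a suspended callstack's saved regs are consulted rather than a per-frame regs pointer.
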
diff --git a/js/src/jscntxtinlines.h b/js/src/jscntxtinlines.h index d79b5752a916..0cc3b66196a9 100644 --- a/js/src/jscntxtinlines.h +++ b/js/src/jscntxtinlines.h @@ -46,6 +46,15 @@ #include "jsobjinlines.h" +inline bool +JSContext::ensureGeneratorStackSpace() +{ + bool ok = genStack.reserve(genStack.length() + 1); + if (!ok) + js_ReportOutOfMemory(this); + return ok; +} + namespace js { JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame * @@ -61,12 +70,12 @@ StackSpace::firstUnused() const CallStack *ccs = currentCallStack; if (!ccs) return base; - if (!ccs->inContext()) - return ccs->getInitialArgEnd(); - JSStackFrame *fp = ccs->getCurrentFrame(); - if (JSFrameRegs *regs = fp->regs) - return regs->sp; - return fp->slots(); + if (JSContext *cx = ccs->maybeContext()) { + if (!ccs->isSuspended()) + return cx->regs->sp; + return ccs->getSuspendedRegs()->sp; + } + return ccs->getInitialArgEnd(); } /* Inline so we don't need the friend API. */ @@ -124,7 +133,7 @@ StackSpace::getInlineFrame(JSContext *cx, jsval *sp, { assertIsCurrent(cx); JS_ASSERT(cx->hasActiveCallStack()); - JS_ASSERT(cx->fp->regs->sp == sp); + JS_ASSERT(cx->regs->sp == sp); ptrdiff_t nvals = nmissing + VALUES_PER_STACK_FRAME + nfixed; if (!ensureSpace(cx, sp, nvals)) @@ -135,13 +144,18 @@ StackSpace::getInlineFrame(JSContext *cx, jsval *sp, } JS_REQUIRES_STACK JS_ALWAYS_INLINE void -StackSpace::pushInlineFrame(JSContext *cx, JSStackFrame *fp, JSStackFrame *newfp) +StackSpace::pushInlineFrame(JSContext *cx, JSStackFrame *fp, jsbytecode *pc, + JSStackFrame *newfp) { assertIsCurrent(cx); JS_ASSERT(cx->hasActiveCallStack()); - JS_ASSERT(cx->fp == fp); + JS_ASSERT(cx->fp == fp && cx->regs->pc == pc); + fp->savedPC = pc; newfp->down = fp; +#ifdef DEBUG + newfp->savedPC = JSStackFrame::sInvalidPC; +#endif cx->setCurrentFrame(newfp); } @@ -151,7 +165,14 @@ StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down) assertIsCurrent(cx); JS_ASSERT(cx->hasActiveCallStack()); JS_ASSERT(cx->fp == up && up->down == down); + JS_ASSERT(up->savedPC == JSStackFrame::sInvalidPC); + JSFrameRegs *regs = cx->regs; + regs->pc = down->savedPC; + regs->sp = up->argv - 1; +#ifdef DEBUG + down->savedPC = JSStackFrame::sInvalidPC; +#endif cx->setCurrentFrame(down); } diff --git a/js/src/jsdbgapi.cpp b/js/src/jsdbgapi.cpp index d7c21416dda0..02f1f376f8cc 100644 --- a/js/src/jsdbgapi.cpp +++ b/js/src/jsdbgapi.cpp @@ -684,19 +684,16 @@ js_watch_set(JSContext *cx, JSObject *obj, jsval id, jsval *vp) PodZero(fp->slots(), nfixed); PodZero(fp); fp->script = script; - fp->regs = NULL; fp->fun = fun; fp->argv = vp + 2; fp->scopeChain = closure->getParent(); - if (script) { - JS_ASSERT(script->length >= JSOP_STOP_LENGTH); - regs.pc = script->code + script->length - JSOP_STOP_LENGTH; - regs.sp = fp->slots() + script->nfixed; - fp->regs = ®s; - } + + /* Initialize regs. */ + regs.pc = script ? script->code : NULL; + regs.sp = fp->slots() + nfixed; /* Officially push |fp|. |frame|'s destructor pops. */ - cx->stack().pushExecuteFrame(cx, frame, NULL); + cx->stack().pushExecuteFrame(cx, frame, regs, NULL); /* Now that fp has been pushed, get the call object. */ if (script && fun && fun->isHeavyweight() && @@ -1070,7 +1067,7 @@ JS_GetFrameScript(JSContext *cx, JSStackFrame *fp) JS_PUBLIC_API(jsbytecode *) JS_GetFramePC(JSContext *cx, JSStackFrame *fp) { - return fp->regs ? 
fp->regs->pc : NULL; + return fp->pc(cx); } JS_PUBLIC_API(JSStackFrame *) diff --git a/js/src/jsdtracef.cpp b/js/src/jsdtracef.cpp index c157860188d7..176101e358d6 100644 --- a/js/src/jsdtracef.cpp +++ b/js/src/jsdtracef.cpp @@ -79,7 +79,7 @@ jsdtrace_fun_linenumber(JSContext *cx, const JSFunction *fun) static int jsdtrace_frame_linenumber(JSContext *cx, JSStackFrame *fp) { - if (fp && fp->regs) + if (fp) return (int) js_FramePCToLineNumber(cx, fp); return 0; diff --git a/js/src/jsexn.cpp b/js/src/jsexn.cpp index 5159e1875472..1cc625a89e68 100644 --- a/js/src/jsexn.cpp +++ b/js/src/jsexn.cpp @@ -338,7 +338,7 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message, elem->filename = NULL; if (fp->script) { elem->filename = fp->script->filename; - if (fp->regs) + if (fp->pc(cx)) elem->ulineno = js_FramePCToLineNumber(cx, fp); } ++elem; @@ -753,7 +753,7 @@ Exception(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval) } else { if (!fp) fp = js_GetScriptedCaller(cx, NULL); - lineno = (fp && fp->regs) ? js_FramePCToLineNumber(cx, fp) : 0; + lineno = (fp && fp->pc(cx)) ? js_FramePCToLineNumber(cx, fp) : 0; } return (obj->getClass() != &js_ErrorClass) || diff --git a/js/src/jsfun.cpp b/js/src/jsfun.cpp index 75b36a7208e7..553cc67cca28 100644 --- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -2621,14 +2621,8 @@ js_ValueToCallableObject(JSContext *cx, jsval *vp, uintN flags) void js_ReportIsNotFunction(JSContext *cx, jsval *vp, uintN flags) { - JSStackFrame *fp; uintN error; - const char *name, *source; - - for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down) - continue; - name = source = NULL; - + const char *name = NULL, *source = NULL; AutoValueRooter tvr(cx); if (flags & JSV2F_ITERATOR) { error = JSMSG_BAD_ITERATOR; @@ -2650,15 +2644,18 @@ js_ReportIsNotFunction(JSContext *cx, jsval *vp, uintN flags) error = JSMSG_NOT_FUNCTION; } - js_ReportValueError3(cx, error, - (fp && fp->regs && - StackBase(fp) <= vp && vp < fp->regs->sp) - ? vp - fp->regs->sp - : (flags & JSV2F_SEARCH_STACK) - ? JSDVG_SEARCH_STACK - : JSDVG_IGNORE_STACK, - *vp, NULL, - name, source); + LeaveTrace(cx); + FrameRegsIter i(cx); + while (!i.done() && !i.pc()) + ++i; + + ptrdiff_t spindex = + !i.done() && StackBase(i.fp()) <= vp && vp < i.sp() + ? vp - i.sp() + : flags & JSV2F_SEARCH_STACK ? JSDVG_SEARCH_STACK + : JSDVG_IGNORE_STACK; + + js_ReportValueError3(cx, error, spindex, *vp, NULL, name, source); } /* diff --git a/js/src/jsinterp.cpp b/js/src/jsinterp.cpp index fa1e97ad37bb..aa35b12d64ca 100644 --- a/js/src/jsinterp.cpp +++ b/js/src/jsinterp.cpp @@ -95,6 +95,10 @@ using namespace js; /* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */ #if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___ +#ifdef DEBUG +jsbytecode *const JSStackFrame::sInvalidPC = (jsbytecode *)0xbeef; +#endif + JSObject * js_GetScopeChain(JSContext *cx, JSStackFrame *fp) { @@ -559,6 +563,7 @@ js_Invoke(JSContext *cx, const InvokeArgsGuard &args, uintN flags) * Get a pointer to new frame/slots. This memory is not "claimed", so the * code before pushInvokeFrame must not reenter the interpreter. */ + JSFrameRegs regs; InvokeFrameGuard frame; if (!cx->stack().getInvokeFrame(cx, args, nmissing, nfixed, frame)) return false; @@ -583,13 +588,21 @@ js_Invoke(JSContext *cx, const InvokeArgsGuard &args, uintN flags) fp->annotation = NULL; fp->scopeChain = NULL; fp->blockChain = NULL; - fp->regs = NULL; fp->imacpc = NULL; fp->flags = flags; fp->displaySave = NULL; + /* Initialize regs. 
*/ + if (script) { + regs.pc = script->code; + regs.sp = fp->slots() + script->nfixed; + } else { + regs.pc = NULL; + regs.sp = fp->slots(); + } + /* Officially push |fp|. |frame|'s destructor pops. */ - cx->stack().pushInvokeFrame(cx, args, frame); + cx->stack().pushInvokeFrame(cx, args, frame, regs); /* Now that the frame has been pushed, fix up the scope chain. */ if (native) { @@ -722,6 +735,7 @@ js_Execute(JSContext *cx, JSObject *const chain, JSScript *script, * N.B. when fp->argv is removed (bug 539144), argv will have to be copied * in before execution and copied out after. */ + JSFrameRegs regs; ExecuteFrameGuard frame; if (!cx->stack().getExecuteFrame(cx, down, 0, script->nslots, frame)) return false; @@ -797,11 +811,14 @@ js_Execute(JSContext *cx, JSObject *const chain, JSScript *script, fp->script = script; fp->imacpc = NULL; fp->rval = JSVAL_VOID; - fp->regs = NULL; fp->blockChain = NULL; + /* Initialize regs. */ + regs.pc = script->code; + regs.sp = StackBase(fp); + /* Officially push |fp|. |frame|'s destructor pops. */ - cx->stack().pushExecuteFrame(cx, frame, initialVarObj); + cx->stack().pushExecuteFrame(cx, frame, regs, initialVarObj); /* Now that the frame has been pushed, we can call the thisObject hook. */ if (!down) { @@ -1114,7 +1131,7 @@ js_EnterWith(JSContext *cx, jsint stackIndex) JSObject *obj, *parent, *withobj; fp = cx->fp; - sp = fp->regs->sp; + sp = cx->regs->sp; JS_ASSERT(stackIndex < 0); JS_ASSERT(StackBase(fp) <= sp + stackIndex); @@ -1175,16 +1192,16 @@ js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth) * Unwind block and scope chains to match the given depth. The function sets * fp->sp on return to stackDepth. */ -JS_REQUIRES_STACK JSBool -js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth, - JSBool normalUnwind) +JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool +js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind) { JSObject *obj; JSClass *clasp; JS_ASSERT(stackDepth >= 0); - JS_ASSERT(StackBase(fp) + stackDepth <= fp->regs->sp); + JS_ASSERT(StackBase(cx->fp) + stackDepth <= cx->regs->sp); + JSStackFrame *fp = cx->fp; for (obj = fp->blockChain; obj; obj = obj->getParent()) { JS_ASSERT(obj->getClass() == &js_BlockClass); if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth) @@ -1205,7 +1222,7 @@ js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth, } } - fp->regs->sp = StackBase(fp) + stackDepth; + cx->regs->sp = StackBase(fp) + stackDepth; return normalUnwind; } @@ -1274,7 +1291,7 @@ js_TraceOpcode(JSContext *cx) tracefp = (FILE *) cx->tracefp; JS_ASSERT(tracefp); fp = cx->fp; - regs = fp->regs; + regs = cx->regs; /* * Operations in prologues don't produce interesting values, and @@ -2008,7 +2025,7 @@ js_Interpret(JSContext *cx) uintN inlineCallCount; JSAtom **atoms; JSVersion currentVersion, originalVersion; - JSFrameRegs regs; + JSFrameRegs regs, *prevContextRegs; JSObject *obj, *obj2, *parent; JSBool ok, cond; jsint len; @@ -2214,7 +2231,7 @@ js_Interpret(JSContext *cx) script = fp->script; \ atoms = FrameAtomBase(cx, fp); \ currentVersion = (JSVersion) script->version; \ - JS_ASSERT(fp->regs == ®s); \ + JS_ASSERT(cx->regs == ®s); \ JS_END_MACRO #define MONITOR_BRANCH(reason) \ @@ -2312,21 +2329,18 @@ js_Interpret(JSContext *cx) */ CHECK_INTERRUPT_HANDLER(); -#if !JS_HAS_GENERATORS - JS_ASSERT(!fp->regs); -#else - /* Initialize the pc and sp registers unless we're resuming a generator. 
*/ - if (JS_LIKELY(!fp->regs)) { -#endif - ASSERT_NOT_THROWING(cx); - regs.pc = script->code; - regs.sp = StackBase(fp); - fp->regs = ®s; + /* + * Access to |cx->regs| is very common, so we copy in and repoint to a + * local variable, and copy out on exit. + */ + JS_ASSERT(cx->regs); + prevContextRegs = cx->regs; + regs = *cx->regs; + cx->setCurrentRegs(®s); + #if JS_HAS_GENERATORS - } else { - JS_ASSERT(fp->regs == &cx->generatorFor(fp)->savedRegs); - regs = *fp->regs; - fp->regs = ®s; + if (JS_UNLIKELY(fp->isGenerator())) { + JS_ASSERT(prevContextRegs == &cx->generatorFor(fp)->savedRegs); JS_ASSERT((size_t) (regs.pc - script->code) <= script->length); JS_ASSERT((size_t) (regs.sp - StackBase(fp)) <= StackDepth(script)); @@ -2344,13 +2358,10 @@ js_Interpret(JSContext *cx) goto error; } } -#endif /* JS_HAS_GENERATORS */ +#endif #ifdef JS_TRACER - /* - * We cannot reenter the interpreter while recording; wait to abort until - * after cx->fp->regs is set. - */ + /* We cannot reenter the interpreter while recording. */ if (TRACE_RECORDER(cx)) AbortRecording(cx, "attempt to reenter interpreter while recording"); #endif @@ -2415,6 +2426,7 @@ js_Interpret(JSContext *cx) #endif /* !JS_THREADED_INTERP */ error: + JS_ASSERT(cx->regs == ®s); #ifdef JS_TRACER if (fp->imacpc && cx->throwing) { // Handle other exceptions as if they came from the imacro-calling pc. @@ -2507,8 +2519,8 @@ js_Interpret(JSContext *cx) */ regs.pc = (script)->main + tn->start + tn->length; - ok = js_UnwindScope(cx, fp, tn->stackDepth, JS_TRUE); - JS_ASSERT(fp->regs->sp == StackBase(fp) + tn->stackDepth); + ok = js_UnwindScope(cx, tn->stackDepth, JS_TRUE); + JS_ASSERT(regs.sp == StackBase(fp) + tn->stackDepth); if (!ok) { /* * Restart the handler search with updated pc and stack depth @@ -2578,13 +2590,13 @@ js_Interpret(JSContext *cx) forced_return: /* - * Unwind the scope making sure that ok stays false even when UnwindScope + * Unwind the scope making sure that ok stays false even when js_UnwindScope * returns true. * * When a trap handler returns JSTRAP_RETURN, we jump here with ok set to * true bypassing any finally blocks. */ - ok &= js_UnwindScope(cx, fp, 0, ok || cx->throwing); + ok &= js_UnwindScope(cx, 0, ok || cx->throwing); JS_ASSERT(regs.sp == StackBase(fp)); #ifdef DEBUG @@ -2607,21 +2619,17 @@ js_Interpret(JSContext *cx) * frame pc. */ JS_ASSERT(inlineCallCount == 0); - JS_ASSERT(fp->regs == ®s); + JS_ASSERT(cx->regs == ®s); + *prevContextRegs = regs; + cx->setCurrentRegs(prevContextRegs); + #ifdef JS_TRACER if (TRACE_RECORDER(cx)) AbortRecording(cx, "recording out of js_Interpret"); #endif -#if JS_HAS_GENERATORS - if (JS_UNLIKELY(fp->isGenerator())) { - cx->generatorFor(fp)->savedRegs = regs; - } else -#endif /* JS_HAS_GENERATORS */ - { - JS_ASSERT(!fp->blockChain); - JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0)); - } - fp->regs = NULL; + + JS_ASSERT_IF(!fp->isGenerator(), !fp->blockChain); + JS_ASSERT_IF(!fp->isGenerator(), !js_IsActiveWithOrBlock(cx, fp->scopeChain, 0)); /* Undo the remaining effects committed on entry to js_Interpret. */ if (script->staticLevel < JS_DISPLAY_SIZE) diff --git a/js/src/jsinterp.h b/js/src/jsinterp.h index 0406203d7481..7184137dfd90 100644 --- a/js/src/jsinterp.h +++ b/js/src/jsinterp.h @@ -80,13 +80,10 @@ enum JSFrameFlags { * function. * * NB: This struct is manually initialized in jsinterp.c and jsiter.c. If you - * add new members, update both files. But first, try to remove members. 
The - * sharp* and xml* members should be moved onto the stack as local variables - * with well-known slots, if possible. + * add new members, update both files. */ struct JSStackFrame { - JSFrameRegs *regs; jsbytecode *imacpc; /* null or interpreter macro call pc */ JSObject *callobj; /* lazily created Call object */ jsval argsobj; /* lazily created arguments object, must be @@ -102,6 +99,10 @@ struct JSStackFrame /* Maintained by StackSpace operations */ JSStackFrame *down; /* previous frame, part of stack layout invariant */ + jsbytecode *savedPC; /* only valid if cx->fp != this */ +#ifdef DEBUG + static jsbytecode *const sInvalidPC; +#endif /* * We can't determine in advance which local variables can live on @@ -150,12 +151,9 @@ struct JSStackFrame script->staticLevel */ /* Members only needed for inline calls. */ - JSFrameRegs callerRegs; /* caller's regs for inline call */ void *hookData; /* debugger call hook data */ JSVersion callerVersion; /* dynamic version of calling script */ - inline void assertValidStackDepth(uintN depth); - void putActivationObjects(JSContext *cx) { /* * The order of calls here is important as js_PutCallObject needs to @@ -169,6 +167,9 @@ struct JSStackFrame } } + /* Get the frame's current bytecode, assuming |this| is in |cx|. */ + jsbytecode *pc(JSContext *cx) const; + jsval *argEnd() const { return (jsval *)this; } @@ -220,32 +221,12 @@ JS_STATIC_ASSERT(sizeof(JSStackFrame) % sizeof(jsval) == 0); } -#ifdef __cplusplus -static JS_INLINE uintN -FramePCOffset(JSStackFrame* fp) -{ - return uintN((fp->imacpc ? fp->imacpc : fp->regs->pc) - fp->script->code); -} -#endif - static JS_INLINE jsval * StackBase(JSStackFrame *fp) { return fp->slots() + fp->script->nfixed; } -#ifdef DEBUG -void -JSStackFrame::assertValidStackDepth(uintN depth) -{ - JS_ASSERT(0 <= regs->sp - StackBase(this)); - JS_ASSERT(depth <= uintptr_t(regs->sp - StackBase(this))); -} -#else -void -JSStackFrame::assertValidStackDepth(uintN /*depth*/){} -#endif - static JS_INLINE uintN GlobalVarCount(JSStackFrame *fp) { @@ -433,8 +414,7 @@ js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth); * fp->sp on return to stackDepth. */ extern JS_REQUIRES_STACK JSBool -js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth, - JSBool normalUnwind); +js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind); extern JSBool js_OnUnknownMethod(JSContext *cx, jsval *vp); diff --git a/js/src/jsiter.cpp b/js/src/jsiter.cpp index adc25ef07fcd..dd9a619e8107 100644 --- a/js/src/jsiter.cpp +++ b/js/src/jsiter.cpp @@ -74,6 +74,7 @@ #include "jsxml.h" #endif +#include "jscntxtinlines.h" #include "jsobjinlines.h" #include "jsstrinlines.h" @@ -794,14 +795,13 @@ js_NewGenerator(JSContext *cx) /* Initialize JSGenerator. */ gen->obj = obj; gen->state = JSGEN_NEWBORN; - gen->savedRegs.pc = fp->regs->pc; - JS_ASSERT(fp->regs->sp == fp->slots() + fp->script->nfixed); + gen->savedRegs.pc = cx->regs->pc; + JS_ASSERT(cx->regs->sp == fp->slots() + fp->script->nfixed); gen->savedRegs.sp = slots + fp->script->nfixed; gen->vplen = vplen; gen->liveFrame = newfp; /* Copy generator's stack frame copy in from |cx->fp|. */ - newfp->regs = &gen->savedRegs; newfp->imacpc = NULL; newfp->callobj = fp->callobj; if (fp->callobj) { /* Steal call object. 
*/ @@ -922,7 +922,6 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj, memcpy(vp, genVp, usedBefore * sizeof(jsval)); fp->flags &= ~JSFRAME_FLOATING_GENERATOR; fp->argv = vp + 2; - fp->regs = &gen->savedRegs; gen->savedRegs.sp = fp->slots() + (gen->savedRegs.sp - genfp->slots()); JS_ASSERT(uintN(gen->savedRegs.sp - fp->slots()) <= fp->script->nslots); @@ -945,7 +944,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj, (void)cx->enterGenerator(gen); /* OOM check above. */ /* Officially push |fp|. |frame|'s destructor pops. */ - cx->stack().pushExecuteFrame(cx, frame, NULL); + cx->stack().pushExecuteFrame(cx, frame, gen->savedRegs, NULL); ok = js_Interpret(cx); diff --git a/js/src/jsobj.cpp b/js/src/jsobj.cpp index 419fa7612bbb..bc05625885ff 100644 --- a/js/src/jsobj.cpp +++ b/js/src/jsobj.cpp @@ -1023,9 +1023,10 @@ js_ComputeFilename(JSContext *cx, JSStackFrame *caller, return principals->codebase; } - if (caller->regs && js_GetOpcode(cx, caller->script, caller->regs->pc) == JSOP_EVAL) { - JS_ASSERT(js_GetOpcode(cx, caller->script, caller->regs->pc + JSOP_EVAL_LENGTH) == JSOP_LINENO); - *linenop = GET_UINT16(caller->regs->pc + JSOP_EVAL_LENGTH); + jsbytecode *pc = caller->pc(cx); + if (pc && js_GetOpcode(cx, caller->script, pc) == JSOP_EVAL) { + JS_ASSERT(js_GetOpcode(cx, caller->script, pc + JSOP_EVAL_LENGTH) == JSOP_LINENO); + *linenop = GET_UINT16(pc + JSOP_EVAL_LENGTH); } else { *linenop = js_FramePCToLineNumber(cx, caller); } @@ -1069,7 +1070,8 @@ obj_eval(JSContext *cx, uintN argc, jsval *vp) return JS_FALSE; } - bool indirectCall = (caller->regs && *caller->regs->pc != JSOP_EVAL); + jsbytecode *callerPC = caller->pc(cx); + bool indirectCall = (callerPC && *callerPC != JSOP_EVAL); /* * This call to js_GetWrappedObject is safe because of the security checks @@ -2795,16 +2797,14 @@ js_InferFlags(JSContext *cx, uintN defaultFlags) JS_ASSERT_NOT_ON_TRACE(cx); - JSStackFrame *fp; jsbytecode *pc; const JSCodeSpec *cs; uint32 format; uintN flags = 0; - fp = js_GetTopStackFrame(cx); - if (!fp || !fp->regs) + JSStackFrame *const fp = js_GetTopStackFrame(cx); + if (!fp || !(pc = cx->regs->pc)) return defaultFlags; - pc = fp->regs->pc; cs = &js_CodeSpec[js_GetOpcode(cx, fp->script, pc)]; format = cs->format; if (JOF_MODE(format) != JOF_NAME) @@ -3010,15 +3010,11 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp) JS_REQUIRES_STACK JSBool js_PutBlockObject(JSContext *cx, JSBool normalUnwind) { - JSStackFrame *fp; - JSObject *obj; - uintN depth, count; - /* Blocks have one fixed slot available for the first local.*/ JS_STATIC_ASSERT(JS_INITIAL_NSLOTS == JSSLOT_BLOCK_DEPTH + 2); - fp = cx->fp; - obj = fp->scopeChain; + JSStackFrame *const fp = cx->fp; + JSObject *obj = fp->scopeChain; JS_ASSERT(obj->getClass() == &js_BlockClass); JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp)); JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj)); @@ -3035,10 +3031,10 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind) JS_ASSERT(obj->numSlots() == JS_INITIAL_NSLOTS); /* The block and its locals must be on the current stack for GC safety. 
*/ - depth = OBJ_BLOCK_DEPTH(cx, obj); - count = OBJ_BLOCK_COUNT(cx, obj); - JS_ASSERT(depth <= (size_t) (fp->regs->sp - StackBase(fp))); - JS_ASSERT(count <= (size_t) (fp->regs->sp - StackBase(fp) - depth)); + uintN depth = OBJ_BLOCK_DEPTH(cx, obj); + uintN count = OBJ_BLOCK_COUNT(cx, obj); + JS_ASSERT(depth <= (size_t) (cx->regs->sp - StackBase(fp))); + JS_ASSERT(count <= (size_t) (cx->regs->sp - StackBase(fp) - depth)); /* See comments in CheckDestructuring from jsparse.cpp. */ JS_ASSERT(count >= 1); @@ -4776,7 +4772,7 @@ js_GetMethod(JSContext *cx, JSObject *obj, jsid id, uintN getHow, jsval *vp) JS_FRIEND_API(bool) js_CheckUndeclaredVarAssignment(JSContext *cx, jsval propname) { - JSStackFrame *fp = js_GetTopStackFrame(cx); + JSStackFrame *const fp = js_GetTopStackFrame(cx); if (!fp) return true; @@ -6418,32 +6414,42 @@ MaybeDumpValue(const char *name, jsval v) } JS_FRIEND_API(void) -js_DumpStackFrame(JSStackFrame *fp) +js_DumpStackFrame(JSContext *cx, JSStackFrame *start) { - jsval *sp = NULL; + /* This should only called during live debugging. */ + VOUCH_DOES_NOT_REQUIRE_STACK(); + + if (!start) + start = cx->fp; + FrameRegsIter i(cx); + while (!i.done() && i.fp() != start) + ++i; + + if (i.done()) { + fprintf(stderr, "fp = %p not found in cx = %p\n", (void *)start, (void *)cx); + return; + } + + for (; !i.done(); ++i) { + JSStackFrame *const fp = i.fp(); - for (; fp; fp = fp->down) { fprintf(stderr, "JSStackFrame at %p\n", (void *) fp); - if (fp->argv) + if (fp->argv) { + fprintf(stderr, "callee: "); dumpValue(fp->argv[-2]); - else + } else { fprintf(stderr, "global frame, no callee"); + } fputc('\n', stderr); if (fp->script) fprintf(stderr, "file %s line %u\n", fp->script->filename, (unsigned) fp->script->lineno); - if (fp->regs) { - if (!fp->regs->pc) { - fprintf(stderr, "*** regs && !regs->pc, skipping frame\n\n"); - continue; - } + if (jsbytecode *pc = i.pc()) { if (!fp->script) { - fprintf(stderr, "*** regs && !script, skipping frame\n\n"); + fprintf(stderr, "*** pc && !script, skipping frame\n\n"); continue; } - jsbytecode *pc = fp->regs->pc; - sp = fp->regs->sp; if (fp->imacpc) { fprintf(stderr, " pc in imacro at %p\n called from ", pc); pc = fp->imacpc; @@ -6453,19 +6459,15 @@ js_DumpStackFrame(JSStackFrame *fp) fprintf(stderr, "pc = %p\n", pc); fprintf(stderr, " current op: %s\n", js_CodeName[*pc]); } - if (sp && fp->slots()) { - fprintf(stderr, " slots: %p\n", (void *) fp->slots()); - fprintf(stderr, " sp: %p = slots + %u\n", (void *) sp, (unsigned) (sp - fp->slots())); - if (sp - fp->slots() < 10000) { // sanity - for (jsval *p = fp->slots(); p < sp; p++) { - fprintf(stderr, " %p: ", (void *) p); - dumpValue(*p); - fputc('\n', stderr); - } + jsval *sp = i.sp(); + fprintf(stderr, " slots: %p\n", (void *) fp->slots()); + fprintf(stderr, " sp: %p = slots + %u\n", (void *) sp, (unsigned) (sp - fp->slots())); + if (sp - fp->slots() < 10000) { // sanity + for (jsval *p = fp->slots(); p < sp; p++) { + fprintf(stderr, " %p: ", (void *) p); + dumpValue(*p); + fputc('\n', stderr); } - } else { - fprintf(stderr, " sp: %p\n", (void *) sp); - fprintf(stderr, " slots: %p\n", (void *) fp->slots()); } fprintf(stderr, " argv: %p (argc: %u)\n", (void *) fp->argv, (unsigned) fp->argc); MaybeDumpObject("callobj", fp->callobj); diff --git a/js/src/jsobj.h b/js/src/jsobj.h index 0f394214a8f9..07aaa8fe2b34 100644 --- a/js/src/jsobj.h +++ b/js/src/jsobj.h @@ -1294,7 +1294,7 @@ JS_FRIEND_API(void) js_DumpAtom(JSAtom *atom); JS_FRIEND_API(void) js_DumpValue(jsval val); JS_FRIEND_API(void) 
js_DumpId(jsid id); JS_FRIEND_API(void) js_DumpObject(JSObject *obj); -JS_FRIEND_API(void) js_DumpStackFrame(JSStackFrame *fp); +JS_FRIEND_API(void) js_DumpStackFrameChain(JSContext *cx, JSStackFrame *start = NULL); #endif extern uintN diff --git a/js/src/jsopcode.cpp b/js/src/jsopcode.cpp index 5e83c66d6e88..eab813e670b3 100644 --- a/js/src/jsopcode.cpp +++ b/js/src/jsopcode.cpp @@ -1993,7 +1993,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop) */ fp = js_GetScriptedCaller(cx, NULL); format = cs->format; - if (((fp && fp->regs && pc == fp->regs->pc) || + if (((fp && pc == fp->pc(cx)) || (pc == startpc && nuses != 0)) && format & (JOF_SET|JOF_DEL|JOF_INCDEC|JOF_FOR|JOF_VARPROP)) { mode = JOF_MODE(format); @@ -5102,26 +5102,25 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v, JSStackFrame *fp; jsbytecode *pc; JSScript *script; - JSFrameRegs *regs; - intN pcdepth; - jsval *sp, *stackBase; - char *name; JS_ASSERT(spindex < 0 || spindex == JSDVG_IGNORE_STACK || spindex == JSDVG_SEARCH_STACK); - fp = js_GetScriptedCaller(cx, NULL); - if (!fp || !fp->regs || !fp->regs->sp) + LeaveTrace(cx); + + /* Get scripted caller */ + FrameRegsIter i(cx); + while (!i.done() && !i.fp()->script) + ++i; + + if (i.done() || !i.pc()) goto do_fallback; + fp = i.fp(); script = fp->script; - regs = fp->regs; - pc = fp->imacpc ? fp->imacpc : regs->pc; - if (pc < script->main || script->code + script->length <= pc) { - JS_NOT_REACHED("bug"); - goto do_fallback; - } + pc = fp->imacpc ? fp->imacpc : i.pc(); + JS_ASSERT(pc >= script->main && pc < script->code + script->length); if (spindex != JSDVG_IGNORE_STACK) { jsbytecode **pcstack; @@ -5134,7 +5133,7 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v, cx->malloc(StackDepth(script) * sizeof *pcstack); if (!pcstack) return NULL; - pcdepth = ReconstructPCStack(cx, script, pc, pcstack); + intN pcdepth = ReconstructPCStack(cx, script, pc, pcstack); if (pcdepth < 0) goto release_pcstack; @@ -5150,8 +5149,8 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v, * calculated value matching v under assumption that it is * it that caused exception, see bug 328664. */ - stackBase = StackBase(fp); - sp = regs->sp; + jsval *stackBase = StackBase(fp); + jsval *sp = i.sp(); do { if (sp == stackBase) { pcdepth = -1; @@ -5178,10 +5177,13 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v, } { - jsbytecode* savepc = regs->pc; + jsbytecode* savepc = i.pc(); jsbytecode* imacpc = fp->imacpc; if (imacpc) { - regs->pc = imacpc; + if (fp == cx->fp) + cx->regs->pc = imacpc; + else + fp->savedPC = imacpc; fp->imacpc = NULL; } @@ -5189,18 +5191,23 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v, * FIXME: bug 489843. Stack reconstruction may have returned a pc * value *inside* an imacro; this would confuse the decompiler. 
*/ + char *name; if (imacpc && size_t(pc - script->code) >= script->length) name = FAILED_EXPRESSION_DECOMPILER; else name = DecompileExpression(cx, script, fp->fun, pc); if (imacpc) { - regs->pc = savepc; + if (fp == cx->fp) + cx->regs->pc = imacpc; + else + fp->savedPC = savepc; fp->imacpc = imacpc; } + + if (name != FAILED_EXPRESSION_DECOMPILER) + return name; } - if (name != FAILED_EXPRESSION_DECOMPILER) - return name; do_fallback: if (!fallback) { diff --git a/js/src/jsops.cpp b/js/src/jsops.cpp index 62a045c995e1..18660e434a7b 100644 --- a/js/src/jsops.cpp +++ b/js/src/jsops.cpp @@ -224,7 +224,6 @@ BEGIN_CASE(JSOP_STOP) { JS_ASSERT(!fp->blockChain); JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0)); - JS_ASSERT(fp->down->regs == &fp->callerRegs); if (JS_LIKELY(script->staticLevel < JS_DISPLAY_SIZE)) cx->display[script->staticLevel] = fp->displaySave; @@ -277,15 +276,12 @@ BEGIN_CASE(JSOP_STOP) JSStackFrame *down = fp->down; bool recursive = fp->script == down->script; - /* Restore caller's registers. */ - regs = fp->callerRegs; - regs.sp -= 1 + (size_t) fp->argc; - regs.sp[-1] = fp->rval; - down->regs = ®s; - - /* Pop |fp| from the context. */ + /* Pop the frame. */ cx->stack().popInlineFrame(cx, fp, down); + /* Propagate return value before fp is lost. */ + regs.sp[-1] = fp->rval; + /* Sync interpreter registers. */ fp = cx->fp; script = fp->script; @@ -2071,7 +2067,6 @@ BEGIN_CASE(JSOP_APPLY) } JS_ASSERT(!JSFUN_BOUND_METHOD_TEST(fun->flags)); newfp->thisv = vp[1]; - newfp->regs = NULL; newfp->imacpc = NULL; /* Push void to initialize local variables. */ @@ -2091,16 +2086,15 @@ BEGIN_CASE(JSOP_APPLY) js_SetVersion(cx, currentVersion); } - /* Push the frame and set interpreter registers. */ - newfp->callerRegs = regs; - fp->regs = &newfp->callerRegs; - regs.sp = newsp; + /* Push the frame. */ + stack.pushInlineFrame(cx, fp, regs.pc, newfp); + + /* Initializer regs after pushInlineFrame snapshots pc. */ regs.pc = newscript->code; - newfp->regs = ®s; - stack.pushInlineFrame(cx, fp, newfp); - JS_ASSERT(newfp == cx->fp); + regs.sp = newsp; /* Import into locals. */ + JS_ASSERT(newfp == cx->fp); fp = newfp; script = newscript; atoms = script->atomMap.vector; @@ -2132,8 +2126,8 @@ BEGIN_CASE(JSOP_APPLY) goto error; } } else if (fp->script == fp->down->script && - *fp->down->regs->pc == JSOP_CALL && - *fp->regs->pc == JSOP_TRACE) { + *fp->down->savedPC == JSOP_CALL && + *regs.pc == JSOP_TRACE) { MONITOR_BRANCH(Record_EnterFrame); } #endif @@ -3195,7 +3189,7 @@ END_CASE(JSOP_HOLE) BEGIN_CASE(JSOP_NEWARRAY) len = GET_UINT16(regs.pc); - cx->fp->assertValidStackDepth(len); + cx->assertValidStackDepth(len); obj = js_NewArrayObject(cx, len, regs.sp - len, JS_TRUE); if (!obj) goto error; diff --git a/js/src/jspropertycache.cpp b/js/src/jspropertycache.cpp index 33bfc22ada13..50576b850167 100644 --- a/js/src/jspropertycache.cpp +++ b/js/src/jspropertycache.cpp @@ -127,7 +127,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI * Optimize the cached vword based on our parameters and the current pc's * opcode format flags. */ - pc = cx->fp->regs->pc; + pc = cx->regs->pc; op = js_GetOpcode(cx, cx->fp->script, pc); cs = &js_CodeSpec[op]; kshape = 0; diff --git a/js/src/jsrecursion.cpp b/js/src/jsrecursion.cpp index b9ad686fc17e..b1731aec5886 100644 --- a/js/src/jsrecursion.cpp +++ b/js/src/jsrecursion.cpp @@ -163,8 +163,8 @@ TraceRecorder::downSnapshot(FrameInfo* downFrame) exitTypeMap[i] = typeMap[i]; /* Add the return type. 
*/ - JS_ASSERT_IF(*cx->fp->regs->pc != JSOP_RETURN, *cx->fp->regs->pc == JSOP_STOP); - if (*cx->fp->regs->pc == JSOP_RETURN) + JS_ASSERT_IF(*cx->regs->pc != JSOP_RETURN, *cx->regs->pc == JSOP_STOP); + if (*cx->regs->pc == JSOP_RETURN) exitTypeMap[downPostSlots] = determineSlotType(&stackval(-1)); else exitTypeMap[downPostSlots] = TT_VOID; @@ -198,12 +198,21 @@ TraceRecorder::downSnapshot(FrameInfo* downFrame) return exit; } +static JS_REQUIRES_STACK jsval * +DownFrameSP(JSContext *cx) +{ + FrameRegsIter i(cx); + ++i; + JS_ASSERT(i.fp() == cx->fp->down); + return i.sp(); +} + JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::upRecursion() { - JS_ASSERT((JSOp)*cx->fp->down->regs->pc == JSOP_CALL); + JS_ASSERT((JSOp)*cx->fp->down->savedPC == JSOP_CALL); JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->down->script, - cx->fp->down->regs->pc)].length == JSOP_CALL_LENGTH); + cx->fp->down->savedPC)].length == JSOP_CALL_LENGTH); JS_ASSERT(callDepth == 0); @@ -215,10 +224,10 @@ TraceRecorder::upRecursion() if (anchor && (anchor->exitType == RECURSIVE_EMPTY_RP_EXIT || anchor->exitType == RECURSIVE_SLURP_MISMATCH_EXIT || anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)) { - return slurpDownFrames(cx->fp->down->regs->pc); + return slurpDownFrames(cx->fp->down->savedPC); } - jsbytecode* return_pc = cx->fp->down->regs->pc; + jsbytecode* return_pc = cx->fp->down->savedPC; jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH; /* @@ -245,7 +254,7 @@ TraceRecorder::upRecursion() * Need to compute this from the down frame, since the stack could have * moved on this one. */ - fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots(); + fi->spdist = DownFrameSP(cx) - cx->fp->down->slots(); JS_ASSERT(cx->fp->argc == cx->fp->down->argc); fi->set_argc(uint16(cx->fp->argc), false); fi->callerHeight = downPostSlots; @@ -308,7 +317,7 @@ TraceRecorder::upRecursion() exit = downSnapshot(fi); LIns* rval_ins; - if (*cx->fp->regs->pc == JSOP_RETURN) { + if (*cx->regs->pc == JSOP_RETURN) { JS_ASSERT(!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT); rval_ins = get(&stackval(-1)); JS_ASSERT(rval_ins); @@ -318,7 +327,7 @@ TraceRecorder::upRecursion() TraceType returnType = exit->stackTypeMap()[downPostSlots]; if (returnType == TT_INT32) { - JS_ASSERT(*cx->fp->regs->pc == JSOP_RETURN); + JS_ASSERT(*cx->regs->pc == JSOP_RETURN); JS_ASSERT(determineSlotType(&stackval(-1)) == TT_INT32); JS_ASSERT(isPromoteInt(rval_ins)); rval_ins = demote(lir, rval_ins); @@ -327,7 +336,7 @@ TraceRecorder::upRecursion() UpRecursiveSlotMap slotMap(*this, downPostSlots, rval_ins); for (unsigned i = 0; i < downPostSlots; i++) slotMap.addSlot(exit->stackType(i)); - if (*cx->fp->regs->pc == JSOP_RETURN) + if (*cx->regs->pc == JSOP_RETURN) slotMap.addSlot(&stackval(-1)); else slotMap.addSlot(TT_VOID); @@ -386,7 +395,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) unsigned frameDepth; unsigned downPostSlots; - JSStackFrame* fp = cx->fp; + FrameRegsIter i(cx); LIns* fp_ins = addName(lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp), ACC_OTHER), "fp"); @@ -399,7 +408,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) { fp_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, down), ACC_OTHER), "downFp"); - fp = fp->down; + ++i; argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv), ACC_OTHER), "argv"); @@ -427,14 +436,12 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) RECURSIVE_LOOP_EXIT); } - /* 
fp->down->regs->pc should be == pc. */ + /* fp->down->savedPC should be == pc. */ guard(true, lir->ins2(LIR_eqp, - lir->insLoad(LIR_ldp, - addName(lir->insLoad(LIR_ldp, fp_ins, - offsetof(JSStackFrame, regs), ACC_OTHER), - "regs"), - offsetof(JSFrameRegs, pc), ACC_OTHER), + addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, savedPC), + ACC_OTHER), + "savedPC"), INS_CONSTPTR(return_pc)), RECURSIVE_SLURP_MISMATCH_EXIT); @@ -489,13 +496,13 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) * thrown away. */ TraceType* typeMap = exit->stackTypeMap(); - jsbytecode* oldpc = cx->fp->regs->pc; - cx->fp->regs->pc = exit->pc; + jsbytecode* oldpc = cx->regs->pc; + cx->regs->pc = exit->pc; captureStackTypes(frameDepth, typeMap); - cx->fp->regs->pc = oldpc; + cx->regs->pc = oldpc; if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) { - JS_ASSERT_IF(*cx->fp->regs->pc != JSOP_RETURN, *cx->fp->regs->pc == JSOP_STOP); - if (*cx->fp->regs->pc == JSOP_RETURN) + JS_ASSERT_IF(*cx->regs->pc != JSOP_RETURN, *cx->regs->pc == JSOP_STOP); + if (*cx->regs->pc == JSOP_RETURN) typeMap[downPostSlots] = determineSlotType(&stackval(-1)); else typeMap[downPostSlots] = TT_VOID; @@ -522,10 +529,10 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) { /* - * It is safe to read cx->fp->regs->pc here because the frame hasn't + * It is safe to read cx->regs->pc here because the frame hasn't * been popped yet. We're guaranteed to have a return or stop. */ - JSOp op = JSOp(*cx->fp->regs->pc); + JSOp op = JSOp(*cx->regs->pc); JS_ASSERT(op == JSOP_RETURN || op == JSOP_STOP); if (op == JSOP_RETURN) { @@ -575,6 +582,8 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) info.slurpFailSlot = (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) ? anchor->slurpFailSlot : 0; + JSStackFrame *const fp = i.fp(); + /* callee */ slurpSlot(lir->insLoad(LIR_ldp, argv_ins, -2 * ptrdiff_t(sizeof(jsval)), ACC_OTHER), &fp->argv[-2], @@ -612,7 +621,8 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) slots_ins, INS_CONSTWORD(nfixed * sizeof(jsval))), "stackBase"); - size_t limit = size_t(fp->regs->sp - StackBase(fp)); + + size_t limit = size_t(i.sp() - StackBase(fp)); if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) limit--; else @@ -634,7 +644,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc) RecursiveSlotMap slotMap(*this, downPostSlots, rval_ins); for (unsigned i = 0; i < downPostSlots; i++) slotMap.addSlot(typeMap[i]); - if (*cx->fp->regs->pc == JSOP_RETURN) + if (*cx->regs->pc == JSOP_RETURN) slotMap.addSlot(&stackval(-1), typeMap[downPostSlots]); else slotMap.addSlot(TT_VOID); diff --git a/js/src/jsregexp.cpp b/js/src/jsregexp.cpp index bd6acd4b3581..6bb9de2a5f02 100644 --- a/js/src/jsregexp.cpp +++ b/js/src/jsregexp.cpp @@ -4783,7 +4783,7 @@ MatchRegExp(REGlobalData *gData, REMatchState *x) "entering REGEXP trace at %s:%u@%u, code: %p\n", caller ? caller->script->filename : "", caller ? js_FramePCToLineNumber(gData->cx, caller) : 0, - caller ? FramePCOffset(caller) : 0, + caller ? FramePCOffset(gData->cx, caller) : 0, JS_FUNC_TO_DATA_PTR(void *, native)); }) #endif diff --git a/js/src/jsscript.cpp b/js/src/jsscript.cpp index 487401c0a30f..e295eee0c7e3 100644 --- a/js/src/jsscript.cpp +++ b/js/src/jsscript.cpp @@ -1312,7 +1312,7 @@ js_GetSrcNoteCached(JSContext *cx, JSScript *script, jsbytecode *pc) uintN js_FramePCToLineNumber(JSContext *cx, JSStackFrame *fp) { - return js_PCToLineNumber(cx, fp->script, fp->imacpc ? 
fp->imacpc : fp->regs->pc); + return js_PCToLineNumber(cx, fp->script, fp->imacpc ? fp->imacpc : fp->pc(cx)); } uintN diff --git a/js/src/jstracer.cpp b/js/src/jstracer.cpp index 79b1432d1d09..b21cf1d59990 100644 --- a/js/src/jstracer.cpp +++ b/js/src/jstracer.cpp @@ -1174,7 +1174,7 @@ Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) JS_REQUIRES_STACK void Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot) { - markStackSlotUndemotable(cx, slot, cx->fp->regs->pc); + markStackSlotUndemotable(cx, slot, cx->regs->pc); } /* Consult with the oracle whether we shouldn't demote a certain slot. */ @@ -1191,7 +1191,7 @@ Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) con JS_REQUIRES_STACK bool Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const { - return isStackSlotUndemotable(cx, slot, cx->fp->regs->pc); + return isStackSlotUndemotable(cx, slot, cx->regs->pc); } /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */ @@ -1463,12 +1463,12 @@ TreeFragment::initialize(JSContext* cx, SlotList *globalSlots) /* Capture the coerced type of each active slot in the type map. */ this->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */); this->nStackTypes = this->typeMap.length() - globalSlots->length(); - this->spOffsetAtEntry = cx->fp->regs->sp - StackBase(cx->fp); + this->spOffsetAtEntry = cx->regs->sp - StackBase(cx->fp); #ifdef DEBUG this->treeFileName = cx->fp->script->filename; this->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp); - this->treePCOffset = FramePCOffset(cx->fp); + this->treePCOffset = FramePCOffset(cx, cx->fp); #endif this->script = cx->fp->script; this->recursion = Recursion_None; @@ -1478,7 +1478,7 @@ TreeFragment::initialize(JSContext* cx, SlotList *globalSlots) this->sideExits.clear(); /* Determine the native frame layout at the entry point. 
*/ - this->nativeStackBase = (nStackTypes - (cx->fp->regs->sp - StackBase(cx->fp))) * + this->nativeStackBase = (nStackTypes - (cx->regs->sp - StackBase(cx->fp))) * sizeof(double); this->maxNativeStackSlots = nStackTypes; this->maxCallDepth = 0; @@ -1733,10 +1733,13 @@ public: */ template static JS_REQUIRES_STACK bool -VisitFrameSlots(Visitor &visitor, unsigned depth, JSStackFrame *fp, - JSStackFrame *up) +VisitFrameSlots(Visitor &visitor, JSContext *cx, unsigned depth, + FrameRegsIter &i, JSStackFrame *up) { - if (depth > 0 && !VisitFrameSlots(visitor, depth-1, fp->down, fp)) + JSStackFrame *const fp = i.fp(); + jsval *const sp = i.sp(); + + if (depth > 0 && !VisitFrameSlots(visitor, cx, depth-1, ++i, fp)) return false; if (fp->argv) { @@ -1758,18 +1761,17 @@ VisitFrameSlots(Visitor &visitor, unsigned depth, JSStackFrame *fp, if (!visitor.visitStackSlots(fp->slots(), fp->script->nfixed, fp)) return false; } + visitor.setStackSlotKind("stack"); - JS_ASSERT(fp->regs->sp >= StackBase(fp)); - if (!visitor.visitStackSlots(StackBase(fp), - size_t(fp->regs->sp - StackBase(fp)), - fp)) { + jsval *base = StackBase(fp); + JS_ASSERT(sp >= base && sp <= fp->slots() + fp->script->nslots); + if (!visitor.visitStackSlots(base, size_t(sp - base), fp)) return false; - } if (up) { int missing = up->fun->nargs - up->argc; if (missing > 0) { visitor.setStackSlotKind("missing"); - if (!visitor.visitStackSlots(fp->regs->sp, size_t(missing), fp)) + if (!visitor.visitStackSlots(sp, size_t(missing), fp)) return false; } } @@ -1784,7 +1786,8 @@ template static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool VisitStackSlots(Visitor &visitor, JSContext *cx, unsigned callDepth) { - return VisitFrameSlots(visitor, callDepth, cx->fp, NULL); + FrameRegsIter i(cx); + return VisitFrameSlots(visitor, cx, callDepth, i, NULL); } template @@ -1913,16 +1916,16 @@ public: JS_REQUIRES_STACK unsigned NativeStackSlots(JSContext *cx, unsigned callDepth) { - JSStackFrame* fp = cx->fp; + FrameRegsIter i(cx); unsigned slots = 0; unsigned depth = callDepth; - for (;;) { + for (;; ++i) { /* * Duplicate native stack layout computation: see VisitFrameSlots * header comment. 
*/ - unsigned operands = fp->regs->sp - StackBase(fp); - slots += operands; + JSStackFrame *const fp = i.fp(); + slots += i.sp() - StackBase(fp); if (fp->argv) slots += fp->script->nfixed + SPECIAL_FRAME_SLOTS; if (depth-- == 0) { @@ -1935,9 +1938,7 @@ NativeStackSlots(JSContext *cx, unsigned callDepth) #endif return slots; } - JSStackFrame* fp2 = fp; - fp = fp->down; - int missing = fp2->fun->nargs - fp2->argc; + int missing = fp->fun->nargs - fp->argc; if (missing > 0) slots += missing; } @@ -2170,7 +2171,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* frag { JS_ASSERT(globalObj == cx->fp->scopeChain->getGlobal()); JS_ASSERT(globalObj->scope()->hasOwnShape()); - JS_ASSERT(cx->fp->regs->pc == (jsbytecode*)fragment->ip); + JS_ASSERT(cx->regs->pc == (jsbytecode*)fragment->ip); fragment->lirbuf = lirbuf; #ifdef DEBUG @@ -2393,7 +2394,7 @@ TraceRecorder::finishAbort(const char* reason) tree->treePCOffset, cx->fp->script->filename, js_FramePCToLineNumber(cx, cx->fp), - FramePCOffset(cx->fp), + FramePCOffset(cx, cx->fp), reason); #endif Backoff(cx, (jsbytecode*) fragment->root->ip, fragment->root); @@ -2530,7 +2531,7 @@ TraceRecorder::nativeStackOffset(jsval* p) const */ if (!visitor.stopped()) { JS_ASSERT(size_t(p - cx->fp->slots()) < cx->fp->script->nslots); - offset += size_t(p - cx->fp->regs->sp) * sizeof(double); + offset += size_t(p - cx->regs->sp) * sizeof(double); } return offset; } @@ -3491,7 +3492,7 @@ TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigne JS_REQUIRES_STACK bool TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop) { - uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); + uint32 setflags = (js_CodeSpec[*cx->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); if (setflags) { if (!sprop->hasDefaultSetter()) @@ -3901,8 +3902,8 @@ TreevisLogExit(JSContext* cx, VMSideExit* exit) { debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\"" " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType), - (void*)exit->from, (void*)cx->fp->regs->pc, cx->fp->script->filename, - js_FramePCToLineNumber(cx, cx->fp), FramePCOffset(cx->fp)); + (void*)exit->from, (void*)cx->regs->pc, cx->fp->script->filename, + js_FramePCToLineNumber(cx, cx->fp), FramePCOffset(cx, cx->fp)); debug_only_print0(LC_TMTreeVis, " STACK=\""); for (unsigned i = 0; i < exit->numStackSlots; i++) debug_only_printf(LC_TMTreeVis, "%c", typeChar[exit->stackTypeMap()[i]]); @@ -3916,8 +3917,8 @@ TreevisLogExit(JSContext* cx, VMSideExit* exit) JS_REQUIRES_STACK VMSideExit* TraceRecorder::snapshot(ExitType exitType) { - JSStackFrame* fp = cx->fp; - JSFrameRegs* regs = fp->regs; + JSStackFrame* const fp = cx->fp; + JSFrameRegs* const regs = cx->regs; jsbytecode* pc = regs->pc; /* @@ -3984,7 +3985,7 @@ TraceRecorder::snapshot(ExitType exitType) if (pendingUnboxSlot || (pendingSpecializedNative && (pendingSpecializedNative->flags & JSTN_UNBOX_AFTER))) { unsigned pos = stackSlots - 1; - if (pendingUnboxSlot == cx->fp->regs->sp - 2) + if (pendingUnboxSlot == cx->regs->sp - 2) pos = stackSlots - 2; typemap[pos] = TT_JSVAL; } @@ -4601,8 +4602,8 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit) * to be in an imacro here and the opcode should be either JSOP_TRACE or, in * case this loop was blacklisted in the meantime, JSOP_NOP. 
*/ - JS_ASSERT((*cx->fp->regs->pc == JSOP_TRACE || *cx->fp->regs->pc == JSOP_NOP || - *cx->fp->regs->pc == JSOP_RETURN || *cx->fp->regs->pc == JSOP_STOP) && + JS_ASSERT((*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOP || + *cx->regs->pc == JSOP_RETURN || *cx->regs->pc == JSOP_STOP) && !cx->fp->imacpc); if (callDepth != 0) { @@ -4728,7 +4729,7 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit) "Recording completed at %s:%u@%u via closeLoop (FragID=%06u)\n", cx->fp->script->filename, js_FramePCToLineNumber(cx, cx->fp), - FramePCOffset(cx->fp), + FramePCOffset(cx, cx->fp), fragment->profFragID); debug_only_print0(LC_TMMinimal, "\n"); #endif @@ -4902,7 +4903,7 @@ TraceRecorder::endLoop(VMSideExit* exit) "Recording completed at %s:%u@%u via endLoop (FragID=%06u)\n", cx->fp->script->filename, js_FramePCToLineNumber(cx, cx->fp), - FramePCOffset(cx->fp), + FramePCOffset(cx, cx->fp), fragment->profFragID); debug_only_print0(LC_TMTracer, "\n"); #endif @@ -5212,15 +5213,15 @@ TraceRecorder::checkTraceEnd(jsbytecode *pc) * pointer and pretend we have reached the loop header. */ if (pendingLoop) { - JS_ASSERT(!cx->fp->imacpc && (pc == cx->fp->regs->pc || pc == cx->fp->regs->pc + 1)); - JSFrameRegs orig = *cx->fp->regs; + JS_ASSERT(!cx->fp->imacpc && (pc == cx->regs->pc || pc == cx->regs->pc + 1)); + JSFrameRegs orig = *cx->regs; - cx->fp->regs->pc = (jsbytecode*)tree->ip; - cx->fp->regs->sp = StackBase(cx->fp) + tree->spOffsetAtEntry; + cx->regs->pc = (jsbytecode*)tree->ip; + cx->regs->sp = StackBase(cx->fp) + tree->spOffsetAtEntry; JSContext* localcx = cx; AbortableRecordingStatus ars = closeLoop(); - *localcx->fp->regs = orig; + *localcx->regs = orig; return ars; } @@ -5393,8 +5394,8 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee) jsval* vp = sp - (2 + argc); /* Fixup |fp| using |fi|. */ - fp->regs->sp = sp; - fp->regs->pc = fi.pc; + cx->regs->sp = sp; + cx->regs->pc = fi.pc; fp->imacpc = fi.imacpc; fp->blockChain = fi.block; @@ -5458,18 +5459,12 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee) */ newfp->callerVersion = (JSVersion) fp->script->version; - /* - * Weave regs like JSOP_CALL/JSOP_STOP. |fp->regs| should point to the - * |regs| variable in the innermost js_Interpret activation. - */ - newfp->callerRegs = *fp->regs; - newfp->regs = fp->regs; - fp->regs = &newfp->callerRegs; - newfp->regs->pc = newscript->code; - newfp->regs->sp = newfp->slots() + newscript->nfixed; - /* Push inline frame. (Copied from js_Interpret.) */ - stack.pushInlineFrame(cx, fp, newfp); + stack.pushInlineFrame(cx, fp, fi.pc, newfp); + + /* Initialize regs after pushInlineFrame snapshots pc. */ + cx->regs->pc = newscript->code; + cx->regs->sp = StackBase(newfp); /* * If there's a call hook, invoke it to compute the hookData used by @@ -5514,7 +5509,6 @@ SynthesizeSlowNativeFrame(TracerState& state, JSContext *cx, VMSideExit *exit) JS_ASSERT(fun->u.n.extra == 0); #endif - fp->regs = NULL; fp->imacpc = NULL; fp->callobj = NULL; fp->argsobj = NULL; @@ -5531,7 +5525,12 @@ SynthesizeSlowNativeFrame(TracerState& state, JSContext *cx, VMSideExit *exit) fp->flags = exit->constructing() ? 
JSFRAME_CONSTRUCTING : 0; fp->displaySave = NULL; - cx->stack().pushSynthesizedSlowNativeFrame(cx, cs, fp); + state.bailedSlowNativeRegs = *cx->regs; + + cx->stack().pushSynthesizedSlowNativeFrame(cx, cs, fp, state.bailedSlowNativeRegs); + + state.bailedSlowNativeRegs.pc = NULL; + state.bailedSlowNativeRegs.sp = fp->slots(); } static JS_REQUIRES_STACK bool @@ -5577,7 +5576,7 @@ RecordTree(JSContext* cx, TreeFragment* peer, jsbytecode* outer, #ifdef JS_JIT_SPEW debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d", (void*)f, f->ip, f->treeFileName, f->treeLineNumber, - FramePCOffset(cx->fp)); + FramePCOffset(cx, cx->fp)); debug_only_print0(LC_TMTreeVis, " STACK=\""); for (unsigned i = 0; i < f->nStackTypes; i++) debug_only_printf(LC_TMTreeVis, "%c", typeChar[f->typeMap[i]]); @@ -5698,7 +5697,7 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb TrashTree(cx, from); return false; } - if (exit->recursive_pc != cx->fp->regs->pc) + if (exit->recursive_pc != cx->regs->pc) return false; from = LookupLoop(tm, exit->recursive_pc, globalObj, globalShape, cx->fp->argc); if (!from) @@ -5725,13 +5724,13 @@ CreateBranchFragment(JSContext* cx, TreeFragment* root, VMSideExit* anchor) ? (++(tm->lastFragID)) : 0; ) - VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->fp->regs->pc verbose_only(, profFragID)); + VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs->pc verbose_only(, profFragID)); debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\"" " LINE=%d ANCHOR=%p OFFS=%d\n", - (void*)root, (void*)f, (void*)cx->fp->regs->pc, cx->fp->script->filename, + (void*)root, (void*)f, (void*)cx->regs->pc, cx->fp->script->filename, js_FramePCToLineNumber(cx, cx->fp), (void*)anchor, - FramePCOffset(cx->fp)); + FramePCOffset(cx, cx->fp)); verbose_only( tm->branches = new (*tm->dataAlloc) Seq(f, tm->branches); ) f->root = root; @@ -5782,7 +5781,7 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j * might extend along separate paths (i.e. after the loop edge, and after a * return statement). */ - c->ip = cx->fp->regs->pc; + c->ip = cx->regs->pc; JS_ASSERT(c->root == f); } @@ -5874,7 +5873,7 @@ TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCall JS_ASSERT(r->fragment && !r->fragment->lastIns); TreeFragment* root = r->fragment->root; - TreeFragment* first = LookupOrAddLoop(tm, cx->fp->regs->pc, root->globalObj, + TreeFragment* first = LookupOrAddLoop(tm, cx->regs->pc, root->globalObj, root->globalShape, cx->fp->argc); /* @@ -5893,7 +5892,7 @@ TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCall "Looking for type-compatible peer (%s:%d@%d)\n", cx->fp->script->filename, js_FramePCToLineNumber(cx, cx->fp), - FramePCOffset(cx->fp)); + FramePCOffset(cx, cx->fp)); // Find a matching inner tree. If none can be found, compile one. 
TreeFragment* f = r->findNestedCompatiblePeer(first); @@ -6013,7 +6012,7 @@ TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount) } case OVERFLOW_EXIT: - oracle->markInstructionUndemotable(cx->fp->regs->pc); + oracle->markInstructionUndemotable(cx->regs->pc); /* FALL THROUGH */ case RECURSIVE_SLURP_FAIL_EXIT: case RECURSIVE_SLURP_MISMATCH_EXIT: @@ -6444,7 +6443,7 @@ ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount, "entering trace at %s:%u@%u, native stack slots: %u code: %p\n", cx->fp->script->filename, js_FramePCToLineNumber(cx, cx->fp), - FramePCOffset(cx->fp), + FramePCOffset(cx, cx->fp), f->maxNativeStackSlots, f->code()); @@ -6561,8 +6560,10 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) * * First, if we just returned from a slow native, pop its stack frame. */ - if (!cx->fp->script) + if (!cx->fp->script) { + JS_ASSERT(cx->regs == &state.bailedSlowNativeRegs); cx->stack().popSynthesizedSlowNativeFrame(cx); + } JS_ASSERT(cx->fp->script); if (!(bs & BUILTIN_ERROR)) { @@ -6578,7 +6579,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) * ExecuteTree. We are about to return to the interpreter. Adjust * the top stack frame to resume on the next op. */ - JSFrameRegs* regs = cx->fp->regs; + JSFrameRegs* regs = cx->regs; JSOp op = (JSOp) *regs->pc; JS_ASSERT(op == JSOP_CALL || op == JSOP_APPLY || op == JSOP_NEW || op == JSOP_GETPROP || op == JSOP_GETTHISPROP || op == JSOP_GETARGPROP || @@ -6672,7 +6673,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n", fp->script->filename, js_FramePCToLineNumber(cx, fp), - FramePCOffset(fp), + FramePCOffset(cx, fp), slots, (void*)*callstack); #endif @@ -6707,7 +6708,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) debug_only_printf(LC_TMTracer, "synthesized shallow frame for %s:%u@%u\n", fp->script->filename, js_FramePCToLineNumber(cx, fp), - FramePCOffset(fp)); + FramePCOffset(cx, fp)); #endif } @@ -6718,7 +6719,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) * first we recover fp->blockChain, which comes from the side exit * struct. */ - JSStackFrame* fp = cx->fp; + JSStackFrame* const fp = cx->fp; fp->blockChain = innermost->block; @@ -6726,12 +6727,12 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) * If we are not exiting from an inlined frame, the state->sp is spbase. * Otherwise spbase is whatever slots frames around us consume. */ - fp->regs->pc = innermost->pc; + cx->regs->pc = innermost->pc; fp->imacpc = innermost->imacpc; - fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots; + cx->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots; JS_ASSERT_IF(!fp->imacpc, fp->slots() + fp->script->nfixed + - js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp); + js_ReconstructStackDepth(cx, fp->script, cx->regs->pc) == cx->regs->sp); #ifdef EXECUTE_TREE_TIMER uint64 cycles = rdtsc() - state.startTime; @@ -6744,11 +6745,11 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr) "calldepth=%d, cycles=%llu\n", fp->script->filename, js_FramePCToLineNumber(cx, fp), - FramePCOffset(fp), - js_CodeName[fp->imacpc ? *fp->imacpc : *fp->regs->pc], + FramePCOffset(cx, fp), + js_CodeName[fp->imacpc ? 
*fp->imacpc : *cx->regs->pc], (void*)lr, getExitName(lr->exitType), - (long long int)(fp->regs->sp - StackBase(fp)), + (long long int)(cx->regs->sp - StackBase(fp)), calldepth, (unsigned long long int)cycles); @@ -6835,7 +6836,7 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason) /* Is the recorder currently active? */ if (tm->recorder) { - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; if (pc == tm->recorder->tree->ip) { tm->recorder->closeLoop(); } else { @@ -6856,7 +6857,7 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason) * interpreter and do not attempt to trigger or record a new tree at * this location. */ - if (pc != cx->fp->regs->pc) { + if (pc != cx->regs->pc) { #ifdef MOZ_TRACEVIS tvso.r = R_INNER_SIDE_EXIT; #endif @@ -6875,7 +6876,7 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason) SlotList* globalSlots = NULL; if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) { - Backoff(cx, cx->fp->regs->pc); + Backoff(cx, cx->regs->pc); return MONITOR_NOT_RECORDING; } @@ -6887,7 +6888,7 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason) return MONITOR_NOT_RECORDING; } - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; uint32 argc = cx->fp->argc; TreeFragment* f = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc); @@ -6928,7 +6929,7 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason) debug_only_printf(LC_TMTracer, "Looking for compat peer %d@%d, from %p (ip: %p)\n", js_FramePCToLineNumber(cx, cx->fp), - FramePCOffset(cx->fp), (void*)f, f->ip); + FramePCOffset(cx, cx->fp), (void*)f, f->ip); uintN count; TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, f, count); @@ -6986,7 +6987,7 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason) return RecordingIfTrue(rv); case OVERFLOW_EXIT: - tm->oracle->markInstructionUndemotable(cx->fp->regs->pc); + tm->oracle->markInstructionUndemotable(cx->regs->pc); /* FALL THROUGH */ case RECURSIVE_SLURP_FAIL_EXIT: case RECURSIVE_SLURP_MISMATCH_EXIT: @@ -7072,9 +7073,9 @@ TraceRecorder::monitorRecording(JSOp op) debug_only_stmt( if (LogController.lcbits & LC_TMRecorder) { - js_Disassemble1(cx, cx->fp->script, cx->fp->regs->pc, + js_Disassemble1(cx, cx->fp->script, cx->regs->pc, cx->fp->imacpc - ? 0 : cx->fp->regs->pc - cx->fp->script->code, + ? 
0 : cx->regs->pc - cx->fp->script->code, !cx->fp->imacpc, stdout); } ) @@ -7721,8 +7722,7 @@ TraceRecorder::varval(unsigned n) const JS_REQUIRES_STACK jsval& TraceRecorder::stackval(int n) const { - jsval* sp = cx->fp->regs->sp; - return sp[n]; + return cx->regs->sp[n]; } /* @@ -7793,7 +7793,7 @@ TraceRecorder::scopeChainProp(JSObject* chainHead, jsval*& vp, LIns*& ins, NameR TraceMonitor &localtm = *traceMonitor; - JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)]; + JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)]; JSObject* obj2; JSProperty* prop; JSObject *obj = chainHead; @@ -7865,7 +7865,7 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, jsval*& vp, { JSScopeProperty *sprop = (JSScopeProperty*) prop; - JSOp op = JSOp(*cx->fp->regs->pc); + JSOp op = JSOp(*cx->regs->pc); uint32 setflags = (js_CodeSpec[op].format & (JOF_SET | JOF_INCDEC | JOF_FOR)); if (setflags && !sprop->writable()) RETURN_STOP("writing to a read-only property"); @@ -8033,7 +8033,7 @@ TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1) * integers and the oracle must not give us a negative hint for the * instruction. */ - if (oracle->isInstructionUndemotable(cx->fp->regs->pc) || !isPromoteInt(s0) || !isPromoteInt(s1)) { + if (oracle->isInstructionUndemotable(cx->regs->pc) || !isPromoteInt(s0) || !isPromoteInt(s1)) { out: if (v == LIR_modd) { LIns* args[] = { s1, s0 }; @@ -8300,7 +8300,7 @@ TraceRecorder::callImacroInfallibly(jsbytecode* imacro) { JSStackFrame* fp = cx->fp; JS_ASSERT(!fp->imacpc); - JSFrameRegs* regs = fp->regs; + JSFrameRegs* regs = cx->regs; fp->imacpc = regs->pc; regs->pc = imacro; atoms = COMMON_ATOMS_START(&cx->runtime->atomState); @@ -8339,7 +8339,7 @@ TraceRecorder::ifop() return ARECORD_STOP; } - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; emitIf(pc, cond, x); return checkTraceEnd(pc); } @@ -8364,7 +8364,7 @@ TraceRecorder::tableswitch() if (v_ins->isImmI()) return ARECORD_CONTINUE; - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; /* Starting a new trace after exiting a trace via switch. */ if (anchor && (anchor->exitType == CASE_EXIT || anchor->exitType == DEFAULT_EXIT) && @@ -8467,7 +8467,7 @@ TraceRecorder::inc(jsval v, LIns*& v_ins, jsint incr, bool pre) LIns* v_after; CHECK_STATUS(incHelper(v, v_ins, v_after, incr)); - const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc]; + const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc]; JS_ASSERT(cs.ndefs == 1); stack(-cs.nuses, pre ? v_after : v_ins); v_ins = v_after; @@ -8724,7 +8724,7 @@ TraceRecorder::equalityHelper(jsval& l, jsval& r, LIns* l_ins, LIns* r_ins, cond = !cond; } - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; /* * Don't guard if the same path is always taken. If it isn't, we have to @@ -8866,7 +8866,7 @@ TraceRecorder::relational(LOpcode op, bool tryBranchAfterCond) } x = lir->ins2(op, l_ins, r_ins); - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; /* * Don't guard if the same path is always taken. 
If it isn't, we have to @@ -9112,7 +9112,7 @@ TraceRecorder::map_is_native(JSObjectMap* map, LIns* map_ins, LIns*& ops_ins, si JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, PCVal& pcval) { - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_INITMETHOD && *pc != JSOP_SETNAME && *pc != JSOP_SETPROP && *pc != JSOP_SETMETHOD); @@ -9224,7 +9224,7 @@ TraceRecorder::guardPropertyCacheHit(LIns* obj_ins, if (entry->adding()) RETURN_STOP("adding a property to the global object"); - JSOp op = js_GetOpcode(cx, cx->fp->script, cx->fp->regs->pc); + JSOp op = js_GetOpcode(cx, cx->fp->script, cx->regs->pc); if (JOF_OPMODE(op) != JOF_NAME) { guard(true, addName(lir->ins2(LIR_eqp, obj_ins, INS_CONSTOBJ(globalObj)), "guard_global"), @@ -9729,7 +9729,7 @@ IsTraceableRecursion(JSContext *cx) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_EnterFrame(uintN& inlineCallCount) { - JSStackFrame* fp = cx->fp; + JSStackFrame* const fp = cx->fp; if (++callDepth >= MAX_CALLDEPTH) RETURN_STOP_A("exceeded maximum call depth"); @@ -9824,7 +9824,7 @@ TraceRecorder::record_EnterFrame(uintN& inlineCallCount) RETURN_STOP_A("recursion started inlining"); } - TreeFragment* first = LookupLoop(&JS_TRACE_MONITOR(cx), fp->regs->pc, tree->globalObj, + TreeFragment* first = LookupLoop(&JS_TRACE_MONITOR(cx), cx->regs->pc, tree->globalObj, tree->globalShape, fp->argc); if (!first) return ARECORD_CONTINUE; @@ -9879,7 +9879,7 @@ TraceRecorder::record_LeaveFrame() ); JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->script, - cx->fp->regs->pc)].length == JSOP_CALL_LENGTH); + cx->regs->pc)].length == JSOP_CALL_LENGTH); if (callDepth-- <= 0) RETURN_STOP_A("returned out of a loop we started tracing"); @@ -9965,7 +9965,7 @@ TraceRecorder::record_JSOP_GOTO() * generate an always-taken loop exit guard. For other downward gotos * (like if/else) continue recording. */ - jssrcnote* sn = js_GetSrcNote(cx->fp->script, cx->fp->regs->pc); + jssrcnote* sn = js_GetSrcNote(cx->fp->script, cx->regs->pc); if (sn && (SN_TYPE(sn) == SRC_BREAK || SN_TYPE(sn) == SRC_CONT2LABEL)) { AUDIT(breakLoopExits); @@ -9977,7 +9977,7 @@ TraceRecorder::record_JSOP_GOTO() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_IFEQ() { - trackCfgMerges(cx->fp->regs->pc); + trackCfgMerges(cx->regs->pc); return ifop(); } @@ -10075,8 +10075,8 @@ TraceRecorder::record_JSOP_SWAP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_PICK() { - jsval* sp = cx->fp->regs->sp; - jsint n = cx->fp->regs->pc[1]; + jsval* sp = cx->regs->sp; + jsint n = cx->regs->pc[1]; JS_ASSERT(sp - (n+1) >= StackBase(cx->fp)); LIns* top = get(sp - (n+1)); for (jsint i = 0; i < n; ++i) @@ -10265,7 +10265,7 @@ TraceRecorder::record_JSOP_NEG() * a double. Only follow this path if we're not an integer that's 0 and * we're not a double that's zero. 
*/ - if (!oracle->isInstructionUndemotable(cx->fp->regs->pc) && + if (!oracle->isInstructionUndemotable(cx->regs->pc) && isPromoteInt(a) && (!JSVAL_IS_INT(v) || JSVAL_TO_INT(v) != 0) && (!JSVAL_IS_DOUBLE(v) || !JSDOUBLE_IS_NEGZERO(*JSVAL_TO_DOUBLE(v))) && @@ -10624,8 +10624,8 @@ JS_REQUIRES_STACK RecordingStatus TraceRecorder::callSpecializedNative(JSNativeTraceInfo *trcinfo, uintN argc, bool constructing) { - JSStackFrame* fp = cx->fp; - jsbytecode *pc = fp->regs->pc; + JSStackFrame* const fp = cx->fp; + jsbytecode *pc = cx->regs->pc; jsval& fval = stackval(0 - (2 + argc)); jsval& tval = stackval(0 - (1 + argc)); @@ -11011,8 +11011,8 @@ TraceRecorder::functionCall(uintN argc, JSOp mode) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_NEW() { - uintN argc = GET_ARGC(cx->fp->regs->pc); - cx->fp->assertValidStackDepth(argc + 2); + uintN argc = GET_ARGC(cx->regs->pc); + cx->assertValidStackDepth(argc + 2); return InjectStatus(functionCall(argc, JSOP_NEW)); } @@ -11481,7 +11481,7 @@ TraceRecorder::record_SetPropHit(PropertyCacheEntry* entry, JSScopeProperty* spr jsval& l = stackval(-2); LIns* v_ins; - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; bool isDefinitelyAtom = (*pc == JSOP_SETPROP); CHECK_STATUS_A(setProp(l, entry, sprop, r, v_ins, isDefinitelyAtom)); @@ -11531,7 +11531,7 @@ TraceRecorder::finishGetProp(LIns* obj_ins, LIns* vp_ins, LIns* ok_ins, jsval* o // these slots will be ignored anyway. LIns* result_ins = lir->insLoad(LIR_ldp, vp_ins, 0, ACC_OTHER); set(outp, result_ins); - if (js_CodeSpec[*cx->fp->regs->pc].format & JOF_CALLOP) + if (js_CodeSpec[*cx->regs->pc].format & JOF_CALLOP) set(outp + 1, obj_ins); // We need to guard on ok_ins, but this requires a snapshot of the state @@ -11675,7 +11675,7 @@ TraceRecorder::getPropertyById(LIns* obj_ins, jsval* outp) { // Find the atom. JSAtom* atom; - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; const JSCodeSpec& cs = js_CodeSpec[*pc]; if (*pc == JSOP_LENGTH) { atom = cx->runtime->atomState.lengthAtom; @@ -11755,8 +11755,8 @@ TraceRecorder::getPropertyWithScriptGetter(JSObject *obj, LIns* obj_ins, JSScope // the interpreter state and the tracker in the same way. This adjustment // is noted in imacros.jsasm with .fixup tags. jsval getter = sprop->getterValue(); - jsval*& sp = cx->fp->regs->sp; - switch (*cx->fp->regs->pc) { + jsval*& sp = cx->regs->sp; + switch (*cx->regs->pc) { case JSOP_GETPROP: sp++; sp[-1] = sp[-2]; @@ -11798,7 +11798,7 @@ static bool OkToTraceTypedArrays = false; JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETELEM() { - bool call = *cx->fp->regs->pc == JSOP_CALLELEM; + bool call = *cx->regs->pc == JSOP_CALLELEM; jsval& idx = stackval(-1); jsval& lval = stackval(-2); @@ -12097,7 +12097,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex) if (!JSVAL_IS_PRIMITIVE(idx)) RETURN_STOP_A("non-primitive index"); CHECK_STATUS_A(initOrSetPropertyByName(obj_ins, &idx, &v, - *cx->fp->regs->pc == JSOP_INITELEM)); + *cx->regs->pc == JSOP_INITELEM)); } else if (OkToTraceTypedArrays && js_IsTypedArray(obj)) { // Fast path: assigning to element of typed array. @@ -12212,7 +12212,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex) } } else if (JSVAL_TO_INT(idx) < 0 || !obj->isDenseArray()) { CHECK_STATUS_A(initOrSetPropertyByIndex(obj_ins, idx_ins, &v, - *cx->fp->regs->pc == JSOP_INITELEM)); + *cx->regs->pc == JSOP_INITELEM)); } else { // Fast path: assigning to element of dense array. 
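(Aside, not part of the patch: the hunks in this region all hinge on one invariant -- a single live JSFrameRegs now hangs off the context and is only meaningful for the innermost frame, while older frames answer pc queries from a value saved when they called out. The snippet below is a self-contained toy model of that invariant using invented Toy* types; it is not SpiderMonkey's real JSContext/JSStackFrame/fp->pc(cx) definitions.)

// Toy model only: invented types illustrating "one live regs per context".
#include <cstdint>

typedef uint8_t jsbytecode;

struct ToyRegs {
    const jsbytecode *pc;   // bytecode position of the innermost frame
    double *sp;             // operand-stack pointer of the innermost frame
};

struct ToyFrame {
    ToyFrame *down;              // caller (older) frame
    const jsbytecode *savedPC;   // pc this frame saved before calling out
};

struct ToyContext {
    ToyFrame *fp;    // innermost frame
    ToyRegs *regs;   // live registers, valid only for cx->fp
};

// Rough analogue of fp->pc(cx): the innermost frame reads the live pc off the
// context; any older frame reports the pc it saved when the call was made.
inline const jsbytecode *FramePC(const ToyContext *cx, const ToyFrame *fp)
{
    return fp == cx->fp ? cx->regs->pc : fp->savedPC;
}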
@@ -12248,7 +12248,7 @@ TraceRecorder::setElem(int lval_spindex, int idx_spindex, int v_spindex) guard(false, lir->insEqI_0(res_ins), MISMATCH_EXIT); } - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP) set(&lval, v_ins); @@ -12398,7 +12398,7 @@ TraceRecorder::stackLoad(LIns* base, AccSet accSet, uint8 type) JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETUPVAR() { - uintN index = GET_UINT16(cx->fp->regs->pc); + uintN index = GET_UINT16(cx->regs->pc); JSScript *script = cx->fp->script; JSUpvarArray* uva = script->upvars(); JS_ASSERT(index < uva->length); @@ -12425,7 +12425,7 @@ TraceRecorder::record_JSOP_GETDSLOT() JSObject* callee = cx->fp->calleeObject(); LIns* callee_ins = get(&cx->fp->argv[-2]); - unsigned index = GET_UINT16(cx->fp->regs->pc); + unsigned index = GET_UINT16(cx->regs->pc); LIns* dslots_ins = lir->insLoad(LIR_ldp, callee_ins, offsetof(JSObject, dslots), ACC_OTHER); LIns* v_ins = lir->insLoad(LIR_ldp, dslots_ins, index * sizeof(jsval), ACC_OTHER); @@ -12548,7 +12548,7 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, if (JSVAL_TO_OBJECT(fval)->getGlobal() != globalObj) RETURN_STOP("JSOP_CALL or JSOP_NEW crosses global scopes"); - JSStackFrame* fp = cx->fp; + JSStackFrame* const fp = cx->fp; // Generate a type map for the outgoing frame and stash it in the LIR unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */); @@ -12565,9 +12565,9 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, fi->block = fp->blockChain; if (fp->blockChain) tree->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain)); - fi->pc = fp->regs->pc; + fi->pc = cx->regs->pc; fi->imacpc = fp->imacpc; - fi->spdist = fp->regs->sp - fp->slots(); + fi->spdist = cx->regs->sp - fp->slots(); fi->set_argc(uint16(argc), constructing); fi->callerHeight = stackSlots - (2 + argc); fi->callerArgc = fp->argc; @@ -12595,8 +12595,8 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALL() { - uintN argc = GET_ARGC(cx->fp->regs->pc); - cx->fp->assertValidStackDepth(argc + 2); + uintN argc = GET_ARGC(cx->regs->pc); + cx->assertValidStackDepth(argc + 2); return InjectStatus(functionCall(argc, (cx->fp->imacpc && *cx->fp->imacpc == JSOP_APPLY) ? 
JSOP_APPLY @@ -12630,17 +12630,16 @@ static jsbytecode* call_imacro_table[] = { JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_APPLY() { - JSStackFrame* fp = cx->fp; - jsbytecode *pc = fp->regs->pc; + jsbytecode *pc = cx->regs->pc; uintN argc = GET_ARGC(pc); - cx->fp->assertValidStackDepth(argc + 2); + cx->assertValidStackDepth(argc + 2); - jsval* vp = fp->regs->sp - (argc + 2); + jsval* vp = cx->regs->sp - (argc + 2); jsuint length = 0; JSObject* aobj = NULL; LIns* aobj_ins = NULL; - JS_ASSERT(!fp->imacpc); + JS_ASSERT(!cx->fp->imacpc); if (!VALUE_IS_FUNCTION(cx, vp[0])) return record_JSOP_CALL(); @@ -12717,7 +12716,7 @@ TraceRecorder::record_NativeCallComplete() if (pendingSpecializedNative == IGNORE_NATIVE_CALL_COMPLETE_CALLBACK) return ARECORD_CONTINUE; - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; JS_ASSERT(pendingSpecializedNative); JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW || *pc == JSOP_SETPROP); @@ -12772,7 +12771,7 @@ TraceRecorder::record_NativeCallComplete() * value is actually currently associated with that location, and that we are talking * about the top of the stack here, which is where we expected boxed values. */ - JS_ASSERT(&v == &cx->fp->regs->sp[-1] && get(&v) == v_ins); + JS_ASSERT(&v == &cx->regs->sp[-1] && get(&v) == v_ins); set(&v, unbox_jsval(v, v_ins, snapshot(BRANCH_EXIT))); } else if (JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_NEG) { /* Already added i2d in functionCall. */ @@ -12925,7 +12924,7 @@ JS_REQUIRES_STACK RecordingStatus TraceRecorder::propTail(JSObject* obj, LIns* obj_ins, JSObject* obj2, PCVal pcval, uint32 *slotp, LIns** v_insp, jsval *outp) { - const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc]; + const JSCodeSpec& cs = js_CodeSpec[*cx->regs->pc]; uint32 setflags = (cs.format & (JOF_INCDEC | JOF_FOR)); JS_ASSERT(!(cs.format & JOF_SET)); @@ -13197,7 +13196,7 @@ TraceRecorder::typedArrayElement(jsval& oval, jsval& ival, jsval*& vp, LIns*& v_ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::getProp(JSObject* obj, LIns* obj_ins) { - JSOp op = JSOp(*cx->fp->regs->pc); + JSOp op = JSOp(*cx->regs->pc); const JSCodeSpec& cs = js_CodeSpec[op]; JS_ASSERT(cs.ndefs == 1); @@ -13227,7 +13226,7 @@ TraceRecorder::record_JSOP_NAME() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_DOUBLE() { - jsval v = jsval(atoms[GET_INDEX(cx->fp->regs->pc)]); + jsval v = jsval(atoms[GET_INDEX(cx->regs->pc)]); stack(0, lir->insImmD(*JSVAL_TO_DOUBLE(v))); return ARECORD_CONTINUE; } @@ -13235,7 +13234,7 @@ TraceRecorder::record_JSOP_DOUBLE() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_STRING() { - JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)]; + JSAtom* atom = atoms[GET_INDEX(cx->regs->pc)]; JS_ASSERT(ATOM_IS_STRING(atom)); stack(0, INS_ATOM(atom)); return ARECORD_CONTINUE; @@ -13331,9 +13330,9 @@ TraceRecorder::record_JSOP_STRICTNE() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_OBJECT() { - JSStackFrame* fp = cx->fp; + JSStackFrame* const fp = cx->fp; JSScript* script = fp->script; - unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc); + unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc); JSObject* obj; obj = script->getObject(index); @@ -13356,49 +13355,49 @@ TraceRecorder::record_JSOP_TRAP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETARG() { - stack(0, arg(GET_ARGNO(cx->fp->regs->pc))); + stack(0, arg(GET_ARGNO(cx->regs->pc))); return 
ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_SETARG() { - arg(GET_ARGNO(cx->fp->regs->pc), stack(-1)); + arg(GET_ARGNO(cx->regs->pc), stack(-1)); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETLOCAL() { - stack(0, var(GET_SLOTNO(cx->fp->regs->pc))); + stack(0, var(GET_SLOTNO(cx->regs->pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_SETLOCAL() { - var(GET_SLOTNO(cx->fp->regs->pc), stack(-1)); + var(GET_SLOTNO(cx->regs->pc), stack(-1)); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_UINT16() { - stack(0, lir->insImmD(GET_UINT16(cx->fp->regs->pc))); + stack(0, lir->insImmD(GET_UINT16(cx->regs->pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_NEWINIT() { - JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc)); + JSProtoKey key = JSProtoKey(GET_INT8(cx->regs->pc)); LIns* proto_ins; CHECK_STATUS_A(getClassPrototype(key, proto_ins)); LIns* args[] = { proto_ins, cx_ins }; const CallInfo *ci = (key == JSProto_Array) ? &js_NewEmptyArray_ci - : (cx->fp->regs->pc[JSOP_NEWINIT_LENGTH] != JSOP_ENDINIT) + : (cx->regs->pc[JSOP_NEWINIT_LENGTH] != JSOP_ENDINIT) ? &js_NonEmptyObject_ci : &js_Object_tn_ci; LIns* v_ins = lir->insCall(ci, args); @@ -13445,49 +13444,49 @@ TraceRecorder::record_JSOP_USESHARP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INCARG() { - return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INCLOCAL() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_DECARG() { - return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_DECLOCAL() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGINC() { - return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1, false)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), 1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_LOCALINC() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1, false)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), 1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGDEC() { - return InjectStatus(inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1, false)); + return InjectStatus(inc(argval(GET_ARGNO(cx->regs->pc)), -1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_LOCALDEC() { - return InjectStatus(inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false)); + return InjectStatus(inc(varval(GET_SLOTNO(cx->regs->pc)), -1, false)); } JS_REQUIRES_STACK AbortableRecordingStatus @@ -13522,7 +13521,7 @@ TraceRecorder::record_JSOP_ITER() RETURN_IF_XML_A(v); LIns *obj_ins = get(&v); - jsuint flags = cx->fp->regs->pc[1]; + jsuint flags = cx->regs->pc[1]; enterDeepBailCall(); @@ -13615,7 +13614,7 @@ 
TraceRecorder::record_JSOP_MOREITER() INS_CONSTWORD(JSVAL_TRUE)); } - jsbytecode* pc = cx->fp->regs->pc; + jsbytecode* pc = cx->regs->pc; if (pc[1] == JSOP_IFNE) { fuseIf(pc + 1, cond, cond_ins); @@ -13748,7 +13747,7 @@ TraceRecorder::record_JSOP_FORARG() { LIns* v_ins; CHECK_STATUS_A(unboxNextValue(v_ins)); - arg(GET_ARGNO(cx->fp->regs->pc), v_ins); + arg(GET_ARGNO(cx->regs->pc), v_ins); return ARECORD_CONTINUE; } @@ -13757,7 +13756,7 @@ TraceRecorder::record_JSOP_FORLOCAL() { LIns* v_ins; CHECK_STATUS_A(unboxNextValue(v_ins)); - var(GET_SLOTNO(cx->fp->regs->pc), v_ins); + var(GET_SLOTNO(cx->regs->pc), v_ins); return ARECORD_CONTINUE; } @@ -13873,7 +13872,7 @@ TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *target JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_BINDNAME() { - JSStackFrame *fp = cx->fp; + JSStackFrame* const fp = cx->fp; JSObject *obj; if (!fp->fun) { @@ -13923,7 +13922,7 @@ TraceRecorder::record_JSOP_BINDNAME() LIns *obj_ins = stobj_get_parent(get(callee)); // Find the target object. - JSAtom *atom = atoms[GET_INDEX(cx->fp->regs->pc)]; + JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)]; jsid id = ATOM_TO_JSID(atom); JSObject *obj2 = js_FindIdentifierBase(cx, fp->scopeChain, id); if (obj2 != globalObj && obj2->getClass() != &js_CallClass) @@ -14005,7 +14004,7 @@ TraceRecorder::record_JSOP_IN() * The interpreter fuses comparisons and the following branch, so we have * to do that here as well. */ - fuseIf(cx->fp->regs->pc + 1, cond, x); + fuseIf(cx->regs->pc + 1, cond, x); /* * We update the stack after the guard. This is safe since the guard bails @@ -14154,7 +14153,7 @@ TraceRecorder::record_JSOP_DEFVAR() jsatomid TraceRecorder::getFullIndex(ptrdiff_t pcoff) { - jsatomid index = GET_INDEX(cx->fp->regs->pc + pcoff); + jsatomid index = GET_INDEX(cx->regs->pc + pcoff); index += atoms - cx->fp->script->atomMap.vector; return index; } @@ -14179,7 +14178,7 @@ TraceRecorder::record_JSOP_LAMBDA() if (FUN_NULL_CLOSURE(fun)) { if (FUN_OBJECT(fun)->getParent() != globalObj) RETURN_STOP_A("Null closure function object parent must be global object"); - JSOp op2 = JSOp(cx->fp->regs->pc[JSOP_LAMBDA_LENGTH]); + JSOp op2 = JSOp(cx->regs->pc[JSOP_LAMBDA_LENGTH]); if (op2 == JSOP_SETMETHOD) { jsval lval = stackval(-1); @@ -14263,7 +14262,7 @@ TraceRecorder::record_JSOP_CALLEE() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_SETLOCALPOP() { - var(GET_SLOTNO(cx->fp->regs->pc), stack(-1)); + var(GET_SLOTNO(cx->regs->pc), stack(-1)); return ARECORD_CONTINUE; } @@ -14304,9 +14303,9 @@ TraceRecorder::record_JSOP_NOP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARGSUB() { - JSStackFrame* fp = cx->fp; + JSStackFrame* const fp = cx->fp; if (!(fp->fun->flags & JSFUN_HEAVYWEIGHT)) { - uintN slot = GET_ARGNO(fp->regs->pc); + uintN slot = GET_ARGNO(cx->regs->pc); if (slot >= fp->argc) RETURN_STOP_A("can't trace out-of-range arguments"); stack(0, get(&cx->fp->argv[slot])); @@ -14476,7 +14475,7 @@ TraceRecorder::record_JSOP_RETRVAL() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETGVAR() { - jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)]; + jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->regs->pc)]; if (JSVAL_IS_NULL(slotval)) return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here. 
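(Aside, not part of the patch: the record_JSOP_* hunks around here mechanically replace cx->fp->regs->pc with cx->regs->pc when decoding operands such as GET_ARGNO and GET_SLOTNO. Below is a hedged toy decoder showing that pattern; the helper name and the 16-bit big-endian operand layout are assumptions for illustration, not quotes from jsopcode.h.)

// Toy sketch: read a 16-bit immediate straight off the context's live pc.
#include <cstdint>

typedef uint8_t jsbytecode;

struct ToyRegs { const jsbytecode *pc; };
struct ToyContext { ToyRegs *regs; };

// Assumed layout: opcode byte followed by a big-endian 16-bit operand.
static inline uint16_t ToyGetUint16(const jsbytecode *pc)
{
    return uint16_t((pc[1] << 8) | pc[2]);
}

// Mirrors the shape of e.g. record_JSOP_GETLOCAL after the patch: the slot
// number comes from cx->regs->pc, not from a per-frame copy of the registers.
static inline uint16_t CurrentSlotNumber(const ToyContext *cx)
{
    return ToyGetUint16(cx->regs->pc);
}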
@@ -14492,7 +14491,7 @@ TraceRecorder::record_JSOP_GETGVAR() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_SETGVAR() { - jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)]; + jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->regs->pc)]; if (JSVAL_IS_NULL(slotval)) return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here. @@ -14508,7 +14507,7 @@ TraceRecorder::record_JSOP_SETGVAR() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INCGVAR() { - jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)]; + jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->regs->pc)]; if (JSVAL_IS_NULL(slotval)) // We will see JSOP_INCNAME from the interpreter's jump, so no-op here. return ARECORD_CONTINUE; @@ -14524,7 +14523,7 @@ TraceRecorder::record_JSOP_INCGVAR() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_DECGVAR() { - jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)]; + jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->regs->pc)]; if (JSVAL_IS_NULL(slotval)) // We will see JSOP_INCNAME from the interpreter's jump, so no-op here. return ARECORD_CONTINUE; @@ -14540,7 +14539,7 @@ TraceRecorder::record_JSOP_DECGVAR() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GVARINC() { - jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)]; + jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->regs->pc)]; if (JSVAL_IS_NULL(slotval)) // We will see JSOP_INCNAME from the interpreter's jump, so no-op here. return ARECORD_CONTINUE; @@ -14556,7 +14555,7 @@ TraceRecorder::record_JSOP_GVARINC() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GVARDEC() { - jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)]; + jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->regs->pc)]; if (JSVAL_IS_NULL(slotval)) // We will see JSOP_INCNAME from the interpreter's jump, so no-op here. 
return ARECORD_CONTINUE; @@ -14572,9 +14571,9 @@ TraceRecorder::record_JSOP_GVARDEC() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_REGEXP() { - JSStackFrame* fp = cx->fp; + JSStackFrame* const fp = cx->fp; JSScript* script = fp->script; - unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc); + unsigned index = atoms - script->atomMap.vector + GET_INDEX(cx->regs->pc); LIns* proto_ins; CHECK_STATUS_A(getClassPrototype(JSProto_RegExp, proto_ins)); @@ -14828,14 +14827,14 @@ TraceRecorder::record_JSOP_DELDESC() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_UINT24() { - stack(0, lir->insImmD(GET_UINT24(cx->fp->regs->pc))); + stack(0, lir->insImmD(GET_UINT24(cx->regs->pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INDEXBASE() { - atoms += GET_INDEXBASE(cx->fp->regs->pc); + atoms += GET_INDEXBASE(cx->regs->pc); return ARECORD_CONTINUE; } @@ -14962,9 +14961,9 @@ TraceRecorder::record_JSOP_YIELD() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_ARRAYPUSH() { - uint32_t slot = GET_UINT16(cx->fp->regs->pc); + uint32_t slot = GET_UINT16(cx->regs->pc); JS_ASSERT(cx->fp->script->nfixed <= slot); - JS_ASSERT(cx->fp->slots() + slot < cx->fp->regs->sp - 1); + JS_ASSERT(cx->fp->slots() + slot < cx->regs->sp - 1); jsval &arrayval = cx->fp->slots()[slot]; JS_ASSERT(JSVAL_IS_OBJECT(arrayval)); JS_ASSERT(JSVAL_TO_OBJECT(arrayval)->isDenseArray()); @@ -14988,7 +14987,7 @@ JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_LEAVEBLOCKEXPR() { LIns* v_ins = stack(-1); - int n = -1 - GET_UINT16(cx->fp->regs->pc); + int n = -1 - GET_UINT16(cx->regs->pc); stack(n, v_ins); return ARECORD_CONTINUE; } @@ -15012,13 +15011,13 @@ TraceRecorder::record_JSOP_GETTHISPROP() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETARGPROP() { - return getProp(argval(GET_ARGNO(cx->fp->regs->pc))); + return getProp(argval(GET_ARGNO(cx->regs->pc))); } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_GETLOCALPROP() { - return getProp(varval(GET_SLOTNO(cx->fp->regs->pc))); + return getProp(varval(GET_SLOTNO(cx->regs->pc))); } JS_REQUIRES_STACK AbortableRecordingStatus @@ -15045,7 +15044,7 @@ TraceRecorder::record_JSOP_INDEXBASE3() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALLGVAR() { - jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)]; + jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->regs->pc)]; if (JSVAL_IS_NULL(slotval)) // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here. 
return ARECORD_CONTINUE; @@ -15064,7 +15063,7 @@ TraceRecorder::record_JSOP_CALLGVAR() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALLLOCAL() { - uintN slot = GET_SLOTNO(cx->fp->regs->pc); + uintN slot = GET_SLOTNO(cx->regs->pc); stack(0, var(slot)); stack(1, INS_NULL()); return ARECORD_CONTINUE; @@ -15073,7 +15072,7 @@ TraceRecorder::record_JSOP_CALLLOCAL() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CALLARG() { - uintN slot = GET_ARGNO(cx->fp->regs->pc); + uintN slot = GET_ARGNO(cx->regs->pc); stack(0, arg(slot)); stack(1, INS_NULL()); return ARECORD_CONTINUE; @@ -15088,14 +15087,14 @@ TraceRecorder::record_JSOP_UNUSED218() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INT8() { - stack(0, lir->insImmD(GET_INT8(cx->fp->regs->pc))); + stack(0, lir->insImmD(GET_INT8(cx->regs->pc))); return ARECORD_CONTINUE; } JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_INT32() { - stack(0, lir->insImmD(GET_INT32(cx->fp->regs->pc))); + stack(0, lir->insImmD(GET_INT32(cx->regs->pc))); return ARECORD_CONTINUE; } @@ -15167,8 +15166,8 @@ TraceRecorder::record_JSOP_NEWARRAY() LIns *proto_ins; CHECK_STATUS_A(getClassPrototype(JSProto_Array, proto_ins)); - uint32 len = GET_UINT16(cx->fp->regs->pc); - cx->fp->assertValidStackDepth(len); + uint32 len = GET_UINT16(cx->regs->pc); + cx->assertValidStackDepth(len); LIns* args[] = { lir->insImmI(len), proto_ins, cx_ins }; LIns* v_ins = lir->insCall(&js_NewArrayWithSlots_ci, args); @@ -15221,8 +15220,7 @@ TraceRecorder::record_JSOP_OBJTOSTR() JS_REQUIRES_STACK AbortableRecordingStatus TraceRecorder::record_JSOP_CONCATN() { - JSStackFrame *fp = cx->fp; - JSFrameRegs ®s = *fp->regs; + JSFrameRegs regs = *cx->regs; uint32 argc = GET_ARGC(regs.pc); jsval *argBase = regs.sp - argc; diff --git a/js/src/jsxml.cpp b/js/src/jsxml.cpp index b72eba272d24..ac6b589968a9 100644 --- a/js/src/jsxml.cpp +++ b/js/src/jsxml.cpp @@ -1754,7 +1754,6 @@ ParseXMLSource(JSContext *cx, JSString *src) JSXML *xml; const char *filename; uintN lineno; - JSStackFrame *fp; JSOp op; JSParseNode *pn; JSXMLArray nsarray; @@ -1797,13 +1796,16 @@ ParseXMLSource(JSContext *cx, JSString *src) &dstlen); chars [offset + dstlen] = 0; + LeaveTrace(cx); xml = NULL; - for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down) - JS_ASSERT(!fp->script); + FrameRegsIter i(cx); + for (; !i.done() && !i.pc(); ++i) + JS_ASSERT(!i.fp()->script); filename = NULL; lineno = 1; - if (fp) { - op = (JSOp) *fp->regs->pc; + if (!i.done()) { + JSStackFrame *fp = i.fp(); + op = (JSOp) *i.pc(); if (op == JSOP_TOXML || op == JSOP_TOXMLLIST) { filename = fp->script->filename; lineno = js_FramePCToLineNumber(cx, fp); @@ -7717,7 +7719,8 @@ js_StepXMLListFilter(JSContext *cx, JSBool initialized) JSXML *xml, *list; JSXMLFilter *filter; - sp = js_GetTopStackFrame(cx)->regs->sp; + LeaveTrace(cx); + sp = cx->regs->sp; if (!initialized) { /* * We haven't iterated yet, so initialize the filter based on the diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index 6caade9a24eb..3b712bc77047 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -3023,7 +3023,7 @@ EvalInContext(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, ok = JS_EvaluateUCScript(scx, sobj, src, srclen, fp->script->filename, JS_PCToLineNumber(cx, fp->script, - fp->regs->pc), + fp->pc(cx)), rval); } @@ -3061,13 +3061,13 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp) JS_ASSERT(cx->fp); - JSStackFrame *fp = cx->fp; - for (uint32 i = 0; i < upCount; 
++i) { - if (!fp->down) + FrameRegsIter fi(cx); + for (uint32 i = 0; i < upCount; ++i, ++fi) { + if (!fi.fp()->down) break; - fp = fp->down; } + JSStackFrame *const fp = fi.fp(); if (!fp->script) { JS_ReportError(cx, "cannot eval in non-script frame"); return JS_FALSE; } @@ -3080,7 +3080,7 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp) JSBool ok = JS_EvaluateUCInStackFrame(cx, fp, str->chars(), str->length(), fp->script->filename, JS_PCToLineNumber(cx, fp->script, - fp->regs->pc), + fi.pc()), vp); if (saveCurrent)