(unrebased) contiguous patch

Luke Wagner
2010-03-03 17:52:26 -08:00
parent d6c0d01461
commit 41091e16ea
30 changed files with 2167 additions and 1589 deletions

View File

@@ -89,6 +89,7 @@
#include "jsatominlines.h" #include "jsatominlines.h"
#include "jsscopeinlines.h" #include "jsscopeinlines.h"
#include "jscntxtinlines.h"
#if JS_HAS_XML_SUPPORT #if JS_HAS_XML_SUPPORT
#include "jsxml.h" #include "jsxml.h"
@@ -2295,8 +2296,6 @@ JS_GC(JSContext *cx)
LeaveTrace(cx); LeaveTrace(cx);
/* Don't nuke active arenas if executing or compiling. */ /* Don't nuke active arenas if executing or compiling. */
if (cx->stackPool.current == &cx->stackPool.first)
JS_FinishArenaPool(&cx->stackPool);
if (cx->tempPool.current == &cx->tempPool.first) if (cx->tempPool.current == &cx->tempPool.first)
JS_FinishArenaPool(&cx->tempPool); JS_FinishArenaPool(&cx->tempPool);
js_GC(cx, GC_NORMAL); js_GC(cx, GC_NORMAL);
@@ -4956,18 +4955,18 @@ JS_New(JSContext *cx, JSObject *ctor, uintN argc, jsval *argv)
// is not a simple variation of JSOP_CALL. We have to determine what class // is not a simple variation of JSOP_CALL. We have to determine what class
// of object to create, create it, and clamp the return value to an object, // of object to create, create it, and clamp the return value to an object,
// among other details. js_InvokeConstructor does the hard work. // among other details. js_InvokeConstructor does the hard work.
void *mark; InvokeArgsGuard args;
jsval *vp = js_AllocStack(cx, 2 + argc, &mark); if (!cx->stack().pushInvokeArgs(cx, argc, args))
if (!vp)
return NULL; return NULL;
jsval *vp = args.getvp();
vp[0] = OBJECT_TO_JSVAL(ctor); vp[0] = OBJECT_TO_JSVAL(ctor);
vp[1] = JSVAL_NULL; vp[1] = JSVAL_NULL;
memcpy(vp + 2, argv, argc * sizeof(jsval)); memcpy(vp + 2, argv, argc * sizeof(jsval));
JSBool ok = js_InvokeConstructor(cx, argc, JS_TRUE, vp); JSBool ok = js_InvokeConstructor(cx, args, JS_TRUE);
JSObject *obj = ok ? JSVAL_TO_OBJECT(vp[0]) : NULL; JSObject *obj = ok ? JSVAL_TO_OBJECT(vp[0]) : NULL;
js_FreeStack(cx, mark);
LAST_FRAME_CHECKS(cx, ok); LAST_FRAME_CHECKS(cx, ok);
return obj; return obj;
} }
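For reference, this is the invoke-args protocol that every former js_AllocStack/js_FreeStack caller is converted to in this patch: push rooted argument slots with pushInvokeArgs, fill them, invoke, and let the guard's destructor pop them. A minimal caller-side sketch (the callee/thisobj locals and the surrounding error handling are illustrative, not part of the patch):

    InvokeArgsGuard args;
    if (!cx->stack().pushInvokeArgs(cx, argc, args))   /* reserves 2 + argc rooted jsvals */
        return NULL;
    jsval *vp = args.getvp();
    vp[0] = OBJECT_TO_JSVAL(callee);                   /* callee goes in vp[0] */
    vp[1] = OBJECT_TO_JSVAL(thisobj);                  /* |this| goes in vp[1] */
    memcpy(vp + 2, argv, argc * sizeof(jsval));        /* actual arguments follow */
    JSBool ok = js_Invoke(cx, args, 0);                /* result is left in vp[0] */
    /* ~InvokeArgsGuard pops the slots when |args| goes out of scope. */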

View File

@@ -104,6 +104,7 @@
#include "jsatominlines.h" #include "jsatominlines.h"
#include "jsobjinlines.h" #include "jsobjinlines.h"
#include "jscntxtinlines.h"
using namespace js; using namespace js;
@@ -2000,11 +2001,16 @@ js_MergeSort(void *src, size_t nel, size_t elsize,
return JS_TRUE; return JS_TRUE;
} }
typedef struct CompareArgs { struct CompareArgs
JSContext *context; {
jsval fval; JSContext *context;
jsval *elemroot; /* stack needed for js_Invoke */ jsval fval;
} CompareArgs; InvokeArgsGuard args;
CompareArgs(JSContext *cx, jsval fval)
: context(cx), fval(fval)
{}
};
static JS_REQUIRES_STACK JSBool static JS_REQUIRES_STACK JSBool
sort_compare(void *arg, const void *a, const void *b, int *result) sort_compare(void *arg, const void *a, const void *b, int *result)
@@ -2012,9 +2018,8 @@ sort_compare(void *arg, const void *a, const void *b, int *result)
jsval av = *(const jsval *)a, bv = *(const jsval *)b; jsval av = *(const jsval *)a, bv = *(const jsval *)b;
CompareArgs *ca = (CompareArgs *) arg; CompareArgs *ca = (CompareArgs *) arg;
JSContext *cx = ca->context; JSContext *cx = ca->context;
jsval *invokevp, *sp;
/** /*
* array_sort deals with holes and undefs on its own and they should not * array_sort deals with holes and undefs on its own and they should not
* come here. * come here.
*/ */
@@ -2024,14 +2029,14 @@ sort_compare(void *arg, const void *a, const void *b, int *result)
if (!JS_CHECK_OPERATION_LIMIT(cx)) if (!JS_CHECK_OPERATION_LIMIT(cx))
return JS_FALSE; return JS_FALSE;
invokevp = ca->elemroot; jsval *invokevp = ca->args.getvp();
sp = invokevp; jsval *sp = invokevp;
*sp++ = ca->fval; *sp++ = ca->fval;
*sp++ = JSVAL_NULL; *sp++ = JSVAL_NULL;
*sp++ = av; *sp++ = av;
*sp++ = bv; *sp++ = bv;
if (!js_Invoke(cx, 2, invokevp, 0)) if (!js_Invoke(cx, ca->args, 0))
return JS_FALSE; return JS_FALSE;
jsdouble cmp; jsdouble cmp;
@@ -2279,22 +2284,17 @@ array_sort(JSContext *cx, uintN argc, jsval *vp)
} while (++i != newlen); } while (++i != newlen);
} }
} else { } else {
void *mark;
LeaveTrace(cx); LeaveTrace(cx);
CompareArgs ca; CompareArgs ca(cx, fval);
ca.context = cx; if (!cx->stack().pushInvokeArgs(cx, 2, ca.args))
ca.fval = fval;
ca.elemroot = js_AllocStack(cx, 2 + 2, &mark);
if (!ca.elemroot)
return false; return false;
bool ok = !!js_MergeSort(vec, size_t(newlen), sizeof(jsval),
comparator_stack_cast(sort_compare), if (!js_MergeSort(vec, size_t(newlen), sizeof(jsval),
&ca, mergesort_tmp); comparator_stack_cast(sort_compare),
js_FreeStack(cx, mark); &ca, mergesort_tmp)) {
if (!ok)
return false; return false;
}
} }
/* /*
@@ -3024,15 +3024,8 @@ typedef enum ArrayExtraMode {
static JSBool static JSBool
array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp) array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
{ {
JSObject *obj; JSObject *obj = JS_THIS_OBJECT(cx, vp);
jsuint length, newlen; jsuint length;
jsval *argv, *elemroot, *invokevp, *sp;
JSBool ok, cond, hole;
JSObject *callable, *thisp, *newarr;
jsint start, end, step, i;
void *mark;
obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !js_GetLengthProperty(cx, obj, &length)) if (!obj || !js_GetLengthProperty(cx, obj, &length))
return JS_FALSE; return JS_FALSE;
@@ -3044,8 +3037,8 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
js_ReportMissingArg(cx, vp, 0); js_ReportMissingArg(cx, vp, 0);
return JS_FALSE; return JS_FALSE;
} }
argv = vp + 2; jsval *argv = vp + 2;
callable = js_ValueToCallableObject(cx, &argv[0], JSV2F_SEARCH_STACK); JSObject *callable = js_ValueToCallableObject(cx, &argv[0], JSV2F_SEARCH_STACK);
if (!callable) if (!callable)
return JS_FALSE; return JS_FALSE;
@@ -3053,11 +3046,13 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
* Set our initial return condition, used for zero-length array cases * Set our initial return condition, used for zero-length array cases
* (and pre-size our map return to match our known length, for all cases). * (and pre-size our map return to match our known length, for all cases).
*/ */
jsuint newlen;
JSObject *newarr;
#ifdef __GNUC__ /* quell GCC overwarning */ #ifdef __GNUC__ /* quell GCC overwarning */
newlen = 0; newlen = 0;
newarr = NULL; newarr = NULL;
#endif #endif
start = 0, end = length, step = 1; jsint start = 0, end = length, step = 1;
switch (mode) { switch (mode) {
case REDUCE_RIGHT: case REDUCE_RIGHT:
@@ -3072,6 +3067,7 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
if (argc >= 2) { if (argc >= 2) {
*vp = argv[1]; *vp = argv[1];
} else { } else {
JSBool hole;
do { do {
if (!GetArrayElement(cx, obj, start, &hole, vp)) if (!GetArrayElement(cx, obj, start, &hole, vp))
return JS_FALSE; return JS_FALSE;
@@ -3107,6 +3103,7 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
if (length == 0) if (length == 0)
return JS_TRUE; return JS_TRUE;
JSObject *thisp;
if (argc > 1 && !REDUCE_MODE(mode)) { if (argc > 1 && !REDUCE_MODE(mode)) {
if (!js_ValueToObject(cx, argv[1], &thisp)) if (!js_ValueToObject(cx, argv[1], &thisp))
return JS_FALSE; return JS_FALSE;
@@ -3121,17 +3118,21 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
*/ */
LeaveTrace(cx); LeaveTrace(cx);
argc = 3 + REDUCE_MODE(mode); argc = 3 + REDUCE_MODE(mode);
elemroot = js_AllocStack(cx, 1 + 2 + argc, &mark);
if (!elemroot) InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, args))
return JS_FALSE; return JS_FALSE;
MUST_FLOW_THROUGH("out"); MUST_FLOW_THROUGH("out");
ok = JS_TRUE; JSBool ok = JS_TRUE;
invokevp = elemroot + 1; JSBool cond;
jsval *invokevp = args.getvp();
for (i = start; i != end; i += step) { AutoValueRooter tvr(cx);
for (jsint i = start; i != end; i += step) {
JSBool hole;
ok = JS_CHECK_OPERATION_LIMIT(cx) && ok = JS_CHECK_OPERATION_LIMIT(cx) &&
GetArrayElement(cx, obj, i, &hole, elemroot); GetArrayElement(cx, obj, i, &hole, tvr.addr());
if (!ok) if (!ok)
goto out; goto out;
if (hole) if (hole)
@@ -3139,21 +3140,21 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
/* /*
* Push callable and 'this', then args. We must do this for every * Push callable and 'this', then args. We must do this for every
* iteration around the loop since js_Invoke uses spbase[0] for return * iteration around the loop since js_Invoke uses invokevp[0] for return
* value storage, while some native functions use spbase[1] for local * value storage, while some native functions use invokevp[1] for local
* rooting. * rooting.
*/ */
sp = invokevp; jsval *sp = invokevp;
*sp++ = OBJECT_TO_JSVAL(callable); *sp++ = OBJECT_TO_JSVAL(callable);
*sp++ = OBJECT_TO_JSVAL(thisp); *sp++ = OBJECT_TO_JSVAL(thisp);
if (REDUCE_MODE(mode)) if (REDUCE_MODE(mode))
*sp++ = *vp; *sp++ = *vp;
*sp++ = *elemroot; *sp++ = tvr.value();
*sp++ = INT_TO_JSVAL(i); *sp++ = INT_TO_JSVAL(i);
*sp++ = OBJECT_TO_JSVAL(obj); *sp++ = OBJECT_TO_JSVAL(obj);
/* Do the call. */ /* Do the call. */
ok = js_Invoke(cx, argc, invokevp, 0); ok = js_Invoke(cx, args, 0);
if (!ok) if (!ok)
break; break;
@@ -3179,8 +3180,8 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
case FILTER: case FILTER:
if (!cond) if (!cond)
break; break;
/* The filter passed *elemroot, so push it onto our result. */ /* The filter passed *args.getvp(), so push it onto our result. */
ok = SetArrayElement(cx, newarr, newlen++, *elemroot); ok = SetArrayElement(cx, newarr, newlen++, tvr.value());
if (!ok) if (!ok)
goto out; goto out;
break; break;
@@ -3200,7 +3201,6 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
} }
out: out:
js_FreeStack(cx, mark);
if (ok && mode == FILTER) if (ok && mode == FILTER)
ok = js_SetLengthProperty(cx, newarr, newlen); ok = js_SetLengthProperty(cx, newarr, newlen);
return ok; return ok;
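Both sort_compare and array_extra rely on the property (documented in the jscntxt.h hunks below) that a single pushInvokeArgs allocation can back many js_Invoke calls, provided the callee and |this| slots are rewritten before each call. A rough sketch of that reuse, where fval, n, and elems are placeholders:

    InvokeArgsGuard args;
    if (!cx->stack().pushInvokeArgs(cx, 2, args))      /* 2 + 2 slots: callee, this, a, b */
        return false;
    for (size_t i = 0; i < n; i++) {
        jsval *vp = args.getvp();
        vp[0] = fval;                                  /* callee: the user's comparator */
        vp[1] = JSVAL_NULL;                            /* |this| */
        vp[2] = elems[2 * i];                          /* argument a */
        vp[3] = elems[2 * i + 1];                      /* argument b */
        if (!js_Invoke(cx, args, 0))
            return false;
        /* vp[0] now holds the result; consume it before the next iteration reuses the slots. */
    }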

View File

@@ -54,6 +54,7 @@
#include "jsstr.h" #include "jsstr.h"
#include "jsvector.h" #include "jsvector.h"
/* Check pseudo-boolean values. */ /* Check pseudo-boolean values. */
JS_STATIC_ASSERT(!(JSVAL_TRUE & JSVAL_HOLE_FLAG)); JS_STATIC_ASSERT(!(JSVAL_TRUE & JSVAL_HOLE_FLAG));
JS_STATIC_ASSERT(!(JSVAL_FALSE & JSVAL_HOLE_FLAG)); JS_STATIC_ASSERT(!(JSVAL_FALSE & JSVAL_HOLE_FLAG));

View File

@@ -63,6 +63,7 @@
#include "jsatominlines.h" #include "jsatominlines.h"
#include "jsobjinlines.h" #include "jsobjinlines.h"
#include "jsscopeinlines.h" #include "jsscopeinlines.h"
#include "jscntxtinlines.h"
using namespace avmplus; using namespace avmplus;
using namespace nanojit; using namespace nanojit;
@@ -350,36 +351,35 @@ JS_REQUIRES_STACK JSBool FASTCALL
js_PopInterpFrame(JSContext* cx, InterpState* state) js_PopInterpFrame(JSContext* cx, InterpState* state)
{ {
JS_ASSERT(cx->fp && cx->fp->down); JS_ASSERT(cx->fp && cx->fp->down);
JSInlineFrame* ifp = (JSInlineFrame*)cx->fp; JSStackFrame* const fp = cx->fp;
/* /*
* Mirror frame popping code from inline_return in js_Interpret. There are * Mirror frame popping code from inline_return in js_Interpret. There are
* some things we just don't want to handle. In those cases, the trace will * some things we just don't want to handle. In those cases, the trace will
* MISMATCH_EXIT. * MISMATCH_EXIT.
*/ */
if (ifp->hookData) if (fp->hookData)
return JS_FALSE; return JS_FALSE;
if (cx->version != ifp->callerVersion) if (cx->version != fp->callerVersion)
return JS_FALSE; return JS_FALSE;
if (cx->fp->flags & JSFRAME_CONSTRUCTING) if (fp->flags & JSFRAME_CONSTRUCTING)
return JS_FALSE; return JS_FALSE;
if (cx->fp->imacpc) if (fp->imacpc)
return JS_FALSE; return JS_FALSE;
if (cx->fp->blockChain) if (fp->blockChain)
return JS_FALSE; return JS_FALSE;
cx->fp->putActivationObjects(cx); fp->putActivationObjects(cx);
/* Update display table. */ /* Update display table. */
if (cx->fp->script->staticLevel < JS_DISPLAY_SIZE) if (fp->script->staticLevel < JS_DISPLAY_SIZE)
cx->display[cx->fp->script->staticLevel] = cx->fp->displaySave; cx->display[fp->script->staticLevel] = fp->displaySave;
/* Pop the frame and its memory. */ /* Pop the frame and its memory. */
cx->fp = cx->fp->down; JSStackFrame *down = fp->down;
JS_ASSERT(cx->fp->regs == &ifp->callerRegs); cx->stack().popInlineFrame(cx, fp, down);
cx->fp->regs = ifp->frame.regs; JS_ASSERT(cx->fp == down && cx->fp->regs == &fp->callerRegs);
down->regs = fp->regs;
JS_ARENA_RELEASE(&cx->stackPool, ifp->mark);
/* Update the inline call count. */ /* Update the inline call count. */
*state->inlineCallCountp = *state->inlineCallCountp - 1; *state->inlineCallCountp = *state->inlineCallCountp - 1;
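The manual regs fixup and JS_ARENA_RELEASE that used to live here are now centralized in the StackSpace inline-frame helpers. Roughly, the interpreter-side pairing looks like this (a sketch; the real call sites are in js_Interpret and not shown in these hunks):

    /* Call: carve the callee's frame out of the current callstack, just above sp. */
    JSStackFrame *newfp = cx->stack().getInlineFrame(cx, regs.sp, nmissing, nslots);
    if (!newfp)
        return JS_FALSE;                               /* js_ReportOutOfScriptQuota already reported */
    /* ... initialize *newfp ... */
    cx->stack().pushInlineFrame(cx, cx->fp, newfp);    /* sets newfp->down and cx->fp */

    /* Return: the mirror of what js_PopInterpFrame does above. */
    cx->stack().popInlineFrame(cx, fp, fp->down);      /* restores cx->fp to fp->down */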

View File

@@ -70,6 +70,23 @@
#include "jsstaticcheck.h" #include "jsstaticcheck.h"
#include "jsstr.h" #include "jsstr.h"
#include "jstracer.h" #include "jstracer.h"
#include "jsiter.h"
#include "jscntxtinlines.h"
#ifdef XP_WIN
# include <windows.h>
#else
# include <unistd.h>
# include <sys/mman.h>
# if !defined(MAP_ANONYMOUS)
# if defined(MAP_ANON)
# define MAP_ANONYMOUS MAP_ANON
# else
# define MAP_ANONYMOUS 0
# endif
# endif
#endif
using namespace js; using namespace js;
@@ -80,9 +97,10 @@ static void
MarkLocalRoots(JSTracer *trc, JSLocalRootStack *lrs); MarkLocalRoots(JSTracer *trc, JSLocalRootStack *lrs);
#ifdef DEBUG #ifdef DEBUG
bool JS_REQUIRES_STACK bool
CallStack::contains(JSStackFrame *fp) CallStack::contains(const JSStackFrame *fp) const
{ {
JS_ASSERT(inContext());
JSStackFrame *start; JSStackFrame *start;
JSStackFrame *stop; JSStackFrame *stop;
if (isSuspended()) { if (isSuspended()) {
@@ -100,6 +118,294 @@ CallStack::contains(JSStackFrame *fp)
} }
#endif #endif
bool
StackSpace::init()
{
void *p;
#ifdef XP_WIN
p = VirtualAlloc(NULL, sCapacityBytes, MEM_RESERVE, PAGE_READWRITE);
if (!p)
return false;
void *check = VirtualAlloc(p, sCommitBytes, MEM_COMMIT, PAGE_READWRITE);
if (p != check)
return false;
base = reinterpret_cast<jsval *>(p);
commitEnd = base + sCommitVals;
end = base + sCapacityVals;
#else
JS_ASSERT(sCapacityBytes % getpagesize() == 0);
p = mmap(NULL, sCapacityBytes, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
if (p == MAP_FAILED)
return false;
base = reinterpret_cast<jsval *>(p);
end = base + sCapacityVals;
#endif
return true;
}
void
StackSpace::finish()
{
#ifdef XP_WIN
VirtualFree(base, (commitEnd - base) * sizeof(jsval), MEM_DECOMMIT);
VirtualFree(base, 0, MEM_RELEASE);
#else
munmap(base, sCapacityBytes);
#endif
}
#ifdef XP_WIN
JS_FRIEND_API(bool)
StackSpace::bumpCommit(jsval *from, ptrdiff_t nvals) const
{
JS_ASSERT(end - from >= nvals);
jsval *newCommit = commitEnd;
jsval *request = from + nvals;
/* Use a dumb loop; will probably execute once. */
JS_ASSERT((end - newCommit) % sCommitVals == 0);
do {
newCommit += sCommitVals;
JS_ASSERT((end - newCommit) >= 0);
} while(newCommit < request);
/* Cast safe because sCapacityBytes is small. */
int32 size = (int32)(newCommit - commitEnd) * sizeof(jsval);
if (!VirtualAlloc(commitEnd, size, MEM_COMMIT, PAGE_READWRITE))
return false;
commitEnd = newCommit;
return true;
}
#endif
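The do/while above simply rounds the request up to the next sCommitVals boundary past commitEnd. An equivalent closed form, shown only to make the arithmetic explicit (not a proposed change):

    /* Assuming request > commitEnd, which bumpCommit's caller guarantees. */
    ptrdiff_t needed = request - commitEnd;                                     /* jsvals past the commit line */
    ptrdiff_t chunks = (needed + ptrdiff_t(sCommitVals) - 1) / ptrdiff_t(sCommitVals);  /* round up */
    jsval *newCommit = commitEnd + chunks * sCommitVals;                        /* same result as the loop */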
JS_REQUIRES_STACK void
StackSpace::mark(JSTracer *trc)
{
/*
* The correctness/completeness of marking depends on the continuity
* invariants described by the CallStack and StackSpace definitions.
*/
jsval *end = firstUnused();
for (CallStack *cs = currentCallStack; cs; cs = cs->getPreviousInThread()) {
if (!cs->inContext()) {
/* Mark slots/args trailing off callstack. */
JS_ASSERT(end == cs->getInitialArgEnd());
TraceValues(trc, cs->getInitialArgBegin(), cs->getInitialArgEnd(), "stack");
} else {
/* This may be the only pointer to the initialVarObj. */
if (cs->getInitialVarObj())
JS_CALL_OBJECT_TRACER(trc, cs->getInitialVarObj(), "varobj");
/* Mark slots/args trailing off of the last stack frame. */
JSStackFrame *fp = cs->getCurrentFrame();
TraceValues(trc, fp->slots(), end, "stack");
/* Mark stack frames and slots/args between stack frames. */
JSStackFrame *initialFrame = cs->getInitialFrame();
for (JSStackFrame *f = fp; f != initialFrame; f = f->down) {
js_TraceStackFrame(trc, f);
TraceValues(trc, f->down->slots(), f->argEnd(), "stack");
}
/* Mark initialFrame stack frame and leading args. */
js_TraceStackFrame(trc, initialFrame);
TraceValues(trc, cs->getInitialArgBegin(), initialFrame->argEnd(), "stack");
}
end = cs->previousCallStackEnd();
}
}
JS_REQUIRES_STACK bool
StackSpace::pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard &ag)
{
jsval *start = firstUnused();
uintN vplen = 2 + argc;
ptrdiff_t nvals = ValuesPerCallStack + vplen;
if (!ensureSpace(cx, start, nvals))
return false;
jsval *vp = start + ValuesPerCallStack;
jsval *vpend = vp + vplen;
memset(vp, 0, vplen * sizeof(jsval)); /* Init so GC-safe on exit. */
CallStack *cs = new(start) CallStack;
cs->setInitialArgEnd(vpend);
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
ag.cx = cx;
ag.cs = cs;
ag.argc = argc;
ag.vp = vp;
return true;
}
JS_REQUIRES_STACK JS_FRIEND_API(bool)
StackSpace::pushInvokeArgsFriendAPI(JSContext *cx, uintN argc,
InvokeArgsGuard &ag)
{
return cx->stack().pushInvokeArgs(cx, argc, ag);
}
InvokeFrameGuard::InvokeFrameGuard()
: cx(NULL), cs(NULL), fp(NULL)
{}
/*
* To maintain the 1 to 0..1 relationship between callstacks and js_Interpret
* activations, a callstack is pushed if one was not pushed for the arguments
* (viz., if the ternary InvokeArgsGuard constructor was used instead of the
* nullary constructor + pushInvokeArgs).
*/
bool
StackSpace::getInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
uintN nmissing, uintN nslots,
InvokeFrameGuard &fg) const
{
if (ag.cs) {
JS_ASSERT(ag.cs == currentCallStack && !ag.cs->inContext());
jsval *start = ag.cs->getInitialArgEnd();
ptrdiff_t nvals = nmissing + ValuesPerStackFrame + nslots;
if (!ensureSpace(cx, start, nvals))
return false;
fg.fp = reinterpret_cast<JSStackFrame *>(start + nmissing);
return true;
}
JS_ASSERT(isCurrent(cx) && currentCallStack->isActive());
jsval *start = cx->fp->regs->sp;
ptrdiff_t nvals = nmissing + ValuesPerCallStack + ValuesPerStackFrame + nslots;
if (!ensureSpace(cx, start, nvals))
return false;
fg.cs = new(start + nmissing) CallStack;
fg.fp = reinterpret_cast<JSStackFrame *>(fg.cs + 1);
return true;
}
JS_REQUIRES_STACK void
StackSpace::pushInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
InvokeFrameGuard &fg)
{
JS_ASSERT(!!ag.cs ^ !!fg.cs);
JS_ASSERT_IF(ag.cs, ag.cs == currentCallStack && !ag.cs->inContext());
if (CallStack *cs = fg.cs) {
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
}
JSStackFrame *fp = fg.fp;
fp->down = cx->fp;
cx->pushCallStackAndFrame(currentCallStack, fp);
currentCallStack->setInitialVarObj(NULL);
fg.cx = cx;
}
JS_REQUIRES_STACK
InvokeFrameGuard::~InvokeFrameGuard()
{
if (!cx)
return;
JS_ASSERT(fp && fp == cx->fp);
JS_ASSERT_IF(cs, cs == cx->stack().getCurrentCallStack());
cx->stack().popInvokeFrame(cx, cs);
}
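Taken together, pushInvokeArgs, getInvokeFrame, and pushInvokeFrame give js_Invoke its new overall shape (a condensed sketch; the real js_Invoke changes live in jsinterp.cpp, outside the hunks shown here, and nmissing/nslots stand in for the callee-derived counts):

    /* Inside js_Invoke, after the caller's pushInvokeArgs (sketch): */
    InvokeFrameGuard frame;
    if (!cx->stack().getInvokeFrame(cx, args, nmissing, nslots, frame))
        return JS_FALSE;                               /* nothing pushed yet; frame is unrooted */
    JSStackFrame *fp = frame.getFrame();
    /* ... initialize *fp: fun, script, argv = args.getvp() + 2, ... */
    cx->stack().pushInvokeFrame(cx, args, frame);      /* sets fp->down and joins the callstack */
    /* ... call the native or interpret fp->script ... */
    /* ~InvokeFrameGuard pops the frame when |frame| goes out of scope. */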
JS_REQUIRES_STACK void
StackSpace::popInvokeFrame(JSContext *cx, CallStack *maybecs)
{
JS_ASSERT(isCurrent(cx) && currentCallStack->getInitialFrame() == cx->fp);
JS_ASSERT_IF(maybecs, maybecs == currentCallStack);
cx->popCallStackAndFrame();
if (maybecs)
currentCallStack = currentCallStack->getPreviousInThread();
}
ExecuteFrameGuard::ExecuteFrameGuard()
: cx(NULL), vp(NULL), fp(NULL)
{}
JS_REQUIRES_STACK
ExecuteFrameGuard::~ExecuteFrameGuard()
{
if (!cx)
return;
JS_ASSERT(cx->activeCallStack() == cs);
JS_ASSERT(cx->fp == fp);
cx->stack().popExecuteFrame(cx);
}
/*
* To maintain a 1 to 0..1 relationship between callstacks and js_Interpret
* activations, we push a callstack even if it wasn't otherwise necessary.
*/
JS_REQUIRES_STACK bool
StackSpace::getExecuteFrame(JSContext *cx, JSStackFrame *down,
uintN vplen, uintN nslots,
ExecuteFrameGuard &fg) const
{
jsval *start = firstUnused();
ptrdiff_t nvals = ValuesPerCallStack + vplen + ValuesPerStackFrame + nslots;
if (!ensureSpace(cx, start, nvals))
return false;
fg.cs = new(start) CallStack;
fg.vp = start + ValuesPerCallStack;
fg.fp = reinterpret_cast<JSStackFrame *>(fg.vp + vplen);
fg.down = down;
return true;
}
JS_REQUIRES_STACK void
StackSpace::pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg,
JSObject *initialVarObj)
{
fg.fp->down = fg.down;
CallStack *cs = fg.cs;
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
cx->pushCallStackAndFrame(cs, fg.fp);
cs->setInitialVarObj(initialVarObj);
fg.cx = cx;
}
JS_REQUIRES_STACK void
StackSpace::popExecuteFrame(JSContext *cx)
{
JS_ASSERT(isCurrent(cx) && cx->hasActiveCallStack());
cx->popCallStackAndFrame();
currentCallStack = currentCallStack->getPreviousInThread();
}
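The execute path is the RAII analogue used by js_Execute and by the js_watch_set rewrite later in this patch: reserve callstack, vp, frame, and slots in one allocation, initialize them, then push. A minimal sketch, with down, vplen, nslots, and varobj as placeholders:

    ExecuteFrameGuard frame;
    if (!cx->stack().getExecuteFrame(cx, down, vplen, nslots, frame))
        return JS_FALSE;
    jsval *vp = frame.getvp();                         /* vplen rooted slots before the frame */
    JSStackFrame *fp = frame.getFrame();
    /* ... initialize vp[0..vplen-1] and *fp ... */
    cx->stack().pushExecuteFrame(cx, frame, varobj);   /* links fp->down = down, records varobj */
    /* ... interpret fp ... */
    /* ~ExecuteFrameGuard pops the frame and its callstack when |frame| dies. */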
JS_REQUIRES_STACK void
StackSpace::getSynthesizedSlowNativeFrame(JSContext *cx, CallStack *&cs, JSStackFrame *&fp)
{
jsval *start = firstUnused();
JS_ASSERT(size_t(end - start) >= ValuesPerCallStack + ValuesPerStackFrame);
cs = new(start) CallStack;
fp = reinterpret_cast<JSStackFrame *>(cs + 1);
}
JS_REQUIRES_STACK void
StackSpace::pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp)
{
JS_ASSERT(cx->fp->fun->isInterpreted());
JS_ASSERT(!fp->script && FUN_SLOW_NATIVE(fp->fun));
fp->down = cx->fp;
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
cx->pushCallStackAndFrame(cs, fp);
cs->setInitialVarObj(NULL);
}
JS_REQUIRES_STACK void
StackSpace::popSynthesizedSlowNativeFrame(JSContext *cx)
{
JS_ASSERT(isCurrent(cx) && cx->hasActiveCallStack());
JS_ASSERT(currentCallStack->getInitialFrame() == cx->fp);
JS_ASSERT(!cx->fp->script && FUN_SLOW_NATIVE(cx->fp->fun));
cx->popCallStackAndFrame();
currentCallStack = currentCallStack->getPreviousInThread();
}
bool bool
JSThreadData::init() JSThreadData::init()
{ {
@@ -108,6 +414,8 @@ JSThreadData::init()
for (size_t i = 0; i != sizeof(*this); ++i) for (size_t i = 0; i != sizeof(*this); ++i)
JS_ASSERT(reinterpret_cast<uint8*>(this)[i] == 0); JS_ASSERT(reinterpret_cast<uint8*>(this)[i] == 0);
#endif #endif
if (!stackSpace.init())
return false;
#ifdef JS_TRACER #ifdef JS_TRACER
InitJIT(&traceMonitor); InitJIT(&traceMonitor);
#endif #endif
@@ -140,11 +448,13 @@ JSThreadData::finish()
#if defined JS_TRACER #if defined JS_TRACER
FinishJIT(&traceMonitor); FinishJIT(&traceMonitor);
#endif #endif
stackSpace.finish();
} }
void void
JSThreadData::mark(JSTracer *trc) JSThreadData::mark(JSTracer *trc)
{ {
stackSpace.mark(trc);
#ifdef JS_TRACER #ifdef JS_TRACER
traceMonitor.mark(trc); traceMonitor.mark(trc);
#endif #endif
@@ -374,7 +684,8 @@ js_InitThreads(JSRuntime *rt)
return false; return false;
} }
#else #else
rt->threadData.init(); if (!rt->threadData.init())
return false;
#endif #endif
return true; return true;
} }
@@ -488,8 +799,6 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize)
JS_STATIC_ASSERT(JSVERSION_DEFAULT == 0); JS_STATIC_ASSERT(JSVERSION_DEFAULT == 0);
JS_ASSERT(cx->version == JSVERSION_DEFAULT); JS_ASSERT(cx->version == JSVERSION_DEFAULT);
VOUCH_DOES_NOT_REQUIRE_STACK(); VOUCH_DOES_NOT_REQUIRE_STACK();
JS_InitArenaPool(&cx->stackPool, "stack", stackChunkSize, sizeof(jsval),
&cx->scriptStackQuota);
JS_InitArenaPool(&cx->tempPool, "temp", JS_InitArenaPool(&cx->tempPool, "temp",
1024, /* FIXME: bug 421435 */ 1024, /* FIXME: bug 421435 */
@@ -850,7 +1159,6 @@ FreeContext(JSContext *cx)
/* Free the stuff hanging off of cx. */ /* Free the stuff hanging off of cx. */
js_FreeRegExpStatics(cx); js_FreeRegExpStatics(cx);
VOUCH_DOES_NOT_REQUIRE_STACK(); VOUCH_DOES_NOT_REQUIRE_STACK();
JS_FinishArenaPool(&cx->stackPool);
JS_FinishArenaPool(&cx->tempPool); JS_FinishArenaPool(&cx->tempPool);
if (cx->lastMessage) if (cx->lastMessage)
@@ -1927,6 +2235,78 @@ js_CurrentPCIsInImacro(JSContext *cx)
#endif #endif
} }
JSContext::JSContext(JSRuntime *rt)
: runtime(rt),
fp(NULL),
busyArrays(this)
{}
void
JSContext::pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp)
{
if (hasActiveCallStack())
currentCallStack->suspend(fp);
newcs->setPreviousInContext(currentCallStack);
currentCallStack = newcs;
setCurrentFrame(newfp);
newcs->joinContext(this, newfp);
}
void
JSContext::popCallStackAndFrame()
{
JS_ASSERT(currentCallStack->maybeContext() == this);
JS_ASSERT(currentCallStack->getInitialFrame() == fp);
currentCallStack->leaveContext();
currentCallStack = currentCallStack->getPreviousInContext();
if (currentCallStack) {
if (currentCallStack->isSaved()) {
setCurrentFrame(NULL);
} else {
setCurrentFrame(currentCallStack->getSuspendedFrame());
currentCallStack->resume();
}
} else {
JS_ASSERT(fp->down == NULL);
setCurrentFrame(NULL);
}
}
void
JSContext::saveActiveCallStack()
{
JS_ASSERT(hasActiveCallStack());
currentCallStack->save(fp);
setCurrentFrame(NULL);
}
void
JSContext::restoreCallStack()
{
JS_ASSERT(!hasActiveCallStack());
setCurrentFrame(currentCallStack->getSuspendedFrame());
currentCallStack->restore();
}
JSGenerator *
JSContext::generatorFor(JSStackFrame *fp) const
{
JS_ASSERT(stack().contains(fp) && fp->isGenerator());
JS_ASSERT(!fp->isFloatingGenerator());
JS_ASSERT(!genStack.empty());
if (JS_LIKELY(fp == genStack.back()->liveFrame))
return genStack.back();
/* General case; should only be needed for debug APIs. */
for (size_t i = 0; i < genStack.length(); ++i) {
if (genStack[i]->liveFrame == fp)
return genStack[i];
}
JS_NOT_REACHED("no matching generator");
return NULL;
}
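generatorFor assumes the genStack bookkeeping stays balanced around every generator activation; the intended bracketing (the send/resume machinery itself is in jsiter.cpp and not shown in these hunks; gen is a placeholder) is roughly:

    if (!cx->ensureGeneratorStackSpace())       /* early OOM check, before any state is touched */
        return JS_FALSE;
    cx->enterGenerator(gen);                    /* cannot fail after the reserve; liveFrame is now findable */
    /* ... run the generator's frame; generatorFor(fp) maps it back to |gen| ... */
    cx->leaveGenerator(gen);                    /* must pair with the enterGenerator above */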
CallStack * CallStack *
JSContext::containingCallStack(JSStackFrame *target) JSContext::containingCallStack(JSStackFrame *target)
{ {
@@ -1944,11 +2324,11 @@ JSContext::containingCallStack(JSStackFrame *target)
if (f == target) if (f == target)
return cs; return cs;
} }
cs = cs->getPrevious(); cs = cs->getPreviousInContext();
} }
/* A suspended callstack's top frame is its suspended frame. */ /* A suspended callstack's top frame is its suspended frame. */
for (; cs; cs = cs->getPrevious()) { for (; cs; cs = cs->getPreviousInContext()) {
JSStackFrame *f = cs->getSuspendedFrame(); JSStackFrame *f = cs->getSuspendedFrame();
JSStackFrame *stop = cs->getInitialFrame()->down; JSStackFrame *stop = cs->getInitialFrame()->down;
for (; f != stop; f = f->down) { for (; f != stop; f = f->down) {
@@ -2007,6 +2387,7 @@ JSContext::isConstructing()
return fp && (fp->flags & JSFRAME_CONSTRUCTING); return fp && (fp->flags & JSFRAME_CONSTRUCTING);
} }
/* /*
* Release pool's arenas if the stackPool has existed for longer than the * Release pool's arenas if the stackPool has existed for longer than the
* limit specified by gcEmptyArenaPoolLifespan. * limit specified by gcEmptyArenaPoolLifespan.
@@ -2025,7 +2406,6 @@ FreeOldArenas(JSRuntime *rt, JSArenaPool *pool)
void void
JSContext::purge() JSContext::purge()
{ {
FreeOldArenas(runtime, &stackPool);
FreeOldArenas(runtime, &regexpPool); FreeOldArenas(runtime, &regexpPool);
classProtoCache.purge(); classProtoCache.purge();
} }

View File

@@ -122,6 +122,7 @@ static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
static const size_t MAX_CALL_STACK_ENTRIES = 500; static const size_t MAX_CALL_STACK_ENTRIES = 500;
static const size_t MAX_GLOBAL_SLOTS = 4096; static const size_t MAX_GLOBAL_SLOTS = 4096;
static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1; static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
static const size_t MAX_SLOW_NATIVE_EXTRA_SLOTS = 16;
/* Forward declarations of tracer types. */ /* Forward declarations of tracer types. */
class VMAllocator; class VMAllocator;
@@ -228,25 +229,56 @@ struct GlobalState {
}; };
/* /*
* A callstack contains a set of stack frames linked by fp->down. A callstack * Callstacks
* is a member of a JSContext and all of a JSContext's callstacks are kept in a
* list starting at cx->currentCallStack. A callstack may be active or
* suspended. There are zero or one active callstacks for a context and any
* number of suspended contexts. If there is an active context, it is the first
* in the currentCallStack list, |cx->fp != NULL| and the callstack's newest
* (top) stack frame is |cx->fp|. For all other (suspended) callstacks, the
* newest frame is pointed to by suspendedFrame.
* *
* While all frames in a callstack are down-linked, not all down-linked frames * A callstack logically contains the (possibly empty) set of stack frames
* are in the same callstack (e.g., calling js_Execute with |down != cx->fp| * associated with a single activation of the VM and the slots associated with
* will create a new frame in a new active callstack). * each frame. A callstack may or may not be "in" a context and a callstack is
* in a context iff its set of stack frames is nonempty. A callstack and its
* contained frames/slots also have an implied memory layout, as described in
* the js::StackSpace comment.
*
* The set of stack frames in a non-empty callstack start at the callstack's
* "current frame", which is the most recently pushed frame, and ends at the
* callstack's "initial frame". Note that, while all stack frames in a
* callstack are down-linked, not all down-linked frames are in the same
* callstack. Hence, for a callstack |cs|, |cs->getInitialFrame()->down| may be
* non-null and in a different callstack. This occurs when the VM reenters
* itself (via js_Invoke or js_Execute). Thus, in general, a callstack only
* contains a linear path (not necessarily to the root) of a (not the) tree
* (not list) of stack frames in a context.
*
* A callstack in a context may additionally be "active" or "suspended". A
* suspended callstack |cs| has a "suspended frame" which serves as the current
* frame of |cs|. There is at most one active callstack in a given context.
* Callstacks in a context execute LIFO and are maintained in a stack. The top
* of this stack is the context's "current callstack". If a context |cx| has an
* active callstack |cs|, then:
* 1. |cs| is |cx|'s current callstack,
* 2. |cx->fp != NULL|, and
* 3. |cs|'s current frame is |cx->fp|.
* Moreover, |cx->fp != NULL| iff |cx| has an active callstack.
*
* Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended
* callstack may or may not be "saved". Normally, when the active callstack is
* popped, the previous callstack (which is necessarily suspended) becomes
* active. If the previous callstack was saved, however, then it stays
* suspended until it is made active by a call to JS_RestoreFrameChain. This is
* why a context may have a current callstack, but not an active callstack.
*/ */
class CallStack class CallStack
{ {
#ifdef DEBUG
/* The context to which this callstack belongs. */ /* The context to which this callstack belongs. */
JSContext *cx; JSContext *cx;
#endif
/* Link for JSContext callstack stack mentioned in big comment above. */
CallStack *previousInContext;
/* Link for StackSpace callstack stack mentioned in StackSpace comment. */
CallStack *previousInThread;
/* The first frame executed in this callstack. null iff cx is null */
JSStackFrame *initialFrame;
/* If this callstack is suspended, the top of the callstack. */ /* If this callstack is suspended, the top of the callstack. */
JSStackFrame *suspendedFrame; JSStackFrame *suspendedFrame;
@@ -254,31 +286,77 @@ class CallStack
/* This callstack was suspended by JS_SaveFrameChain. */ /* This callstack was suspended by JS_SaveFrameChain. */
bool saved; bool saved;
/* Links members of the JSContext::currentCallStack list. */ /* End of arguments before the first frame. See StackSpace comment. */
CallStack *previous; jsval *initialArgEnd;
/* The varobj on entry to initialFrame. */ /* The varobj on entry to initialFrame. */
JSObject *initialVarObj; JSObject *initialVarObj;
/* The first frame executed in this callstack. */
JSStackFrame *initialFrame;
public: public:
CallStack(JSContext *cx) CallStack()
: : cx(NULL), previousInContext(NULL), previousInThread(NULL),
#ifdef DEBUG initialFrame(NULL), suspendedFrame(NULL), saved(false),
cx(cx), initialArgEnd(NULL), initialVarObj(NULL)
#endif
suspendedFrame(NULL), saved(false), previous(NULL),
initialVarObj(NULL), initialFrame(NULL)
{} {}
#ifdef DEBUG /* Safe casts guaranteed by the contiguous-stack layout. */
bool contains(JSStackFrame *fp);
#endif jsval *previousCallStackEnd() const {
return (jsval *)this;
}
jsval *getInitialArgBegin() const {
return (jsval *)(this + 1);
}
/* The three mutually exclusive states of a callstack */
bool inContext() const {
JS_ASSERT(!!cx == !!initialFrame);
JS_ASSERT_IF(!initialFrame, !suspendedFrame && !saved);
return cx;
}
bool isActive() const {
JS_ASSERT_IF(suspendedFrame, inContext());
return initialFrame && !suspendedFrame;
}
bool isSuspended() const {
JS_ASSERT_IF(!suspendedFrame, !saved);
JS_ASSERT_IF(suspendedFrame, inContext());
return suspendedFrame;
}
/* Substate of suspended, queryable in any state. */
bool isSaved() const {
JS_ASSERT_IF(saved, isSuspended());
return saved;
}
/* Transitioning between inContext <--> isActive */
void joinContext(JSContext *cx, JSStackFrame *f) {
JS_ASSERT(!inContext());
this->cx = cx;
initialFrame = f;
}
void leaveContext() {
JS_ASSERT(inContext());
this->cx = NULL;
initialFrame = NULL;
}
JSContext *maybeContext() const {
return cx;
}
/* Transitioning between isActive <--> isSuspended */
void suspend(JSStackFrame *fp) { void suspend(JSStackFrame *fp) {
JS_ASSERT(fp && !isSuspended() && contains(fp)); JS_ASSERT(fp && isActive() && contains(fp));
suspendedFrame = fp; suspendedFrame = fp;
} }
@@ -287,45 +365,341 @@ class CallStack
suspendedFrame = NULL; suspendedFrame = NULL;
} }
JSStackFrame *getSuspendedFrame() const { /* When isSuspended, transitioning isSaved <--> !isSaved */
JS_ASSERT(suspendedFrame);
return suspendedFrame;
}
bool isSuspended() const { return !!suspendedFrame; }
void setPrevious(CallStack *cs) { previous = cs; }
CallStack *getPrevious() const { return previous; }
void setInitialVarObj(JSObject *o) { initialVarObj = o; }
JSObject *getInitialVarObj() const { return initialVarObj; }
void setInitialFrame(JSStackFrame *f) { initialFrame = f; }
JSStackFrame *getInitialFrame() const { return initialFrame; }
/*
* Saving and restoring is a special case of suspending and resuming
* whereby the active callstack becomes suspended without pushing a new
* active callstack. This means that if a callstack c1 is pushed on top of a
* saved callstack c2, when c1 is popped, c2 must not be made active. In
* the normal case, where c2 is not saved, when c1 is popped, c2 is made
* active. This distinction is indicated by the |saved| flag.
*/
void save(JSStackFrame *fp) { void save(JSStackFrame *fp) {
JS_ASSERT(!saved);
suspend(fp); suspend(fp);
saved = true; saved = true;
} }
void restore() { void restore() {
JS_ASSERT(saved);
saved = false; saved = false;
resume(); resume();
} }
bool isSaved() const { /* Data available when !inContext */
JS_ASSERT_IF(saved, isSuspended());
return saved; void setInitialArgEnd(jsval *v) {
JS_ASSERT(!inContext() && !initialArgEnd);
initialArgEnd = v;
} }
jsval *getInitialArgEnd() const {
JS_ASSERT(!inContext() && initialArgEnd);
return initialArgEnd;
}
/* Data available when inContext */
JSStackFrame *getInitialFrame() const {
JS_ASSERT(inContext());
return initialFrame;
}
inline JSStackFrame *getCurrentFrame() const;
/* Data available when isSuspended. */
JSStackFrame *getSuspendedFrame() const {
JS_ASSERT(isSuspended());
return suspendedFrame;
}
/* JSContext / js::StackSpace bookkeeping. */
void setPreviousInContext(CallStack *cs) {
previousInContext = cs;
}
CallStack *getPreviousInContext() const {
return previousInContext;
}
void setPreviousInThread(CallStack *cs) {
previousInThread = cs;
}
CallStack *getPreviousInThread() const {
return previousInThread;
}
void setInitialVarObj(JSObject *o) {
JS_ASSERT(inContext());
initialVarObj = o;
}
JSObject *getInitialVarObj() const {
JS_ASSERT(inContext());
return initialVarObj;
}
#ifdef DEBUG
JS_REQUIRES_STACK bool contains(const JSStackFrame *fp) const;
#endif
};
JS_STATIC_ASSERT(sizeof(CallStack) % sizeof(jsval) == 0);
static const size_t ValuesPerCallStack = sizeof(CallStack) / sizeof(jsval);
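The state machine described in the big comment above can be read directly off the predicates defined in the class. As a quick illustration, a hypothetical debug helper (not part of the patch) that names the state a callstack is in:

    /* Hypothetical helper: classify a callstack using only its public predicates. */
    static const char *
    CallStackState(const js::CallStack &cs)
    {
        if (!cs.inContext())
            return "empty: not in any context, only initial args pushed";
        if (cs.isActive())
            return "active: its current frame is cx->fp";
        return cs.isSaved() ? "suspended+saved: waiting for JS_RestoreFrameChain"
                            : "suspended: another callstack is running in this context";
    }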
/*
* The ternary constructor is used when arguments are already pushed on the
* stack (as the sp of the current frame), which should only happen from within
* js_Interpret. Otherwise, see StackSpace::pushInvokeArgs.
*/
class InvokeArgsGuard
{
friend class StackSpace;
JSContext *cx;
CallStack *cs; /* null implies nothing pushed */
jsval *vp;
uintN argc;
public:
inline InvokeArgsGuard();
inline InvokeArgsGuard(jsval *vp, uintN argc);
inline ~InvokeArgsGuard();
jsval *getvp() const { return vp; }
uintN getArgc() const { JS_ASSERT(vp != NULL); return argc; }
};
/* See StackSpace::pushInvokeFrame. */
class InvokeFrameGuard
{
friend class StackSpace;
JSContext *cx; /* null implies nothing pushed */
CallStack *cs;
JSStackFrame *fp;
public:
InvokeFrameGuard();
JS_REQUIRES_STACK ~InvokeFrameGuard();
JSStackFrame *getFrame() const { return fp; }
};
/* See StackSpace::pushExecuteFrame. */
class ExecuteFrameGuard
{
friend class StackSpace;
JSContext *cx; /* null implies nothing pushed */
CallStack *cs;
jsval *vp;
JSStackFrame *fp;
JSStackFrame *down;
public:
ExecuteFrameGuard();
JS_REQUIRES_STACK ~ExecuteFrameGuard();
jsval *getvp() const { return vp; }
JSStackFrame *getFrame() const { return fp; }
};
/*
* Thread stack layout
*
* Each JSThreadData has one associated StackSpace object which allocates all
* callstacks for the thread. StackSpace performs all such allocations in a
* single, fixed-size buffer using a careful layout scheme that allows some
* associations between callstacks, frames, and slots to be implicit, rather
* than explicitly stored as pointers. To maintain useful invariants, stack
* space is not given out arbitrarily, but rather allocated/deallocated for
* specific purposes. The use cases currently supported are: calling js_Invoke
* with arguments, calling js_Execute and inline calls. See associated member
* functions below.
*
* First, we consider the layout of individual callstacks. (See the
* js::CallStack comment for terminology.) A non-empty callstack has the
* following layout:
*
* initial frame current frame ------. if regs,
* .------------. | | regs->sp
* | V V V
* |callstack| slots |frame| slots |frame| slots |frame| slots|
* | ^ | ^ |
* ? <----' `----------' `----------'
*
* Moreover, the bytes between a callstack and its first frame and between two
* adjacent frames in a callstack are GC-able jsvals. If the current frame's
* regs pointer is null (e.g., native frame), there are no final slots.
*
* An empty callstack roots the initial slots before the initial frame is
* pushed and after the initial frame has been popped (perhaps to be followed
* by subsequent initial frame pushes/pops...).
*
* initialArgEnd
* .---------.
* | V
* |callstack| slots |
*
* Above the level of callstacks, a StackSpace is simply a contiguous sequence
* of callstacks kept in a linked list:
*
* base currentCallStack firstUnused end
* | | | |
* V V V V
* |callstack| --- |callstack| --- |callstack| --- | |
* | ^ | ^ |
* 0 <----' `------------' `------------'
* previous previous previous
*
* Both js::StackSpace and JSContext maintain a stack of callstacks, the top of
* which is the "current callstack" for that thread or context, respectively.
* Since different contexts can arbitrarily interleave execution in a single
* thread, these stacks are different enough that a callstack needs both
* "previousInThread" and "previousInContext". (Not completely different; there
* is an order-preserving injection from each context's callstack-ordering to
* that of the context's thread.)
*/
class StackSpace
{
jsval *base;
#ifdef XP_WIN
mutable jsval *commitEnd;
#endif
jsval *end;
CallStack *currentCallStack;
/* Although guards are friends, XGuard should only call popX(). */
friend class InvokeArgsGuard;
JS_REQUIRES_STACK inline void popInvokeArgs(JSContext *cx, jsval *vp);
friend class InvokeFrameGuard;
JS_REQUIRES_STACK void popInvokeFrame(JSContext *cx, CallStack *maybecs);
friend class ExecuteFrameGuard;
JS_REQUIRES_STACK void popExecuteFrame(JSContext *cx);
/* Return a pointer to the first unused slot. */
JS_REQUIRES_STACK
inline jsval *firstUnused() const;
#ifdef DEBUG
inline bool isCurrent(JSContext *cx) const;
CallStack *getCurrentCallStack() const { return currentCallStack; }
#endif
/*
* Return whether nvals can be allocated from the top of the stack.
* N.B. the caller must ensure |from == firstUnused()|.
*/
inline bool ensureSpace(JSContext *maybecx, jsval *from, ptrdiff_t nvals) const;
#ifdef XP_WIN
/* Commit more memory from the reserved stack space. */
JS_FRIEND_API(bool) bumpCommit(jsval *from, ptrdiff_t nvals) const;
#endif
public:
static const size_t sCapacityVals = 512 * 1024;
static const size_t sCapacityBytes = sCapacityVals * sizeof(jsval);
static const size_t sCommitVals = 16 * 1024;
static const size_t sCommitBytes = sCommitVals * sizeof(jsval);
JS_STATIC_ASSERT(sCapacityVals % sCommitVals == 0);
/* Kept as a member of JSThreadData; cannot use constructor/destructor. */
bool init();
void finish();
#ifdef DEBUG
template <class T>
bool contains(T *t) const {
char *v = (char *)t;
return v >= (char *)base && v + sizeof(T) <= (char *)end;
}
#endif
/*
* When we LeaveTree, we need to rebuild the stack, which requires stack
* allocation. There is no good way to handle an OOM for these allocations,
* so this function checks that they cannot occur using the size of the
* TraceNativeStorage as a conservative upper bound.
*/
inline bool ensureEnoughSpaceToEnterTrace();
/* +1 for slow native's stack frame. */
static const ptrdiff_t sMaxJSValsNeededForTrace =
MAX_NATIVE_STACK_SLOTS + MAX_CALL_STACK_ENTRIES * ValuesPerStackFrame +
(ValuesPerCallStack + ValuesPerStackFrame /* synthesized slow native */);
/* Mark all callstacks, frames, and slots on the stack. */
JS_REQUIRES_STACK
void mark(JSTracer *trc);
/*
* For all three use cases below:
* - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
* - The "get*Frame" functions do not change any global state, they just
* check OOM and return pointers to an uninitialized frame with the
* requested missing arguments/slots. Only once the "push*Frame"
* function has been called is global state updated. Thus, between
* "get*Frame" and "push*Frame", the frame and slots are unrooted.
* - The "push*Frame" functions will set fp->down; the caller needn't.
* - Functions taking "*Guard" arguments will use the guard's destructor
* to pop the allocation. The caller must ensure the guard has the
* appropriate lifetime.
* - The get*Frame functions put the 'nmissing' slots contiguously after
* the arguments.
*/
/*
* pushInvokeArgs allocates |argc+2| rooted values that will be passed as
* the arguments to js_Invoke. A single allocation can be used for multiple
* js_Invoke calls. The InvokeArgsGuard passed to js_Invoke must come
* from an immediately-enclosing (stack-wise) call to pushInvokeArgs.
*/
JS_REQUIRES_STACK
bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard &ag);
/* These functions are called inside js_Invoke, not js_Invoke clients. */
bool getInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
uintN nmissing, uintN nslots,
InvokeFrameGuard &fg) const;
JS_REQUIRES_STACK
void pushInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
InvokeFrameGuard &fg);
/*
* For the simpler case when arguments are allocated at the same time as
* the frame and it is not necessary to have rooted argument values before
* pushing the frame.
*/
JS_REQUIRES_STACK
bool getExecuteFrame(JSContext *cx, JSStackFrame *down,
uintN vplen, uintN nslots,
ExecuteFrameGuard &fg) const;
JS_REQUIRES_STACK
void pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg,
JSObject *initialVarObj);
/*
* Since RAII cannot be used for inline frames, callers must manually
* call pushInlineFrame/popInlineFrame.
*/
JS_REQUIRES_STACK
inline JSStackFrame *getInlineFrame(JSContext *cx, jsval *sp,
uintN nmissing, uintN nslots) const;
JS_REQUIRES_STACK
inline void pushInlineFrame(JSContext *cx, JSStackFrame *fp, JSStackFrame *newfp);
JS_REQUIRES_STACK
inline void popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down);
/*
* For the special case of the slow native stack frame pushed and popped by
* tracing deep bail logic.
*/
JS_REQUIRES_STACK
void getSynthesizedSlowNativeFrame(JSContext *cx, CallStack *&cs, JSStackFrame *&fp);
JS_REQUIRES_STACK
void pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp);
JS_REQUIRES_STACK
void popSynthesizedSlowNativeFrame(JSContext *cx);
/* Our privates leak into xpconnect, which needs a public symbol. */
JS_REQUIRES_STACK
JS_FRIEND_API(bool) pushInvokeArgsFriendAPI(JSContext *, uintN, InvokeArgsGuard &);
}; };
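To put the StackSpace constants in concrete terms (simple arithmetic, assuming 4-byte jsvals on 32-bit builds and 8-byte jsvals on 64-bit builds):

    sCapacityVals = 512 * 1024 jsvals  ->  2 MiB reserved per thread (32-bit), 4 MiB (64-bit)
    sCommitVals   =  16 * 1024 jsvals  -> 64 KiB committed at a time (32-bit), 128 KiB (64-bit)
    sCapacityVals / sCommitVals = 32, so the Windows path grows the committed region in at most
    32 bumpCommit steps; the POSIX path maps everything up front and relies on demand paging.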
/* Holds the number of recording attempts for an address. */ /* Holds the number of recording attempts for an address. */
@@ -519,6 +893,9 @@ const uint32 JSLRS_NULL_MARK = uint32(-1);
struct JSThreadData { struct JSThreadData {
JSGCFreeLists gcFreeLists; JSGCFreeLists gcFreeLists;
/* Keeper of the contiguous stack used by all contexts in this thread. */
js::StackSpace stackSpace;
/* /*
* Flag indicating that we are waiving any soft limits on the GC heap * Flag indicating that we are waiving any soft limits on the GC heap
* because we want allocations to be infallible (except when we hit * because we want allocations to be infallible (except when we hit
@@ -1142,13 +1519,6 @@ struct JSArgumentFormatMap {
}; };
#endif #endif
struct JSStackHeader {
uintN nslots;
JSStackHeader *down;
};
#define JS_STACK_SEGMENT(sh) ((jsval *)(sh) + 2)
/* /*
* Key and entry types for the JSContext.resolvingTable hash table, typedef'd * Key and entry types for the JSContext.resolvingTable hash table, typedef'd
* here because all consumers need to see these declarations (and not just the * here because all consumers need to see these declarations (and not just the
@@ -1178,7 +1548,7 @@ class AutoGCRooter;
struct JSContext struct JSContext
{ {
explicit JSContext(JSRuntime *rt) : runtime(rt), busyArrays(this) {} explicit JSContext(JSRuntime *rt);
/* /*
* If this flag is set, we were asked to call back the operation callback * If this flag is set, we were asked to call back the operation callback
@@ -1246,15 +1616,21 @@ struct JSContext
size_t scriptStackQuota; size_t scriptStackQuota;
/* Data shared by threads in an address space. */ /* Data shared by threads in an address space. */
JSRuntime * const runtime; JSRuntime *const runtime;
/* Stack arena pool and frame pointer register. */
JS_REQUIRES_STACK
JSArenaPool stackPool;
/* Currently executing frame, set by stack operations. */
JS_REQUIRES_STACK JS_REQUIRES_STACK
JSStackFrame *fp; JSStackFrame *fp;
private:
friend class js::StackSpace;
/* 'fp' must only be changed by calling this function. */
void setCurrentFrame(JSStackFrame *fp) {
this->fp = fp;
}
public:
/* Temporary arena pool used while compiling and decompiling. */ /* Temporary arena pool used while compiling and decompiling. */
JSArenaPool tempPool; JSArenaPool tempPool;
@@ -1295,58 +1671,52 @@ struct JSContext
void *data2; void *data2;
private: private:
#ifdef __GNUC__
# pragma GCC visibility push(default)
#endif
friend void js_TraceContext(JSTracer *, JSContext *);
#ifdef __GNUC__
# pragma GCC visibility pop
#endif
/* Linked list of callstacks. See CallStack. */ /* Linked list of callstacks. See CallStack. */
js::CallStack *currentCallStack; js::CallStack *currentCallStack;
public: public:
#ifdef DEBUG
bool callStackInSync() const {
if (fp) {
JS_ASSERT(currentCallStack->isActive());
if (js::CallStack *prev = currentCallStack->getPreviousInContext())
JS_ASSERT(!prev->isActive());
} else {
JS_ASSERT_IF(currentCallStack, !currentCallStack->isActive());
}
return true;
}
#endif
/* Return whether this context has an active callstack. */
bool hasActiveCallStack() const {
JS_ASSERT(callStackInSync());
return fp;
}
/* Assuming there is an active callstack, return it. */ /* Assuming there is an active callstack, return it. */
js::CallStack *activeCallStack() const { js::CallStack *activeCallStack() const {
JS_ASSERT(currentCallStack && !currentCallStack->isSaved()); JS_ASSERT(hasActiveCallStack());
return currentCallStack;
}
/* Return the current callstack, which may or may not be active. */
js::CallStack *getCurrentCallStack() const {
JS_ASSERT(callStackInSync());
return currentCallStack; return currentCallStack;
} }
/* Add the given callstack to the list as the new active callstack. */ /* Add the given callstack to the list as the new active callstack. */
void pushCallStack(js::CallStack *newcs) { void pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp);
if (fp)
currentCallStack->suspend(fp);
else
JS_ASSERT_IF(currentCallStack, currentCallStack->isSaved());
newcs->setPrevious(currentCallStack);
currentCallStack = newcs;
JS_ASSERT(!newcs->isSuspended() && !newcs->isSaved());
}
/* Remove the active callstack and make the next callstack active. */ /* Remove the active callstack and make the next callstack active. */
void popCallStack() { void popCallStackAndFrame();
JS_ASSERT(!currentCallStack->isSuspended() && !currentCallStack->isSaved());
currentCallStack = currentCallStack->getPrevious();
if (currentCallStack && !currentCallStack->isSaved()) {
JS_ASSERT(fp);
currentCallStack->resume();
}
}
/* Mark the top callstack as suspended, without pushing a new one. */ /* Mark the top callstack as suspended, without pushing a new one. */
void saveActiveCallStack() { void saveActiveCallStack();
JS_ASSERT(fp && currentCallStack && !currentCallStack->isSuspended());
currentCallStack->save(fp);
fp = NULL;
}
/* Undoes calls to suspendTopCallStack. */ /* Undoes calls to suspendTopCallStack. */
void restoreCallStack() { void restoreCallStack();
JS_ASSERT(!fp && currentCallStack && currentCallStack->isSuspended());
fp = currentCallStack->getSuspendedFrame();
currentCallStack->restore();
}
/* /*
* Perform a linear search of all frames in all callstacks in the given context * Perform a linear search of all frames in all callstacks in the given context
@@ -1367,9 +1737,6 @@ struct JSContext
((JSContext *)((char *)(tl) - offsetof(JSContext, threadLinks))) ((JSContext *)((char *)(tl) - offsetof(JSContext, threadLinks)))
#endif #endif
/* PDL of stack headers describing stack slots not rooted by argv, etc. */
JSStackHeader *stackHeaders;
/* Stack of thread-stack-allocated GC roots. */ /* Stack of thread-stack-allocated GC roots. */
js::AutoGCRooter *autoGCRooters; js::AutoGCRooter *autoGCRooters;
@@ -1412,6 +1779,33 @@ struct JSContext
JSClassProtoCache classProtoCache; JSClassProtoCache classProtoCache;
private:
/*
* To go from a live generator frame (on the stack) to its generator object
* (see the comment at js_FloatingFrameIfGenerator), we maintain a stack of active
* generators, pushing and popping when entering and leaving generator
* frames, respectively.
*/
js::Vector<JSGenerator *, 0, js::SystemAllocPolicy> genStack;
public:
/* Return the generator object for the given generator frame. */
JSGenerator *generatorFor(JSStackFrame *fp) const;
/* Early OOM-check. */
bool ensureGeneratorStackSpace() {
return genStack.reserve(genStack.length() + 1);
}
bool enterGenerator(JSGenerator *gen) {
return genStack.append(gen);
}
void leaveGenerator(JSGenerator *gen) {
JS_ASSERT(genStack.back() == gen);
genStack.popBack();
}
/* Caller must be holding runtime->gcLock. */ /* Caller must be holding runtime->gcLock. */
void updateJITEnabled() { void updateJITEnabled() {
#ifdef JS_TRACER #ifdef JS_TRACER
@@ -1572,6 +1966,10 @@ struct JSContext
void purge(); void purge();
js::StackSpace &stack() const {
return JS_THREAD_DATA(this)->stackSpace;
}
private: private:
/* /*
@@ -1584,14 +1982,14 @@ private:
}; };
JS_ALWAYS_INLINE JSObject * JS_ALWAYS_INLINE JSObject *
JSStackFrame::varobj(js::CallStack *cs) JSStackFrame::varobj(js::CallStack *cs) const
{ {
JS_ASSERT(cs->contains(this)); JS_ASSERT(cs->contains(this));
return fun ? callobj : cs->getInitialVarObj(); return fun ? callobj : cs->getInitialVarObj();
} }
JS_ALWAYS_INLINE JSObject * JS_ALWAYS_INLINE JSObject *
JSStackFrame::varobj(JSContext *cx) JSStackFrame::varobj(JSContext *cx) const
{ {
JS_ASSERT(cx->activeCallStack()->contains(this)); JS_ASSERT(cx->activeCallStack()->contains(this));
return fun ? callobj : cx->activeCallStack()->getInitialVarObj(); return fun ? callobj : cx->activeCallStack()->getInitialVarObj();
@@ -2212,7 +2610,7 @@ js_ReportOutOfMemory(JSContext *cx);
/* /*
* Report that cx->scriptStackQuota is exhausted. * Report that cx->scriptStackQuota is exhausted.
*/ */
extern void void
js_ReportOutOfScriptQuota(JSContext *cx); js_ReportOutOfScriptQuota(JSContext *cx);
extern void extern void

View File

@@ -41,12 +41,148 @@
#define jscntxtinlines_h___ #define jscntxtinlines_h___
#include "jscntxt.h" #include "jscntxt.h"
#include "jsparse.h"
#include "jsxml.h" #include "jsxml.h"
#include "jsobjinlines.h" #include "jsobjinlines.h"
namespace js { namespace js {
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
CallStack::getCurrentFrame() const
{
JS_ASSERT(inContext());
return isSuspended() ? getSuspendedFrame() : cx->fp;
}
JS_REQUIRES_STACK inline jsval *
StackSpace::firstUnused() const
{
CallStack *ccs = currentCallStack;
if (!ccs)
return base;
if (!ccs->inContext())
return ccs->getInitialArgEnd();
JSStackFrame *fp = ccs->getCurrentFrame();
if (JSFrameRegs *regs = fp->regs)
return regs->sp;
return fp->slots();
}
#ifdef DEBUG
/* Inline so we don't need the friend API. */
JS_ALWAYS_INLINE bool
StackSpace::isCurrent(JSContext *cx) const
{
JS_ASSERT(cx == currentCallStack->maybeContext());
JS_ASSERT(cx->getCurrentCallStack() == currentCallStack);
JS_ASSERT(cx->callStackInSync());
return true;
}
#endif
JS_ALWAYS_INLINE bool
StackSpace::ensureSpace(JSContext *maybecx, jsval *from, ptrdiff_t nvals) const
{
JS_ASSERT(from == firstUnused());
#ifdef XP_WIN
JS_ASSERT(from <= commitEnd);
if (commitEnd - from >= nvals)
return true;
if (end - from < nvals) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
if (!bumpCommit(from, nvals)) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
return true;
#else
if (end - from < nvals) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
return true;
#endif
}
JS_ALWAYS_INLINE bool
StackSpace::ensureEnoughSpaceToEnterTrace()
{
#ifdef XP_WIN
return ensureSpace(NULL, firstUnused(), sMaxJSValsNeededForTrace);
#endif
return end - firstUnused() > sMaxJSValsNeededForTrace;
}
JS_ALWAYS_INLINE void
StackSpace::popInvokeArgs(JSContext *cx, jsval *vp)
{
JS_ASSERT(!currentCallStack->inContext());
currentCallStack = currentCallStack->getPreviousInThread();
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSpace::getInlineFrame(JSContext *cx, jsval *sp,
uintN nmissing, uintN nslots) const
{
JS_ASSERT(isCurrent(cx) && cx->hasActiveCallStack());
JS_ASSERT(cx->fp->regs->sp == sp);
ptrdiff_t nvals = nmissing + ValuesPerStackFrame + nslots;
if (!ensureSpace(cx, sp, nvals))
return NULL;
JSStackFrame *fp = reinterpret_cast<JSStackFrame *>(sp + nmissing);
return fp;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::pushInlineFrame(JSContext *cx, JSStackFrame *fp, JSStackFrame *newfp)
{
JS_ASSERT(isCurrent(cx) && cx->hasActiveCallStack());
JS_ASSERT(cx->fp == fp);
newfp->down = fp;
cx->setCurrentFrame(newfp);
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down)
{
JS_ASSERT(isCurrent(cx) && cx->hasActiveCallStack());
JS_ASSERT(cx->fp == up && up->down == down);
cx->setCurrentFrame(down);
}
/*
* InvokeArgsGuard is used outside the JS engine. To simplify symbol visibility
* issues, force InvokeArgsGuard members inline:
*/
JS_ALWAYS_INLINE
InvokeArgsGuard::InvokeArgsGuard()
: cx(NULL), cs(NULL), vp(NULL)
{}
JS_ALWAYS_INLINE
InvokeArgsGuard::InvokeArgsGuard(jsval *vp, uintN argc)
: cx(NULL), cs(NULL), vp(vp), argc(argc)
{}
JS_ALWAYS_INLINE
InvokeArgsGuard::~InvokeArgsGuard()
{
if (!cs)
return;
JS_ASSERT(cs == cx->stack().getCurrentCallStack());
cx->stack().popInvokeArgs(cx, vp);
}
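The two constructors correspond to the two ways argument slots reach js_Invoke: pushed fresh by a caller, or already sitting on the contiguous stack as the operands of a call opcode inside js_Interpret. A sketch of both uses (locals are illustrative):

    /* Outside the interpreter: allocate and root fresh argument slots. */
    InvokeArgsGuard args;
    if (!cx->stack().pushInvokeArgs(cx, argc, args))
        return JS_FALSE;
    /* ... fill args.getvp()[0 .. argc+1], then js_Invoke(cx, args, 0) ... */

    /* Inside js_Interpret: the operands are already on the stack at |vp|, so the
       guard merely wraps them; its destructor sees cs == NULL and pops nothing. */
    InvokeArgsGuard wrapped(vp, argc);
    if (!js_Invoke(cx, wrapped, 0))
        return JS_FALSE;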
void void
AutoIdArray::trace(JSTracer *trc) { AutoIdArray::trace(JSTracer *trc) {
JS_ASSERT(tag == IDARRAY); JS_ASSERT(tag == IDARRAY);

View File

@@ -604,148 +604,126 @@ js_GetWatchedSetter(JSRuntime *rt, JSScope *scope,
JSBool JSBool
js_watch_set(JSContext *cx, JSObject *obj, jsval id, jsval *vp) js_watch_set(JSContext *cx, JSObject *obj, jsval id, jsval *vp)
{ {
JSRuntime *rt; JSRuntime *rt = cx->runtime;
JSWatchPoint *wp;
JSScopeProperty *sprop;
jsval propid, userid;
JSScope *scope;
JSBool ok;
rt = cx->runtime;
DBG_LOCK(rt); DBG_LOCK(rt);
for (wp = (JSWatchPoint *)rt->watchPointList.next; for (JSWatchPoint *wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList; &wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) { wp = (JSWatchPoint *)wp->links.next) {
sprop = wp->sprop; JSScopeProperty *sprop = wp->sprop;
if (wp->object == obj && SPROP_USERID(sprop) == id && if (wp->object == obj && SPROP_USERID(sprop) == id &&
!(wp->flags & JSWP_HELD)) { !(wp->flags & JSWP_HELD)) {
wp->flags |= JSWP_HELD; wp->flags |= JSWP_HELD;
DBG_UNLOCK(rt); DBG_UNLOCK(rt);
JS_LOCK_OBJ(cx, obj); JS_LOCK_OBJ(cx, obj);
propid = ID_TO_VALUE(sprop->id); jsval propid = ID_TO_VALUE(sprop->id);
userid = SPROP_USERID(sprop); jsval userid = SPROP_USERID(sprop);
scope = obj->scope(); JSScope *scope = obj->scope();
JS_UNLOCK_OBJ(cx, obj); JS_UNLOCK_OBJ(cx, obj);
/* NB: wp is held, so we can safely dereference it still. */ /* NB: wp is held, so we can safely dereference it still. */
ok = wp->handler(cx, obj, propid, if (!wp->handler(cx, obj, propid,
SPROP_HAS_VALID_SLOT(sprop, scope) SPROP_HAS_VALID_SLOT(sprop, scope)
? obj->getSlotMT(cx, sprop->slot) ? obj->getSlotMT(cx, sprop->slot)
: JSVAL_VOID, : JSVAL_VOID,
vp, wp->closure); vp, wp->closure)) {
if (ok) { DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
/*
* Create a pseudo-frame for the setter invocation so that any
* stack-walking security code under the setter will correctly
* identify the guilty party. So that the watcher appears to
* be active to obj_eval and other such code, point frame.pc
* at the JSOP_STOP at the end of the script.
*
* The pseudo-frame is not created for fast natives as they
* are treated as interpreter frame extensions and always
* trusted.
*/
JSObject *closure = wp->closure;
JSClass *clasp = closure->getClass();
JSFunction *fun;
JSScript *script;
if (clasp == &js_FunctionClass) {
fun = GET_FUNCTION_PRIVATE(cx, closure);
script = FUN_SCRIPT(fun);
} else if (clasp == &js_ScriptClass) {
fun = NULL;
script = (JSScript *) closure->getPrivate();
} else {
fun = NULL;
script = NULL;
}
uintN vplen = 2 + (fun ? (fun->minArgs() +
(fun->isInterpreted() ? 0
: fun->u.n.extra))
: 0);
uintN nslots = script ? script->nslots : 0;
/* Destructor pops frame. */
JSFrameRegs regs;
ExecuteFrameGuard frame;
if (fun && !fun->isFastNative()) {
/* /*
* Create a pseudo-frame for the setter invocation so that any * Get a pointer to new frame/slots. This memory is not
* stack-walking security code under the setter will correctly * "claimed", so the code before pushExecuteFrame must not
* identify the guilty party. So that the watcher appears to * reenter the interpreter.
* be active to obj_eval and other such code, point frame.pc
* at the JSOP_STOP at the end of the script.
*
* The pseudo-frame is not created for fast natives as they
* are treated as interpreter frame extensions and always
* trusted.
*/ */
JSObject *closure; JSStackFrame *down = js_GetTopStackFrame(cx);
JSClass *clasp; if (!cx->stack().getExecuteFrame(cx, down, vplen, nslots, frame)) {
JSFunction *fun; DBG_LOCK(rt);
JSScript *script; DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
JSBool injectFrame; return JS_FALSE;
uintN nslots, slotsStart;
jsval smallv[5];
jsval *argv;
JSStackFrame frame;
JSFrameRegs regs;
closure = wp->closure;
clasp = closure->getClass();
if (clasp == &js_FunctionClass) {
fun = GET_FUNCTION_PRIVATE(cx, closure);
script = FUN_SCRIPT(fun);
} else if (clasp == &js_ScriptClass) {
fun = NULL;
script = (JSScript *) closure->getPrivate();
} else {
fun = NULL;
script = NULL;
} }
slotsStart = nslots = 2; /* Initialize slots/frame. */
injectFrame = JS_TRUE; jsval *vp = frame.getvp();
if (fun) { memset(vp, 0, vplen * sizeof(jsval));
nslots += FUN_MINARGS(fun); vp[0] = OBJECT_TO_JSVAL(closure);
if (!FUN_INTERPRETED(fun)) { JSStackFrame *fp = frame.getFrame();
nslots += fun->u.n.extra; memset(fp->slots(), 0, nslots * sizeof(jsval));
injectFrame = !(fun->flags & JSFUN_FAST_NATIVE); memset(fp, 0, sizeof(JSStackFrame));
} fp->script = script;
fp->regs = NULL;
slotsStart = nslots; fp->fun = fun;
fp->argv = vp + 2;
fp->scopeChain = closure->getParent();
JS_ASSERT_IF(!fun, !script);
if (script) {
JS_ASSERT(script->length >= JSOP_STOP_LENGTH);
regs.pc = script->code + script->length - JSOP_STOP_LENGTH;
regs.sp = fp->slots() + script->nfixed;
fp->regs = &regs;
} }
if (script)
nslots += script->nslots;
if (injectFrame) { /* Officially push |fp|. |frame|'s destructor pops. */
if (nslots <= JS_ARRAY_LENGTH(smallv)) { cx->stack().pushExecuteFrame(cx, frame, NULL);
argv = smallv;
} else {
argv = (jsval *) cx->malloc(nslots * sizeof(jsval));
if (!argv) {
DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
}
argv[0] = OBJECT_TO_JSVAL(closure); /* Now that fp has been pushed, get the call object. */
argv[1] = JSVAL_NULL; if (script && fun && fun->isHeavyweight() &&
PodZero(argv + 2, nslots - 2); !js_GetCallObject(cx, fp)) {
DBG_LOCK(rt);
PodZero(&frame); DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
frame.script = script; return JS_FALSE;
frame.regs = NULL;
frame.fun = fun;
frame.argv = argv + 2;
frame.down = js_GetTopStackFrame(cx);
frame.scopeChain = closure->getParent();
if (script && script->nslots)
frame.slots = argv + slotsStart;
if (script) {
JS_ASSERT(script->length >= JSOP_STOP_LENGTH);
regs.pc = script->code + script->length
- JSOP_STOP_LENGTH;
regs.sp = NULL;
frame.regs = &regs;
if (fun &&
JSFUN_HEAVYWEIGHT_TEST(fun->flags) &&
!js_GetCallObject(cx, &frame)) {
if (argv != smallv)
cx->free(argv);
DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
}
cx->fp = &frame;
}
#ifdef __GNUC__
else
argv = NULL; /* suppress bogus gcc warnings */
#endif
ok = !wp->setter ||
(sprop->hasSetterValue()
? js_InternalCall(cx, obj,
js_CastAsObjectJSVal(wp->setter),
1, vp, vp)
: wp->setter(cx, obj, userid, vp));
if (injectFrame) {
/* Evil code can cause us to have an arguments object. */
frame.putActivationObjects(cx);
cx->fp = frame.down;
if (argv != smallv)
cx->free(argv);
} }
} }
JSBool ok = !wp->setter ||
(sprop->hasSetterValue()
? js_InternalCall(cx, obj,
js_CastAsObjectJSVal(wp->setter),
1, vp, vp)
: wp->setter(cx, obj, userid, vp));
/* Evil code can cause us to have an arguments object. */
if (frame.getFrame())
frame.getFrame()->putActivationObjects(cx);
DBG_LOCK(rt); DBG_LOCK(rt);
return DropWatchPointAndUnlock(cx, wp, JSWP_HELD) && ok; return DropWatchPointAndUnlock(cx, wp, JSWP_HELD) && ok;
} }
@@ -1203,6 +1181,8 @@ JS_GetFrameObject(JSContext *cx, JSStackFrame *fp)
JS_PUBLIC_API(JSObject *) JS_PUBLIC_API(JSObject *)
JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp) JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp)
{ {
JS_ASSERT(cx->stack().contains(fp));
/* Force creation of argument and call objects if not yet created */ /* Force creation of argument and call objects if not yet created */
(void) JS_GetFrameCallObject(cx, fp); (void) JS_GetFrameCallObject(cx, fp);
return js_GetScopeChain(cx, fp); return js_GetScopeChain(cx, fp);
@@ -1211,6 +1191,8 @@ JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp)
JS_PUBLIC_API(JSObject *) JS_PUBLIC_API(JSObject *)
JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fp) JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fp)
{ {
JS_ASSERT(cx->stack().contains(fp));
if (! fp->fun) if (! fp->fun)
return NULL; return NULL;

View File

@@ -81,6 +81,7 @@
#include "jsatominlines.h" #include "jsatominlines.h"
#include "jsobjinlines.h" #include "jsobjinlines.h"
#include "jscntxtinlines.h"
using namespace js; using namespace js;
@@ -669,8 +670,12 @@ args_enumerate(JSContext *cx, JSObject *obj)
#if JS_HAS_GENERATORS #if JS_HAS_GENERATORS
/* /*
* If a generator-iterator's arguments or call object escapes, it needs to * If a generator's arguments or call object escapes, and the generator frame
* mark its generator object. * is not executing, the generator object needs to be marked because it is not
* otherwise reachable. An executing generator is rooted by its invocation. To
* distinguish the two cases (which imply different access paths to the
* generator object), we use the JSFRAME_FLOATING_GENERATOR flag, which is only
* set on the JSStackFrame kept in the generator object's JSGenerator.
*/ */
static void static void
args_or_call_trace(JSTracer *trc, JSObject *obj) args_or_call_trace(JSTracer *trc, JSObject *obj)
@@ -683,9 +688,9 @@ args_or_call_trace(JSTracer *trc, JSObject *obj)
} }
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
if (fp && (fp->flags & JSFRAME_GENERATOR)) { if (fp && fp->isFloatingGenerator()) {
JS_CALL_OBJECT_TRACER(trc, FRAME_TO_GENERATOR(fp)->obj, JSObject *obj = js_FloatingFrameToGenerator(fp)->obj;
"FRAME_TO_GENERATOR(fp)->obj"); JS_CALL_OBJECT_TRACER(trc, obj, "generator object");
} }
} }
#else #else
@@ -810,7 +815,9 @@ js_GetCallObject(JSContext *cx, JSStackFrame *fp)
#ifdef DEBUG #ifdef DEBUG
/* A call object should be a frame's outermost scope chain element. */ /* A call object should be a frame's outermost scope chain element. */
JSClass *classp = fp->scopeChain->getClass(); JSClass *classp = fp->scopeChain->getClass();
if (classp == &js_WithClass || classp == &js_BlockClass || classp == &js_CallClass) if (classp == &js_WithClass || classp == &js_BlockClass)
JS_ASSERT(fp->scopeChain->getPrivate() != js_FloatingFrameIfGenerator(cx, fp));
else if (classp == &js_CallClass)
JS_ASSERT(fp->scopeChain->getPrivate() != fp); JS_ASSERT(fp->scopeChain->getPrivate() != fp);
#endif #endif
@@ -920,7 +927,7 @@ js_PutCallObject(JSContext *cx, JSStackFrame *fp)
if (n != 0) { if (n != 0) {
JS_ASSERT(callobj->numSlots() >= JS_INITIAL_NSLOTS + n); JS_ASSERT(callobj->numSlots() >= JS_INITIAL_NSLOTS + n);
n += JS_INITIAL_NSLOTS; n += JS_INITIAL_NSLOTS;
CopyValuesToCallObject(callobj, fun->nargs, fp->argv, fun->u.i.nvars, fp->slots); CopyValuesToCallObject(callobj, fun->nargs, fp->argv, fun->u.i.nvars, fp->slots());
} }
/* Clear private pointers to fp, which is about to go away (js_Invoke). */ /* Clear private pointers to fp, which is about to go away (js_Invoke). */
@@ -1080,7 +1087,7 @@ CallPropertyOp(JSContext *cx, JSObject *obj, jsid id, jsval *vp,
array = fp->argv; array = fp->argv;
} else { } else {
JS_ASSERT(kind == JSCPK_VAR); JS_ASSERT(kind == JSCPK_VAR);
array = fp->slots; array = fp->slots();
} }
} }
@@ -1963,9 +1970,8 @@ JSBool
js_fun_call(JSContext *cx, uintN argc, jsval *vp) js_fun_call(JSContext *cx, uintN argc, jsval *vp)
{ {
JSObject *obj; JSObject *obj;
jsval fval, *argv, *invokevp; jsval fval, *argv;
JSString *str; JSString *str;
void *mark;
JSBool ok; JSBool ok;
LeaveTrace(cx); LeaveTrace(cx);
@@ -2005,18 +2011,17 @@ js_fun_call(JSContext *cx, uintN argc, jsval *vp)
} }
/* Allocate stack space for fval, obj, and the args. */ /* Allocate stack space for fval, obj, and the args. */
invokevp = js_AllocStack(cx, 2 + argc, &mark); InvokeArgsGuard args;
if (!invokevp) if (!cx->stack().pushInvokeArgs(cx, argc, args))
return JS_FALSE; return JS_FALSE;
/* Push fval, obj, and the args. */ /* Push fval, obj, and the args. */
invokevp[0] = fval; args.getvp()[0] = fval;
invokevp[1] = OBJECT_TO_JSVAL(obj); args.getvp()[1] = OBJECT_TO_JSVAL(obj);
memcpy(invokevp + 2, argv, argc * sizeof *argv); memcpy(args.getvp() + 2, argv, argc * sizeof *argv);
ok = js_Invoke(cx, argc, invokevp, 0); ok = js_Invoke(cx, args, 0);
*vp = *invokevp; *vp = *args.getvp();
js_FreeStack(cx, mark);
return ok; return ok;
} }
@@ -2024,11 +2029,10 @@ JSBool
js_fun_apply(JSContext *cx, uintN argc, jsval *vp) js_fun_apply(JSContext *cx, uintN argc, jsval *vp)
{ {
JSObject *obj, *aobj; JSObject *obj, *aobj;
jsval fval, *invokevp, *sp; jsval fval, *sp;
JSString *str; JSString *str;
jsuint length; jsuint length;
JSBool arraylike, ok; JSBool arraylike;
void *mark;
uintN i; uintN i;
if (argc == 0) { if (argc == 0) {
@@ -2090,12 +2094,13 @@ js_fun_apply(JSContext *cx, uintN argc, jsval *vp)
/* Allocate stack space for fval, obj, and the args. */ /* Allocate stack space for fval, obj, and the args. */
argc = (uintN)JS_MIN(length, JS_ARGS_LENGTH_MAX); argc = (uintN)JS_MIN(length, JS_ARGS_LENGTH_MAX);
invokevp = js_AllocStack(cx, 2 + argc, &mark);
if (!invokevp) InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, args))
return JS_FALSE; return JS_FALSE;
/* Push fval, obj, and aobj's elements as args. */ /* Push fval, obj, and aobj's elements as args. */
sp = invokevp; sp = args.getvp();
*sp++ = fval; *sp++ = fval;
*sp++ = OBJECT_TO_JSVAL(obj); *sp++ = OBJECT_TO_JSVAL(obj);
if (aobj && aobj->isArguments()) { if (aobj && aobj->isArguments()) {
@@ -2121,17 +2126,14 @@ js_fun_apply(JSContext *cx, uintN argc, jsval *vp)
} }
} else { } else {
for (i = 0; i < argc; i++) { for (i = 0; i < argc; i++) {
ok = aobj->getProperty(cx, INT_TO_JSID(jsint(i)), sp); if (!aobj->getProperty(cx, INT_TO_JSID(jsint(i)), sp))
if (!ok) return JS_FALSE;
goto out;
sp++; sp++;
} }
} }
ok = js_Invoke(cx, argc, invokevp, 0); JSBool ok = js_Invoke(cx, args, 0);
*vp = *invokevp; *vp = *args.getvp();
out:
js_FreeStack(cx, mark);
return ok; return ok;
} }
@@ -2141,9 +2143,8 @@ fun_applyConstructor(JSContext *cx, uintN argc, jsval *vp)
{ {
JSObject *aobj; JSObject *aobj;
uintN length, i; uintN length, i;
void *mark;
jsval *invokevp, *sp;
JSBool ok; JSBool ok;
jsval *sp;
if (JSVAL_IS_PRIMITIVE(vp[2]) || if (JSVAL_IS_PRIMITIVE(vp[2]) ||
(aobj = JSVAL_TO_OBJECT(vp[2]), (aobj = JSVAL_TO_OBJECT(vp[2]),
@@ -2159,11 +2160,12 @@ fun_applyConstructor(JSContext *cx, uintN argc, jsval *vp)
if (length > JS_ARGS_LENGTH_MAX) if (length > JS_ARGS_LENGTH_MAX)
length = JS_ARGS_LENGTH_MAX; length = JS_ARGS_LENGTH_MAX;
invokevp = js_AllocStack(cx, 2 + length, &mark);
if (!invokevp)
return JS_FALSE; return JS_FALSE;
sp = invokevp; InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, length, args))
sp = args.getvp();
*sp++ = vp[1]; *sp++ = vp[1];
*sp++ = JSVAL_NULL; /* this is filled automagically */ *sp++ = JSVAL_NULL; /* this is filled automagically */
for (i = 0; i < length; i++) { for (i = 0; i < length; i++) {
@@ -2173,10 +2175,8 @@ fun_applyConstructor(JSContext *cx, uintN argc, jsval *vp)
sp++; sp++;
} }
ok = js_InvokeConstructor(cx, length, JS_TRUE, invokevp); JSBool ok = js_InvokeConstructor(cx, args, JS_TRUE);
*vp = *invokevp; *vp = *args.getvp();
out:
js_FreeStack(cx, mark);
return ok; return ok;
} }
#endif #endif

View File

@@ -163,6 +163,10 @@ struct JSFunction : public JSObject
bool optimizedClosure() const { return FUN_KIND(this) > JSFUN_INTERPRETED; } bool optimizedClosure() const { return FUN_KIND(this) > JSFUN_INTERPRETED; }
bool needsWrapper() const { return FUN_NULL_CLOSURE(this) && u.i.skipmin != 0; } bool needsWrapper() const { return FUN_NULL_CLOSURE(this) && u.i.skipmin != 0; }
bool isInterpreted() const { return FUN_INTERPRETED(this); }
bool isFastNative() const { return flags & JSFUN_FAST_NATIVE; }
bool isHeavyweight() const { return JSFUN_HEAVYWEIGHT_TEST(flags); }
uintN minArgs() const { return FUN_MINARGS(this); }
uintN countVars() const { uintN countVars() const {
JS_ASSERT(FUN_INTERPRETED(this)); JS_ASSERT(FUN_INTERPRETED(this));
@@ -443,8 +447,12 @@ const uint32 ARGS_FIXED_RESERVED_SLOTS = JSSLOT_ARGS_START - JSSLOT_ARGS_LENGTH;
* arguments that can be supplied via the second (so-called |argArray|) param * arguments that can be supplied via the second (so-called |argArray|) param
* to Function.prototype.apply. This value also bounds the number of elements * to Function.prototype.apply. This value also bounds the number of elements
* parsed in an array initialiser. * parsed in an array initialiser.
*
* The thread's stack is the limiting factor for this number. It is currently
* 2MB, which fits a little less than 2^19 arguments (once the stack frame,
* callstack, etc are included). Pick a max args length that is a little less.
*/ */
const uint32 JS_ARGS_LENGTH_MAX = JS_BIT(24) - 1; const uint32 JS_ARGS_LENGTH_MAX = JS_BIT(19) - 1024;
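Rough arithmetic behind the new bound (assuming the 2 MB stack cited in the
comment and 4-byte jsvals): 2 MB = 2^21 bytes holds 2^21 / 4 = 2^19 values, and
JS_BIT(19) - 1024 = 523264 stays just under that, leaving about a thousand
values of headroom for the frame, callstack and other bookkeeping.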
/* /*
* JSSLOT_ARGS_LENGTH stores ((argc << 1) | overwritten_flag) as int jsval. * JSSLOT_ARGS_LENGTH stores ((argc << 1) | overwritten_flag) as int jsval.

View File

@@ -136,8 +136,6 @@ JS_STATIC_ASSERT(JSVAL_NULL == 0);
JS_STATIC_ASSERT(FINALIZE_EXTERNAL_STRING_LAST - FINALIZE_EXTERNAL_STRING0 == JS_STATIC_ASSERT(FINALIZE_EXTERNAL_STRING_LAST - FINALIZE_EXTERNAL_STRING0 ==
JS_EXTERNAL_STRING_LIMIT - 1); JS_EXTERNAL_STRING_LIMIT - 1);
JS_STATIC_ASSERT(sizeof(JSStackHeader) >= 2 * sizeof(jsval));
/* /*
* A GC arena contains GC_ARENA_SIZE bytes aligned on GC_ARENA_SIZE boundary. * A GC arena contains GC_ARENA_SIZE bytes aligned on GC_ARENA_SIZE boundary.
* The arena holds thing of the same size, a JSGCArenaInfo descriptor and a * The arena holds thing of the same size, a JSGCArenaInfo descriptor and a
@@ -1273,7 +1271,6 @@ js_named_root_dumper(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number,
return JS_DHASH_NEXT; return JS_DHASH_NEXT;
} }
JS_BEGIN_EXTERN_C
void void
js_DumpNamedRoots(JSRuntime *rt, js_DumpNamedRoots(JSRuntime *rt,
void (*dump)(const char *name, void *rp, void *data), void (*dump)(const char *name, void *rp, void *data),
@@ -1285,7 +1282,6 @@ js_DumpNamedRoots(JSRuntime *rt,
args.data = data; args.data = data;
JS_DHashTableEnumerate(&rt->gcRootsHash, js_named_root_dumper, &args); JS_DHashTableEnumerate(&rt->gcRootsHash, js_named_root_dumper, &args);
} }
JS_END_EXTERN_C
#endif /* DEBUG */ #endif /* DEBUG */
@@ -2263,55 +2259,17 @@ TraceObjectVector(JSTracer *trc, JSObject **vec, uint32 len)
void void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp) js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{ {
uintN nslots, minargs, skip;
if (fp->callobj) if (fp->callobj)
JS_CALL_OBJECT_TRACER(trc, fp->callobj, "call"); JS_CALL_OBJECT_TRACER(trc, fp->callobj, "call");
if (fp->argsobj) if (fp->argsobj)
JS_CALL_OBJECT_TRACER(trc, JSVAL_TO_OBJECT(fp->argsobj), "arguments"); JS_CALL_OBJECT_TRACER(trc, JSVAL_TO_OBJECT(fp->argsobj), "arguments");
if (fp->script) { if (fp->script)
js_TraceScript(trc, fp->script); js_TraceScript(trc, fp->script);
/* fp->slots is null for watch pseudo-frames, see js_watch_set. */
if (fp->slots) {
/*
* Don't mark what has not been pushed yet, or what has been
* popped already.
*/
if (fp->regs && fp->regs->sp) {
nslots = (uintN) (fp->regs->sp - fp->slots);
JS_ASSERT(nslots >= fp->script->nfixed);
} else {
nslots = fp->script->nfixed;
}
TraceValues(trc, nslots, fp->slots, "slot");
}
} else {
JS_ASSERT(!fp->slots);
JS_ASSERT(!fp->regs);
}
/* Allow for primitive this parameter due to JSFUN_THISP_* flags. */ /* Allow for primitive this parameter due to JSFUN_THISP_* flags. */
JS_CALL_VALUE_TRACER(trc, fp->thisv, "this"); JS_CALL_VALUE_TRACER(trc, fp->thisv, "this");
if (fp->argv) {
JS_CALL_VALUE_TRACER(trc, fp->calleeValue(), "callee");
nslots = fp->argc;
skip = 0;
if (fp->fun) {
minargs = FUN_MINARGS(fp->fun);
if (minargs > nslots)
nslots = minargs;
if (!FUN_INTERPRETED(fp->fun)) {
JS_ASSERT(!(fp->fun->flags & JSFUN_FAST_NATIVE));
nslots += fp->fun->u.n.extra;
}
if (fp->fun->flags & JSFRAME_ROOTED_ARGV)
skip = 2 + fp->argc;
}
TraceValues(trc, 2 + nslots - skip, fp->argv - 2 + skip, "operand");
}
JS_CALL_VALUE_TRACER(trc, fp->rval, "rval"); JS_CALL_VALUE_TRACER(trc, fp->rval, "rval");
if (fp->scopeChain) if (fp->scopeChain)
JS_CALL_OBJECT_TRACER(trc, fp->scopeChain, "scope chain"); JS_CALL_OBJECT_TRACER(trc, fp->scopeChain, "scope chain");
@@ -2352,48 +2310,10 @@ JSWeakRoots::mark(JSTracer *trc)
js_CallValueTracerIfGCThing(trc, lastInternalResult); js_CallValueTracerIfGCThing(trc, lastInternalResult);
} }
static void inline void
TraceFrameChain(JSTracer *trc, JSStackFrame *fp)
{
do {
js_TraceStackFrame(trc, fp);
} while ((fp = fp->down) != NULL);
}
JS_REQUIRES_STACK JS_FRIEND_API(void)
js_TraceContext(JSTracer *trc, JSContext *acx) js_TraceContext(JSTracer *trc, JSContext *acx)
{ {
JSStackHeader *sh; /* Stack frames and slots are traced by StackSpace::mark. */
/*
* Trace active and suspended callstacks.
*
* Since js_GetTopStackFrame needs to dereference cx->thread to check for
* JIT frames, we check for non-null thread here and avoid null checks
* there. See bug 471197.
*/
#ifdef JS_THREADSAFE
if (acx->thread)
#endif
{
/* If |cx->fp|, the active callstack has newest (top) frame |cx->fp|. */
JSStackFrame *fp = js_GetTopStackFrame(acx);
if (fp) {
JS_ASSERT(!acx->activeCallStack()->isSuspended());
TraceFrameChain(trc, fp);
if (JSObject *o = acx->activeCallStack()->getInitialVarObj())
JS_CALL_OBJECT_TRACER(trc, o, "variables");
}
/* Trace suspended frames. */
CallStack *cur = acx->currentCallStack;
CallStack *cs = fp ? cur->getPrevious() : cur;
for (; cs; cs = cs->getPrevious()) {
TraceFrameChain(trc, cs->getSuspendedFrame());
if (cs->getInitialVarObj())
JS_CALL_OBJECT_TRACER(trc, cs->getInitialVarObj(), "var env");
}
}
/* Mark other roots-by-definition in acx. */ /* Mark other roots-by-definition in acx. */
if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL)) if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL))
@@ -2406,12 +2326,6 @@ js_TraceContext(JSTracer *trc, JSContext *acx)
acx->exception = JSVAL_NULL; acx->exception = JSVAL_NULL;
} }
for (sh = acx->stackHeaders; sh; sh = sh->down) {
METER(trc->context->runtime->gcStats.stackseg++);
METER(trc->context->runtime->gcStats.segslots += sh->nslots);
TraceValues(trc, sh->nslots, JS_STACK_SEGMENT(sh), "stack");
}
for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
gcr->trace(trc); gcr->trace(trc);

View File

@@ -50,8 +50,6 @@
#include "jstask.h" #include "jstask.h"
#include "jsversion.h" #include "jsversion.h"
JS_BEGIN_EXTERN_C
#define JSTRACE_XML 3 #define JSTRACE_XML 3
/* /*
@@ -453,13 +451,23 @@ js_DumpGCStats(JSRuntime *rt, FILE *fp);
extern void extern void
js_MarkTraps(JSTracer *trc); js_MarkTraps(JSTracer *trc);
JS_END_EXTERN_C
namespace js { namespace js {
void void
TraceObjectVector(JSTracer *trc, JSObject **vec, uint32 len); TraceObjectVector(JSTracer *trc, JSObject **vec, uint32 len);
inline void
TraceValues(JSTracer *trc, jsval *beg, jsval *end, const char *name)
{
for (jsval *vp = beg; vp < end; ++vp) {
jsval v = *vp;
if (JSVAL_IS_TRACEABLE(v)) {
JS_SET_TRACING_INDEX(trc, name, vp - beg);
js_CallGCMarker(trc, JSVAL_TO_TRACEABLE(v), JSVAL_TRACE_KIND(v));
}
}
}
inline void inline void
TraceValues(JSTracer *trc, size_t len, jsval *vec, const char *name) TraceValues(JSTracer *trc, size_t len, jsval *vec, const char *name)
{ {

(File diff suppressed because it is too large.)

View File

@@ -56,6 +56,21 @@ typedef struct JSFrameRegs {
jsval *sp; /* stack pointer */ jsval *sp; /* stack pointer */
} JSFrameRegs; } JSFrameRegs;
/* JS stack frame flags. */
#define JSFRAME_CONSTRUCTING 0x01 /* frame is for a constructor invocation */
#define JSFRAME_COMPUTED_THIS 0x02 /* frame.thisv was computed already and
JSVAL_IS_OBJECT(thisv) */
#define JSFRAME_ASSIGNING 0x04 /* a complex (not simplex JOF_ASSIGNING) op
is currently assigning to a property */
#define JSFRAME_DEBUGGER 0x08 /* frame for JS_EvaluateInStackFrame */
#define JSFRAME_EVAL 0x10 /* frame for obj_eval */
#define JSFRAME_FLOATING_GENERATOR 0x20 /* frame copy stored in a generator obj */
#define JSFRAME_YIELDING 0x40 /* js_Interpret dispatched JSOP_YIELD */
#define JSFRAME_ITERATOR 0x80 /* trying to get an iterator for for-in */
#define JSFRAME_GENERATOR 0x200 /* frame belongs to generator-iterator */
#define JSFRAME_OVERRIDE_ARGS 0x400 /* overridden arguments local variable */
#define JSFRAME_SPECIAL (JSFRAME_DEBUGGER | JSFRAME_EVAL)
/* /*
* JS stack frame, may be allocated on the C stack by native callers. Always * JS stack frame, may be allocated on the C stack by native callers. Always
@@ -67,21 +82,24 @@ typedef struct JSFrameRegs {
* sharp* and xml* members should be moved onto the stack as local variables * sharp* and xml* members should be moved onto the stack as local variables
* with well-known slots, if possible. * with well-known slots, if possible.
*/ */
struct JSStackFrame { struct JSStackFrame
JSFrameRegs *regs; {
jsbytecode *imacpc; /* null or interpreter macro call pc */ JSFrameRegs *regs;
jsval *slots; /* variables, locals and operand stack */ jsbytecode *imacpc; /* null or interpreter macro call pc */
JSObject *callobj; /* lazily created Call object */ JSObject *callobj; /* lazily created Call object */
jsval argsobj; /* lazily created arguments object, must be jsval argsobj; /* lazily created arguments object, must be
JSVAL_OBJECT */ JSVAL_OBJECT */
JSScript *script; /* script being interpreted */ JSScript *script; /* script being interpreted */
JSFunction *fun; /* function being called or null */ JSFunction *fun; /* function being called or null */
jsval thisv; /* "this" pointer if in method */ jsval thisv; /* "this" pointer if in method */
uintN argc; /* actual argument count */ uintN argc; /* actual argument count */
jsval *argv; /* base of argument stack slots */ jsval *argv; /* base of argument stack slots */
jsval rval; /* function return value */ jsval rval; /* function return value */
JSStackFrame *down; /* previous frame */ void *annotation; /* used by Java security */
void *annotation; /* used by Java security */
/* Maintained by StackSpace operations */
JSStackFrame *down; /* previous frame, part of
stack layout invariant */
/* /*
* We can't determine in advance which local variables can live on * We can't determine in advance which local variables can live on
@@ -129,6 +147,11 @@ struct JSStackFrame {
JSStackFrame *displaySave; /* previous value of display entry for JSStackFrame *displaySave; /* previous value of display entry for
script->staticLevel */ script->staticLevel */
/* Members only needed for inline calls. */
JSFrameRegs callerRegs; /* caller's regs for inline call */
void *hookData; /* debugger call hook data */
JSVersion callerVersion; /* dynamic version of calling script */
inline void assertValidStackDepth(uintN depth); inline void assertValidStackDepth(uintN depth);
void putActivationObjects(JSContext *cx) { void putActivationObjects(JSContext *cx) {
@@ -144,6 +167,14 @@ struct JSStackFrame {
} }
} }
jsval *argEnd() const {
return (jsval *)this;
}
jsval *slots() const {
return (jsval *)(this + 1);
}
jsval calleeValue() { jsval calleeValue() {
JS_ASSERT(argv); JS_ASSERT(argv);
return argv[-2]; return argv[-2];
@@ -163,14 +194,27 @@ struct JSStackFrame {
* VariableEnvironment (ES5 10.3). The given CallStack must contain this * VariableEnvironment (ES5 10.3). The given CallStack must contain this
* stack frame. * stack frame.
*/ */
JSObject *varobj(js::CallStack *cs); JSObject *varobj(js::CallStack *cs) const;
/* Short for: varobj(cx->activeCallStack()). */ /* Short for: varobj(cx->activeCallStack()). */
JSObject *varobj(JSContext *cx); JSObject *varobj(JSContext *cx) const;
inline JSObject *getThisObject(JSContext *cx); inline JSObject *getThisObject(JSContext *cx);
bool isGenerator() const { return flags & JSFRAME_GENERATOR; }
bool isFloatingGenerator() const {
JS_ASSERT_IF(flags & JSFRAME_FLOATING_GENERATOR, isGenerator());
return flags & JSFRAME_FLOATING_GENERATOR;
}
}; };
namespace js {
JS_STATIC_ASSERT(sizeof(JSStackFrame) % sizeof(jsval) == 0);
static const size_t ValuesPerStackFrame = sizeof(JSStackFrame) / sizeof(jsval);
}
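Because sizeof(JSStackFrame) is a whole number of jsvals, a frame header can
sit directly between the argument values below it and the local slots above
it. An illustrative consistency check of that layout (not part of the patch;
a debug-only sketch):

static inline void
AssertFrameLayout(JSStackFrame *fp)
{
    JS_ASSERT(fp->argv + fp->argc <= fp->argEnd());   /* args end at the frame */
    JS_ASSERT(fp->argEnd() == (jsval *) fp);
    JS_ASSERT(fp->slots() == (jsval *) (fp + 1));     /* slots follow the frame */
}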
#ifdef __cplusplus #ifdef __cplusplus
static JS_INLINE uintN static JS_INLINE uintN
FramePCOffset(JSStackFrame* fp) FramePCOffset(JSStackFrame* fp)
@@ -182,7 +226,7 @@ FramePCOffset(JSStackFrame* fp)
static JS_INLINE jsval * static JS_INLINE jsval *
StackBase(JSStackFrame *fp) StackBase(JSStackFrame *fp)
{ {
return fp->slots + fp->script->nfixed; return fp->slots() + fp->script->nfixed;
} }
#ifdef DEBUG #ifdef DEBUG
@@ -204,39 +248,6 @@ GlobalVarCount(JSStackFrame *fp)
return fp->script->nfixed; return fp->script->nfixed;
} }
typedef struct JSInlineFrame {
JSStackFrame frame; /* base struct */
JSFrameRegs callerRegs; /* parent's frame registers */
void *mark; /* mark before inline frame */
void *hookData; /* debugger call hook data */
JSVersion callerVersion; /* dynamic version of calling script */
} JSInlineFrame;
/* JS stack frame flags. */
#define JSFRAME_CONSTRUCTING 0x01 /* frame is for a constructor invocation */
#define JSFRAME_COMPUTED_THIS 0x02 /* frame.thisv was computed already and
JSVAL_IS_OBJECT(thisv) */
#define JSFRAME_ASSIGNING 0x04 /* a complex (not simplex JOF_ASSIGNING) op
is currently assigning to a property */
#define JSFRAME_DEBUGGER 0x08 /* frame for JS_EvaluateInStackFrame */
#define JSFRAME_EVAL 0x10 /* frame for obj_eval */
#define JSFRAME_ROOTED_ARGV 0x20 /* frame.argv is rooted by the caller */
#define JSFRAME_YIELDING 0x40 /* js_Interpret dispatched JSOP_YIELD */
#define JSFRAME_ITERATOR 0x80 /* trying to get an iterator for for-in */
#define JSFRAME_GENERATOR 0x200 /* frame belongs to generator-iterator */
#define JSFRAME_OVERRIDE_ARGS 0x400 /* overridden arguments local variable */
#define JSFRAME_SPECIAL (JSFRAME_DEBUGGER | JSFRAME_EVAL)
/*
* Interpreter stack arena-pool alloc and free functions.
*/
extern JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
js_AllocStack(JSContext *cx, uintN nslots, void **markp);
extern JS_REQUIRES_STACK JS_FRIEND_API(void)
js_FreeStack(JSContext *cx, void *mark);
/* /*
* Refresh and return fp->scopeChain. It may be stale if block scopes are * Refresh and return fp->scopeChain. It may be stale if block scopes are
* active but not yet reflected by objects in the scope chain. If a block * active but not yet reflected by objects in the scope chain. If a block
@@ -277,16 +288,15 @@ extern const uint16 js_PrimitiveTestFlags[];
js_PrimitiveTestFlags[JSVAL_TAG(thisv) - 1])) js_PrimitiveTestFlags[JSVAL_TAG(thisv) - 1]))
/* /*
* NB: js_Invoke requires that cx is currently running JS (i.e., that cx->fp * The js::InvokeArgumentsGuard passed to js_Invoke must come from an
* is non-null), and that vp points to the callee, |this| parameter, and * immediately-enclosing successful call to js::StackSpace::pushInvokeArgs,
* actual arguments of the call. [vp .. vp + 2 + argc) must belong to the last * i.e., there must have been no un-popped pushes to cx->stack(). Furthermore,
* JS stack segment that js_AllocStack allocated. The function may use the * |args.getvp()[0]| should be the callee, |args.getvp()[1]| should be |this|,
* space available after vp + 2 + argc in the stack segment for temporaries, * and the range [args.getvp() + 2, args.getvp() + 2 + args.getArgc()) should
* so the caller should not use that space for values that must be preserved * be initialized actual arguments.
* across the call.
*/ */
extern JS_REQUIRES_STACK JS_FRIEND_API(JSBool) extern JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags); js_Invoke(JSContext *cx, const js::InvokeArgsGuard &args, uintN flags);
/* /*
* Consolidated js_Invoke flags simply rename certain JSFRAME_* flags, so that * Consolidated js_Invoke flags simply rename certain JSFRAME_* flags, so that
@@ -332,7 +342,8 @@ js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
JSStackFrame *down, uintN flags, jsval *result); JSStackFrame *down, uintN flags, jsval *result);
extern JS_REQUIRES_STACK JSBool extern JS_REQUIRES_STACK JSBool
js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp); js_InvokeConstructor(JSContext *cx, const js::InvokeArgsGuard &args,
JSBool clampReturn);
extern JS_REQUIRES_STACK JSBool extern JS_REQUIRES_STACK JSBool
js_Interpret(JSContext *cx); js_Interpret(JSContext *cx);
@@ -386,12 +397,6 @@ js_GetUpvar(JSContext *cx, uintN level, uintN cookie);
#else #else
# define JS_STATIC_INTERPRET # define JS_STATIC_INTERPRET
extern JS_REQUIRES_STACK jsval *
js_AllocRawStack(JSContext *cx, uintN nslots, void **markp);
extern JS_REQUIRES_STACK void
js_FreeRawStack(JSContext *cx, void *mark);
/* /*
* ECMA requires "the global object", but in embeddings such as the browser, * ECMA requires "the global object", but in embeddings such as the browser,
* which have multiple top-level objects (windows, frames, etc. in the DOM), * which have multiple top-level objects (windows, frames, etc. in the DOM),

View File

@@ -682,19 +682,17 @@ generator_trace(JSTracer *trc, JSObject *obj)
return; return;
/* /*
* js_TraceStackFrame does not recursively trace the down-linked frame * Do not mark if the generator is running; the contents may be trash and
* chain, so we insist that gen->frame has no parent to trace when the * will be replaced when the generator stops.
* generator is not running.
*/ */
JS_ASSERT_IF(gen->state != JSGEN_RUNNING && gen->state != JSGEN_CLOSING, if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
!gen->frame.down); return;
/* JSStackFrame *fp = gen->getFloatingFrame();
* FIXME be 390950. Generator's frame is a part of the JS stack when the JS_ASSERT(gen->getLiveFrame() == fp);
* generator is running or closing. Thus tracing the frame in this case TraceValues(trc, gen->floatingStack, fp->argEnd(), "generator slots");
* here duplicates the work done in js_TraceContext. js_TraceStackFrame(trc, fp);
*/ TraceValues(trc, fp->slots(), gen->savedRegs.sp, "generator slots");
js_TraceStackFrame(trc, &gen->frame);
} }
JS_FRIEND_DATA(JSClass) js_GeneratorClass = { JS_FRIEND_DATA(JSClass) js_GeneratorClass = {
@@ -718,87 +716,83 @@ JS_FRIEND_DATA(JSClass) js_GeneratorClass = {
JS_REQUIRES_STACK JSObject * JS_REQUIRES_STACK JSObject *
js_NewGenerator(JSContext *cx) js_NewGenerator(JSContext *cx)
{ {
JSObject *obj; JSObject *obj = js_NewObject(cx, &js_GeneratorClass, NULL, NULL);
uintN argc, nargs, nslots;
JSGenerator *gen;
jsval *slots;
obj = js_NewObject(cx, &js_GeneratorClass, NULL, NULL);
if (!obj) if (!obj)
return NULL; return NULL;
/* Load and compute stack slot counts. */ /* Load and compute stack slot counts. */
JSStackFrame *fp = cx->fp; JSStackFrame *fp = cx->fp;
argc = fp->argc; uintN argc = fp->argc;
nargs = JS_MAX(argc, fp->fun->nargs); uintN nargs = JS_MAX(argc, fp->fun->nargs);
nslots = 2 + nargs + fp->script->nslots; uintN vplen = 2 + nargs;
/* Allocate obj's private data struct. */ /* Compute JSGenerator size. */
gen = (JSGenerator *) uintN nbytes = sizeof(JSGenerator) +
cx->malloc(sizeof(JSGenerator) + (nslots - 1) * sizeof(jsval)); (-1 + /* one jsval included in JSGenerator */
vplen +
ValuesPerStackFrame +
fp->script->nslots) * sizeof(jsval);
JSGenerator *gen = (JSGenerator *) cx->malloc(nbytes);
if (!gen) if (!gen)
return NULL; return NULL;
gen->obj = obj; /* Cut up floatingStack space. */
jsval *vp = gen->floatingStack;
JSStackFrame *newfp = reinterpret_cast<JSStackFrame *>(vp + vplen);
jsval *slots = newfp->slots();
/* Steal away objects reflecting fp and point them at gen->frame. */ /* Initialize JSGenerator. */
gen->frame.callobj = fp->callobj; gen->obj = obj;
if (fp->callobj) { gen->state = JSGEN_NEWBORN;
fp->callobj->setPrivate(&gen->frame); gen->savedRegs.pc = fp->regs->pc;
JS_ASSERT(fp->regs->sp == fp->slots() + fp->script->nfixed);
gen->savedRegs.sp = slots + fp->script->nfixed;
gen->vplen = vplen;
gen->liveFrame = newfp;
/* Copy generator's stack frame copy in from |cx->fp|. */
newfp->regs = &gen->savedRegs;
newfp->imacpc = NULL;
newfp->callobj = fp->callobj;
if (fp->callobj) { /* Steal call object. */
fp->callobj->setPrivate(newfp);
fp->callobj = NULL; fp->callobj = NULL;
} }
gen->frame.argsobj = fp->argsobj; newfp->argsobj = fp->argsobj;
if (fp->argsobj) { if (fp->argsobj) { /* Steal args object. */
JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(&gen->frame); JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(newfp);
fp->argsobj = NULL; fp->argsobj = NULL;
} }
newfp->script = fp->script;
/* These two references can be shared with fp until it goes away. */ newfp->fun = fp->fun;
gen->frame.thisv = fp->thisv; newfp->thisv = fp->thisv;
newfp->argc = fp->argc;
/* Copy call-invariant script and function references. */ newfp->argv = vp + 2;
gen->frame.script = fp->script; newfp->rval = fp->rval;
gen->frame.fun = fp->fun; newfp->annotation = NULL;
newfp->scopeChain = fp->scopeChain;
/* Use slots to carve space out of gen->slots. */
slots = gen->slots;
gen->arena.next = NULL;
gen->arena.base = (jsuword) slots;
gen->arena.limit = gen->arena.avail = (jsuword) (slots + nslots);
/* Copy rval, argv and vars. */
gen->frame.rval = fp->rval;
memcpy(slots, fp->argv - 2, (2 + nargs) * sizeof(jsval));
gen->frame.argc = fp->argc;
gen->frame.argv = slots + 2;
slots += 2 + nargs;
memcpy(slots, fp->slots, fp->script->nfixed * sizeof(jsval));
/* Initialize or copy virtual machine state. */
gen->frame.down = NULL;
gen->frame.annotation = NULL;
gen->frame.scopeChain = fp->scopeChain;
gen->frame.imacpc = NULL;
gen->frame.slots = slots;
JS_ASSERT(StackBase(fp) == fp->regs->sp);
gen->savedRegs.sp = slots + fp->script->nfixed;
gen->savedRegs.pc = fp->regs->pc;
gen->frame.regs = &gen->savedRegs;
gen->frame.flags = (fp->flags & ~JSFRAME_ROOTED_ARGV) | JSFRAME_GENERATOR;
/* JSOP_GENERATOR appears in the prologue, outside all blocks. */
JS_ASSERT(!fp->blockChain); JS_ASSERT(!fp->blockChain);
gen->frame.blockChain = NULL; newfp->blockChain = NULL;
newfp->flags = fp->flags | JSFRAME_GENERATOR | JSFRAME_FLOATING_GENERATOR;
/* Note that gen is newborn. */ /* Copy in arguments and slots. */
gen->state = JSGEN_NEWBORN; memcpy(vp, fp->argv - 2, vplen * sizeof(jsval));
memcpy(slots, fp->slots(), fp->script->nfixed * sizeof(jsval));
obj->setPrivate(gen); obj->setPrivate(gen);
return obj; return obj;
} }
JSGenerator *
js_FloatingFrameToGenerator(JSStackFrame *fp)
{
JS_ASSERT(fp->isGenerator() && fp->isFloatingGenerator());
char *floatingStackp = (char *)(fp->argv - 2);
char *p = floatingStackp - offsetof(JSGenerator, floatingStack);
return reinterpret_cast<JSGenerator *>(p);
}
typedef enum JSGeneratorOp { typedef enum JSGeneratorOp {
JSGENOP_NEXT, JSGENOP_NEXT,
JSGENOP_SEND, JSGENOP_SEND,
@@ -814,17 +808,17 @@ static JS_REQUIRES_STACK JSBool
SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj, SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
JSGenerator *gen, jsval arg) JSGenerator *gen, jsval arg)
{ {
JSStackFrame *fp;
JSArena *arena;
JSBool ok;
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) { if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_NESTING_GENERATOR, js_ReportValueError(cx, JSMSG_NESTING_GENERATOR,
JSDVG_SEARCH_STACK, OBJECT_TO_JSVAL(obj), JSDVG_SEARCH_STACK, OBJECT_TO_JSVAL(obj),
JS_GetFunctionId(gen->frame.fun)); JS_GetFunctionId(gen->getFloatingFrame()->fun));
return JS_FALSE; return JS_FALSE;
} }
/* Check for OOM errors here, where we can fail easily. */
if (!cx->ensureGeneratorStackSpace())
return JS_FALSE;
JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN); JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
switch (op) { switch (op) {
case JSGENOP_NEXT: case JSGENOP_NEXT:
@@ -851,40 +845,94 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
break; break;
} }
/* Extend the current stack pool with gen->arena. */ JSStackFrame *genfp = gen->getFloatingFrame();
arena = cx->stackPool.current; JSBool ok;
JS_ASSERT(!arena->next); {
JS_ASSERT(!gen->arena.next); jsval *genVp = gen->floatingStack;
JS_ASSERT(cx->stackPool.current != &gen->arena); uintN vplen = gen->vplen;
cx->stackPool.current = arena->next = &gen->arena; uintN nslots = genfp->script->nslots;
/* Push gen->frame around the interpreter activation. */ /*
fp = js_GetTopStackFrame(cx); * Get a pointer to new frame/slots. This memory is not "claimed", so
cx->fp = &gen->frame; * the code before pushExecuteFrame must not reenter the interpreter.
gen->frame.down = fp; */
ok = js_Interpret(cx); ExecuteFrameGuard frame;
cx->fp = fp; if (!cx->stack().getExecuteFrame(cx, cx->fp, vplen, nslots, frame)) {
gen->frame.down = NULL; gen->state = JSGEN_CLOSED;
return JS_FALSE;
}
/* Retract the stack pool and sanitize gen->arena. */ jsval *vp = frame.getvp();
JS_ASSERT(!gen->arena.next); JSStackFrame *fp = frame.getFrame();
JS_ASSERT(arena->next == &gen->arena);
JS_ASSERT(cx->stackPool.current == &gen->arena);
cx->stackPool.current = arena;
arena->next = NULL;
if (gen->frame.flags & JSFRAME_YIELDING) { /*
* Copy and rebase stack frame/args/slots. The "floating" flag must
* only be set on the generator's frame. See args_or_call_trace.
*/
uintN usedBefore = gen->savedRegs.sp - genVp;
memcpy(vp, genVp, usedBefore * sizeof(jsval));
fp->flags &= ~JSFRAME_FLOATING_GENERATOR;
fp->argv = vp + 2;
fp->regs = &gen->savedRegs;
gen->savedRegs.sp = fp->slots() + (gen->savedRegs.sp - genfp->slots());
JS_ASSERT(uintN(gen->savedRegs.sp - fp->slots()) <= fp->script->nslots);
#ifdef DEBUG
JSObject *callobjBefore = fp->callobj;
jsval argsobjBefore = fp->argsobj;
#endif
/*
* Repoint Call, Arguments, Block and With objects to the new live
* frame. Call and Arguments are done directly because we have
* pointers to them. Block and With objects are done indirectly through
* 'liveFrame'. See the comment above js_FloatingFrameIfGenerator in jsiter.h.
*/
if (genfp->callobj)
fp->callobj->setPrivate(fp);
if (genfp->argsobj)
JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(fp);
gen->liveFrame = fp;
(void)cx->enterGenerator(gen); /* OOM check above. */
/* Officially push |fp|. |frame|'s destructor pops. */
cx->stack().pushExecuteFrame(cx, frame, NULL);
ok = js_Interpret(cx);
/* Restore call/args/block objects. */
cx->leaveGenerator(gen);
gen->liveFrame = genfp;
if (fp->argsobj)
JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(genfp);
if (fp->callobj)
fp->callobj->setPrivate(genfp);
JS_ASSERT_IF(argsobjBefore, argsobjBefore == fp->argsobj);
JS_ASSERT_IF(callobjBefore, callobjBefore == fp->callobj);
/* Copy and rebase stack frame/args/slots. Restore "floating" flag. */
JS_ASSERT(uintN(gen->savedRegs.sp - fp->slots()) <= fp->script->nslots);
uintN usedAfter = gen->savedRegs.sp - vp;
memcpy(genVp, vp, usedAfter * sizeof(jsval));
genfp->flags |= JSFRAME_FLOATING_GENERATOR;
genfp->argv = genVp + 2;
gen->savedRegs.sp = genfp->slots() + (gen->savedRegs.sp - fp->slots());
JS_ASSERT(uintN(gen->savedRegs.sp - genfp->slots()) <= genfp->script->nslots);
}
if (gen->getFloatingFrame()->flags & JSFRAME_YIELDING) {
/* Yield cannot fail, throw or be called on closing. */ /* Yield cannot fail, throw or be called on closing. */
JS_ASSERT(ok); JS_ASSERT(ok);
JS_ASSERT(!cx->throwing); JS_ASSERT(!cx->throwing);
JS_ASSERT(gen->state == JSGEN_RUNNING); JS_ASSERT(gen->state == JSGEN_RUNNING);
JS_ASSERT(op != JSGENOP_CLOSE); JS_ASSERT(op != JSGENOP_CLOSE);
gen->frame.flags &= ~JSFRAME_YIELDING; genfp->flags &= ~JSFRAME_YIELDING;
gen->state = JSGEN_OPEN; gen->state = JSGEN_OPEN;
return JS_TRUE; return JS_TRUE;
} }
gen->frame.rval = JSVAL_VOID; genfp->rval = JSVAL_VOID;
gen->state = JSGEN_CLOSED; gen->state = JSGEN_CLOSED;
if (ok) { if (ok) {
/* Returned, explicitly or by falling off the end. */ /* Returned, explicitly or by falling off the end. */
@@ -977,7 +1025,7 @@ generator_op(JSContext *cx, JSGeneratorOp op, jsval *vp, uintN argc)
: JSVAL_VOID; : JSVAL_VOID;
if (!SendToGenerator(cx, op, obj, gen, arg)) if (!SendToGenerator(cx, op, obj, gen, arg))
return JS_FALSE; return JS_FALSE;
*vp = gen->frame.rval; *vp = gen->getFloatingFrame()->rval;
return JS_TRUE; return JS_TRUE;
} }

View File

@@ -108,18 +108,57 @@ typedef enum JSGeneratorState {
struct JSGenerator { struct JSGenerator {
JSObject *obj; JSObject *obj;
JSGeneratorState state; JSGeneratorState state;
JSStackFrame frame;
JSFrameRegs savedRegs; JSFrameRegs savedRegs;
JSArena arena; uintN vplen;
jsval slots[1]; JSStackFrame *liveFrame;
}; jsval floatingStack[1];
#define FRAME_TO_GENERATOR(fp) \ JSStackFrame *getFloatingFrame() {
((JSGenerator *) ((uint8 *)(fp) - offsetof(JSGenerator, frame))) return reinterpret_cast<JSStackFrame *>(floatingStack + vplen);
}
JSStackFrame *getLiveFrame() {
JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) ==
(liveFrame != getFloatingFrame()));
return liveFrame;
}
};
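The floatingStack member is the start of one trailing allocation whose size
js_NewGenerator computes as sketched below (hypothetical helper restating that
computation; one jsval is already counted inside sizeof(JSGenerator)):

static inline size_t
GeneratorAllocSize(uintN vplen, uintN nslots)
{
    /* vplen jsvals (callee, |this|, args), then the copied frame, then slots. */
    return sizeof(JSGenerator) +
           (-1 + vplen + js::ValuesPerStackFrame + nslots) * sizeof(jsval);
}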
extern JSObject * extern JSObject *
js_NewGenerator(JSContext *cx); js_NewGenerator(JSContext *cx);
/*
* Generator stack frames do not have stable pointers since they get copied to
* and from the generator object and the stack (see SendToGenerator). This is a
* problem for Block and With objects, which need to store a pointer to the
* enclosing stack frame. The solution is for Block and With objects to store
* a pointer to the "floating" stack frame stored in the generator object,
* since it is stable, and maintain, in the generator object, a pointer to the
* "live" stack frame (either a copy on the stack or the floating frame). Thus,
* Block and With objects must "normalize" to and from the floating/live frames
* in the case of generators using the following functions.
*/
inline JSStackFrame *
js_FloatingFrameIfGenerator(JSContext *cx, JSStackFrame *fp)
{
JS_ASSERT(cx->stack().contains(fp));
if (JS_UNLIKELY(fp->isGenerator()))
return cx->generatorFor(fp)->getFloatingFrame();
return fp;
}
/* Given a floating frame, return the JSGenerator containing it. */
extern JSGenerator *
js_FloatingFrameToGenerator(JSStackFrame *fp);
inline JSStackFrame *
js_LiveFrameIfGenerator(JSStackFrame *fp)
{
if (fp->flags & JSFRAME_GENERATOR)
return js_FloatingFrameToGenerator(fp)->getLiveFrame();
return fp;
}
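Call sites later in the patch (js_NewWithObject, js_CloneBlockObject,
block_getProperty and block_setProperty) follow the pattern the comment above
describes; a condensed sketch, with the helper names invented here:

/* Store the stable (floating) frame in a Block/With object... */
static void
SetScopeObjectFrame(JSContext *cx, JSObject *blockOrWith, JSStackFrame *fp)
{
    blockOrWith->setPrivate(js_FloatingFrameIfGenerator(cx, fp));
}

/* ...but always read and write slots through the live frame. */
static jsval
GetBlockSlot(JSContext *cx, JSObject *block, uintN index)
{
    JSStackFrame *fp = (JSStackFrame *) block->getPrivate();
    fp = js_LiveFrameIfGenerator(fp);
    index += fp->script->nfixed + OBJ_BLOCK_DEPTH(cx, block);
    return fp->slots()[index];
}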
#endif #endif
extern JS_FRIEND_API(JSClass) js_GeneratorClass; extern JS_FRIEND_API(JSClass) js_GeneratorClass;

View File

@@ -3323,7 +3323,7 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth)
obj = js_NewObject(cx, &js_WithClass, proto, parent); obj = js_NewObject(cx, &js_WithClass, proto, parent);
if (!obj) if (!obj)
return NULL; return NULL;
obj->setPrivate(cx->fp); obj->setPrivate(js_FloatingFrameIfGenerator(cx, cx->fp));
OBJ_SET_BLOCK_DEPTH(cx, obj, depth); OBJ_SET_BLOCK_DEPTH(cx, obj, depth);
return obj; return obj;
} }
@@ -3351,7 +3351,8 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
return NULL; return NULL;
/* The caller sets parent on its own. */ /* The caller sets parent on its own. */
clone->init(&js_BlockClass, proto, NULL, reinterpret_cast<jsval>(fp)); jsval priv = (jsval)js_FloatingFrameIfGenerator(cx, fp);
clone->init(&js_BlockClass, proto, NULL, priv);
clone->fslots[JSSLOT_BLOCK_DEPTH] = proto->fslots[JSSLOT_BLOCK_DEPTH]; clone->fslots[JSSLOT_BLOCK_DEPTH] = proto->fslots[JSSLOT_BLOCK_DEPTH];
JS_ASSERT(cx->runtime->emptyBlockScope->freeslot == JSSLOT_BLOCK_DEPTH + 1); JS_ASSERT(cx->runtime->emptyBlockScope->freeslot == JSSLOT_BLOCK_DEPTH + 1);
@@ -3374,7 +3375,7 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
fp = cx->fp; fp = cx->fp;
obj = fp->scopeChain; obj = fp->scopeChain;
JS_ASSERT(obj->getClass() == &js_BlockClass); JS_ASSERT(obj->getClass() == &js_BlockClass);
JS_ASSERT(obj->getPrivate() == cx->fp); JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp));
JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj)); JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
/* /*
@@ -3398,14 +3399,14 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
JS_ASSERT(count >= 1); JS_ASSERT(count >= 1);
depth += fp->script->nfixed; depth += fp->script->nfixed;
obj->fslots[JSSLOT_BLOCK_DEPTH + 1] = fp->slots[depth]; obj->fslots[JSSLOT_BLOCK_DEPTH + 1] = fp->slots()[depth];
if (normalUnwind && count > 1) { if (normalUnwind && count > 1) {
--count; --count;
JS_LOCK_OBJ(cx, obj); JS_LOCK_OBJ(cx, obj);
if (!AllocSlots(cx, obj, JS_INITIAL_NSLOTS + count)) if (!AllocSlots(cx, obj, JS_INITIAL_NSLOTS + count))
normalUnwind = JS_FALSE; normalUnwind = JS_FALSE;
else else
memcpy(obj->dslots, fp->slots + depth + 1, count * sizeof(jsval)); memcpy(obj->dslots, fp->slots() + depth + 1, count * sizeof(jsval));
JS_UNLOCK_OBJ(cx, obj); JS_UNLOCK_OBJ(cx, obj);
} }
@@ -3430,9 +3431,10 @@ block_getProperty(JSContext *cx, JSObject *obj, jsval id, jsval *vp)
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
if (fp) { if (fp) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->script->nfixed + OBJ_BLOCK_DEPTH(cx, obj); index += fp->script->nfixed + OBJ_BLOCK_DEPTH(cx, obj);
JS_ASSERT(index < fp->script->nslots); JS_ASSERT(index < fp->script->nslots);
*vp = fp->slots[index]; *vp = fp->slots()[index];
return true; return true;
} }
@@ -3455,9 +3457,10 @@ block_setProperty(JSContext *cx, JSObject *obj, jsval id, jsval *vp)
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate(); JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
if (fp) { if (fp) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->script->nfixed + OBJ_BLOCK_DEPTH(cx, obj); index += fp->script->nfixed + OBJ_BLOCK_DEPTH(cx, obj);
JS_ASSERT(index < fp->script->nslots); JS_ASSERT(index < fp->script->nslots);
fp->slots[index] = *vp; fp->slots()[index] = *vp;
return true; return true;
} }
@@ -7050,11 +7053,11 @@ js_DumpStackFrame(JSStackFrame *fp)
fprintf(stderr, "pc = %p\n", pc); fprintf(stderr, "pc = %p\n", pc);
fprintf(stderr, " current op: %s\n", js_CodeName[*pc]); fprintf(stderr, " current op: %s\n", js_CodeName[*pc]);
} }
if (sp && fp->slots) { if (sp && fp->slots()) {
fprintf(stderr, " slots: %p\n", (void *) fp->slots); fprintf(stderr, " slots: %p\n", (void *) fp->slots());
fprintf(stderr, " sp: %p = slots + %u\n", (void *) sp, (unsigned) (sp - fp->slots)); fprintf(stderr, " sp: %p = slots + %u\n", (void *) sp, (unsigned) (sp - fp->slots()));
if (sp - fp->slots < 10000) { // sanity if (sp - fp->slots() < 10000) { // sanity
for (jsval *p = fp->slots; p < sp; p++) { for (jsval *p = fp->slots(); p < sp; p++) {
fprintf(stderr, " %p: ", (void *) p); fprintf(stderr, " %p: ", (void *) p);
dumpValue(*p); dumpValue(*p);
fputc('\n', stderr); fputc('\n', stderr);
@@ -7062,7 +7065,7 @@ js_DumpStackFrame(JSStackFrame *fp)
} }
} else { } else {
fprintf(stderr, " sp: %p\n", (void *) sp); fprintf(stderr, " sp: %p\n", (void *) sp);
fprintf(stderr, " slots: %p\n", (void *) fp->slots); fprintf(stderr, " slots: %p\n", (void *) fp->slots());
} }
fprintf(stderr, " argv: %p (argc: %u)\n", (void *) fp->argv, (unsigned) fp->argc); fprintf(stderr, " argv: %p (argc: %u)\n", (void *) fp->argv, (unsigned) fp->argc);
MaybeDumpObject("callobj", fp->callobj); MaybeDumpObject("callobj", fp->callobj);
@@ -7085,8 +7088,6 @@ js_DumpStackFrame(JSStackFrame *fp)
fprintf(stderr, " debugger"); fprintf(stderr, " debugger");
if (fp->flags & JSFRAME_EVAL) if (fp->flags & JSFRAME_EVAL)
fprintf(stderr, " eval"); fprintf(stderr, " eval");
if (fp->flags & JSFRAME_ROOTED_ARGV)
fprintf(stderr, " rooted_argv");
if (fp->flags & JSFRAME_YIELDING) if (fp->flags & JSFRAME_YIELDING)
fprintf(stderr, " yielding"); fprintf(stderr, " yielding");
if (fp->flags & JSFRAME_ITERATOR) if (fp->flags & JSFRAME_ITERATOR)

View File

@@ -148,7 +148,7 @@ BEGIN_CASE(JSOP_POPN)
clasp = obj->getClass(); clasp = obj->getClass();
if (clasp != &js_BlockClass && clasp != &js_WithClass) if (clasp != &js_BlockClass && clasp != &js_WithClass)
continue; continue;
if (obj->getPrivate() != fp) if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, fp))
break; break;
JS_ASSERT(StackBase(fp) + OBJ_BLOCK_DEPTH(cx, obj) JS_ASSERT(StackBase(fp) + OBJ_BLOCK_DEPTH(cx, obj)
+ ((clasp == &js_BlockClass) + ((clasp == &js_BlockClass)
@@ -208,7 +208,7 @@ BEGIN_CASE(JSOP_STOP)
JS_ASSERT(op == JSOP_STOP); JS_ASSERT(op == JSOP_STOP);
end_imacro: end_imacro:
JS_ASSERT((uintN)(regs.sp - fp->slots) <= script->nslots); JS_ASSERT((uintN)(regs.sp - fp->slots()) <= script->nslots);
regs.pc = fp->imacpc + js_CodeSpec[*fp->imacpc].length; regs.pc = fp->imacpc + js_CodeSpec[*fp->imacpc].length;
fp->imacpc = NULL; fp->imacpc = NULL;
atoms = script->atomMap.vector; atoms = script->atomMap.vector;
@@ -223,16 +223,15 @@ BEGIN_CASE(JSOP_STOP)
if (inlineCallCount) if (inlineCallCount)
inline_return: inline_return:
{ {
JSInlineFrame *ifp = (JSInlineFrame *) fp;
void *hookData = ifp->hookData;
JS_ASSERT(!fp->blockChain); JS_ASSERT(!fp->blockChain);
JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0)); JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0));
JS_ASSERT(fp->down->regs == &fp->callerRegs);
if (script->staticLevel < JS_DISPLAY_SIZE) if (JS_LIKELY(script->staticLevel < JS_DISPLAY_SIZE))
cx->display[script->staticLevel] = fp->displaySave; cx->display[script->staticLevel] = fp->displaySave;
if (hookData) { void *hookData = fp->hookData;
if (JS_UNLIKELY(hookData != NULL)) {
JSInterpreterHook hook; JSInterpreterHook hook;
JSBool status; JSBool status;
@@ -267,7 +266,7 @@ BEGIN_CASE(JSOP_STOP)
/* Restore context version only if callee hasn't set version. */ /* Restore context version only if callee hasn't set version. */
if (JS_LIKELY(cx->version == currentVersion)) { if (JS_LIKELY(cx->version == currentVersion)) {
currentVersion = ifp->callerVersion; currentVersion = fp->callerVersion;
if (currentVersion != cx->version) if (currentVersion != cx->version)
js_SetVersion(cx, currentVersion); js_SetVersion(cx, currentVersion);
} }
@@ -282,22 +281,20 @@ BEGIN_CASE(JSOP_STOP)
JS_RUNTIME_METER(cx->runtime, constructs); JS_RUNTIME_METER(cx->runtime, constructs);
} }
JSStackFrame *down = fp->down;
bool recursive = fp->script == down->script;
/* Restore caller's registers. */ /* Restore caller's registers. */
regs = ifp->callerRegs; regs = fp->callerRegs;
regs.sp -= 1 + (size_t) fp->argc;
/* Store the return value in the caller's operand frame. */
regs.sp -= 1 + (size_t) ifp->frame.argc;
regs.sp[-1] = fp->rval; regs.sp[-1] = fp->rval;
down->regs = &regs;
bool recursive = fp->script == fp->down->script; /* Pop |fp| from the context. */
cx->stack().popInlineFrame(cx, fp, down);
/* Restore cx->fp and release the inline frame's space. */ /* Sync interpreter registers. */
cx->fp = fp = fp->down; fp = cx->fp;
JS_ASSERT(fp->regs == &ifp->callerRegs);
fp->regs = &regs;
JS_ARENA_RELEASE(&cx->stackPool, ifp->mark);
/* Restore the calling script's interpreter registers. */
script = fp->script; script = fp->script;
atoms = FrameAtomBase(cx, fp); atoms = FrameAtomBase(cx, fp);
@@ -509,7 +506,7 @@ BEGIN_CASE(JSOP_FORLOCAL)
JS_ASSERT(regs.sp - 2 >= StackBase(fp)); JS_ASSERT(regs.sp - 2 >= StackBase(fp));
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < fp->script->nslots); JS_ASSERT(slot < fp->script->nslots);
fp->slots[slot] = regs.sp[-1]; fp->slots()[slot] = regs.sp[-1];
END_CASE(JSOP_FORLOCAL) END_CASE(JSOP_FORLOCAL)
BEGIN_CASE(JSOP_FORNAME) BEGIN_CASE(JSOP_FORNAME)
@@ -1331,9 +1328,9 @@ BEGIN_CASE(JSOP_LOCALINC)
do_local_incop: do_local_incop:
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < fp->script->nslots); JS_ASSERT(slot < fp->script->nslots);
vp = fp->slots + slot; vp = fp->slots() + slot;
METER_SLOT_OP(op, slot); METER_SLOT_OP(op, slot);
vp = fp->slots + slot; vp = fp->slots() + slot;
do_int_fast_incop: do_int_fast_incop:
rval = *vp; rval = *vp;
@@ -1379,7 +1376,7 @@ BEGIN_CASE(JSOP_GVARINC)
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < GlobalVarCount(fp)); JS_ASSERT(slot < GlobalVarCount(fp));
METER_SLOT_OP(op, slot); METER_SLOT_OP(op, slot);
lval = fp->slots[slot]; lval = fp->slots()[slot];
if (JSVAL_IS_NULL(lval)) { if (JSVAL_IS_NULL(lval)) {
op = op2; op = op2;
DO_OP(); DO_OP();
@@ -1440,7 +1437,7 @@ BEGIN_CASE(JSOP_GETLOCALPROP)
i = SLOTNO_LEN; i = SLOTNO_LEN;
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots); JS_ASSERT(slot < script->nslots);
PUSH_OPND(fp->slots[slot]); PUSH_OPND(fp->slots()[slot]);
goto do_getprop_body; goto do_getprop_body;
BEGIN_CASE(JSOP_GETPROP) BEGIN_CASE(JSOP_GETPROP)
@@ -1627,7 +1624,7 @@ BEGIN_CASE(JSOP_CALLPROP)
END_CASE(JSOP_CALLPROP) END_CASE(JSOP_CALLPROP)
BEGIN_CASE(JSOP_UNBRAND) BEGIN_CASE(JSOP_UNBRAND)
JS_ASSERT(regs.sp - fp->slots >= 1); JS_ASSERT(regs.sp - fp->slots() >= 1);
lval = FETCH_OPND(-1); lval = FETCH_OPND(-1);
obj = JSVAL_TO_OBJECT(lval); obj = JSVAL_TO_OBJECT(lval);
if (!obj->unbrand(cx)) if (!obj->unbrand(cx))
@@ -1993,7 +1990,7 @@ BEGIN_CASE(JSOP_NEW)
} }
} }
if (!js_InvokeConstructor(cx, argc, JS_FALSE, vp)) if (!js_InvokeConstructor(cx, InvokeArgsGuard(vp, argc), JS_FALSE))
goto error; goto error;
regs.sp = vp + 1; regs.sp = vp + 1;
CHECK_INTERRUPT_HANDLER(); CHECK_INTERRUPT_HANDLER();
@@ -2018,148 +2015,100 @@ BEGIN_CASE(JSOP_APPLY)
if (FUN_INTERPRETED(fun)) if (FUN_INTERPRETED(fun))
inline_call: inline_call:
{ {
uintN nframeslots, nvars, missing; JSScript *newscript = fun->u.i.script;
JSArena *a; if (JS_UNLIKELY(newscript->isEmpty())) {
jsuword nbytes;
void *newmark;
jsval *newsp;
JSInlineFrame *newifp;
JSInterpreterHook hook;
script = fun->u.i.script;
if (script->isEmpty()) {
script = fp->script;
*vp = JSVAL_VOID; *vp = JSVAL_VOID;
regs.sp = vp + 1; regs.sp = vp + 1;
goto end_call; goto end_call;
} }
/* Restrict recursion of lightweight functions. */ /* Restrict recursion of lightweight functions. */
if (inlineCallCount >= JS_MAX_INLINE_CALL_COUNT) { if (JS_UNLIKELY(inlineCallCount >= JS_MAX_INLINE_CALL_COUNT)) {
js_ReportOverRecursed(cx); js_ReportOverRecursed(cx);
script = fp->script;
goto error; goto error;
} }
/* Compute the total number of stack slots needed by fun. */ /*
nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval)); * Get pointer to new frame/slots, without changing global state.
atoms = script->atomMap.vector; * Initialize missing args if there are any.
nbytes = (nframeslots + script->nslots) * sizeof(jsval); */
StackSpace &stack = cx->stack();
/* Allocate missing expected args adjacent to actuals. */ uintN nslots = newscript->nslots;
a = cx->stackPool.current; uintN funargs = fun->nargs;
newmark = (void *) a->avail; JSStackFrame *newfp;
if (fun->nargs <= argc) { if (argc < funargs) {
missing = 0; uintN missing = funargs - argc;
newfp = stack.getInlineFrame(cx, regs.sp, missing, nslots);
if (!newfp)
goto error;
for (jsval *v = regs.sp, *end = v + missing; v != end; ++v)
*v = JSVAL_VOID;
} else { } else {
newsp = vp + 2 + fun->nargs; newfp = stack.getInlineFrame(cx, regs.sp, 0, nslots);
JS_ASSERT(newsp > regs.sp); if (!newfp)
if ((jsuword) newsp <= a->limit) { goto error;
if ((jsuword) newsp > a->avail)
a->avail = (jsuword) newsp;
jsval *argsp = newsp;
do {
*--argsp = JSVAL_VOID;
} while (argsp != regs.sp);
missing = 0;
} else {
missing = fun->nargs - argc;
nbytes += (2 + fun->nargs) * sizeof(jsval);
}
} }
/* Allocate the inline frame with its slots and operands. */ /* Initialize stack frame. */
if (a->avail + nbytes <= a->limit) { newfp->callobj = NULL;
newsp = (jsval *) a->avail; newfp->argsobj = NULL;
a->avail += nbytes; newfp->script = newscript;
JS_ASSERT(missing == 0); newfp->fun = fun;
} else { newfp->argc = argc;
JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, newfp->argv = vp + 2;
nbytes); newfp->rval = JSVAL_VOID;
if (!newsp) { newfp->annotation = NULL;
js_ReportOutOfScriptQuota(cx); newfp->scopeChain = parent = obj->getParent();
goto bad_inline_call; newfp->flags = flags;
} newfp->blockChain = NULL;
if (JS_LIKELY(newscript->staticLevel < JS_DISPLAY_SIZE)) {
/* JSStackFrame **disp = &cx->display[newscript->staticLevel];
* Move args if the missing ones overflow arena a, then push newfp->displaySave = *disp;
* undefined for the missing args. *disp = newfp;
*/
if (missing) {
memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
vp = newsp;
newsp = vp + 2 + argc;
do {
*newsp++ = JSVAL_VOID;
} while (--missing != 0);
}
} }
/* Claim space for the stack frame and initialize it. */
newifp = (JSInlineFrame *) newsp;
newsp += nframeslots;
newifp->frame.callobj = NULL;
newifp->frame.argsobj = NULL;
newifp->frame.script = script;
newifp->frame.fun = fun;
newifp->frame.argc = argc;
newifp->frame.argv = vp + 2;
newifp->frame.rval = JSVAL_VOID;
newifp->frame.down = fp;
newifp->frame.annotation = NULL;
newifp->frame.scopeChain = parent = obj->getParent();
newifp->frame.flags = flags;
newifp->frame.blockChain = NULL;
if (script->staticLevel < JS_DISPLAY_SIZE) {
JSStackFrame **disp = &cx->display[script->staticLevel];
newifp->frame.displaySave = *disp;
*disp = &newifp->frame;
}
newifp->mark = newmark;
/* Compute the 'this' parameter now that argv is set. */
JS_ASSERT(!JSFUN_BOUND_METHOD_TEST(fun->flags)); JS_ASSERT(!JSFUN_BOUND_METHOD_TEST(fun->flags));
newifp->frame.thisv = vp[1]; newfp->thisv = vp[1];
newfp->regs = NULL;
newifp->frame.regs = NULL; newfp->imacpc = NULL;
newifp->frame.imacpc = NULL;
newifp->frame.slots = newsp;
/* Push void to initialize local variables. */ /* Push void to initialize local variables. */
nvars = fun->u.i.nvars; jsval *newsp = StackBase(newfp);
while (nvars--) for (jsval *v = newfp->slots(); v != newsp; ++v)
*newsp++ = JSVAL_VOID; *v = JSVAL_VOID;
/* Scope with a call object parented by callee's parent. */ /* Scope with a call object parented by callee's parent. */
if (JSFUN_HEAVYWEIGHT_TEST(fun->flags) && if (fun->isHeavyweight() && !js_GetCallObject(cx, newfp))
!js_GetCallObject(cx, &newifp->frame)) { goto error;
goto bad_inline_call;
}
/* Switch version if currentVersion wasn't overridden. */ /* Switch version if currentVersion wasn't overridden. */
newifp->callerVersion = (JSVersion) cx->version; newfp->callerVersion = (JSVersion) cx->version;
if (JS_LIKELY(cx->version == currentVersion)) { if (JS_LIKELY(cx->version == currentVersion)) {
currentVersion = (JSVersion) script->version; currentVersion = (JSVersion) newscript->version;
if (currentVersion != cx->version) if (JS_UNLIKELY(currentVersion != cx->version))
js_SetVersion(cx, currentVersion); js_SetVersion(cx, currentVersion);
} }
/* Push the frame and set interpreter registers. */ /* Push the frame and set interpreter registers. */
newifp->callerRegs = regs; newfp->callerRegs = regs;
fp->regs = &newifp->callerRegs; fp->regs = &newfp->callerRegs;
regs.sp = newsp; regs.sp = newsp;
regs.pc = script->code; regs.pc = newscript->code;
newifp->frame.regs = &regs; newfp->regs = &regs;
cx->fp = fp = &newifp->frame; stack.pushInlineFrame(cx, fp, newfp);
JS_ASSERT(newfp == cx->fp);
/* Import into locals. */
fp = newfp;
script = newscript;
atoms = script->atomMap.vector;
/* Call the debugger hook if present. */ /* Call the debugger hook if present. */
hook = cx->debugHooks->callHook; if (JSInterpreterHook hook = cx->debugHooks->callHook) {
if (hook) { fp->hookData = hook(cx, fp, JS_TRUE, 0,
newifp->hookData = hook(cx, &newifp->frame, JS_TRUE, 0, cx->debugHooks->callHookData);
cx->debugHooks->callHookData);
CHECK_INTERRUPT_HANDLER(); CHECK_INTERRUPT_HANDLER();
} else { } else {
newifp->hookData = NULL; fp->hookData = NULL;
} }
inlineCallCount++; inlineCallCount++;
@@ -2189,13 +2138,6 @@ BEGIN_CASE(JSOP_APPLY)
/* Load first op and dispatch it (safe since JSOP_STOP). */ /* Load first op and dispatch it (safe since JSOP_STOP). */
op = (JSOp) *regs.pc; op = (JSOp) *regs.pc;
DO_OP(); DO_OP();
bad_inline_call:
JS_ASSERT(fp->regs == &regs);
script = fp->script;
atoms = script->atomMap.vector;
js_FreeRawStack(cx, newmark);
goto error;
} }
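
Editor's note: the two hunks above replace inline_call's hand-rolled arena arithmetic with the new StackSpace protocol: getInlineFrame reserves contiguous room for any missing formals plus the callee's slots (and reports the over-quota error itself, which is why the bad_inline_call recovery label can go away), the JSStackFrame fields are then filled in, and only pushInlineFrame makes the frame current. A minimal toy of that two-phase shape, using simplified stand-in types rather than the real JSStackFrame/StackSpace:

    // Toy model only: getInlineFrame() reserves space and may fail;
    // pushInlineFrame() is infallible and activates the initialized frame.
    #include <cstdio>
    #include <vector>

    struct ToyFrame {
        ToyFrame *down;
        int pc;                              // stands in for script/regs/slots
    };

    class ToyStack {
        std::vector<ToyFrame> frames;
        ToyFrame *current;
      public:
        ToyStack() : current(NULL) { frames.reserve(16); }  // fixed capacity, no relocation
        ToyFrame *getInlineFrame() {         // phase 1: reserve, can fail
            if (frames.size() == frames.capacity())
                return NULL;                 // would report "over recursed" / quota exhausted
            frames.push_back(ToyFrame());
            return &frames.back();
        }
        void pushInlineFrame(ToyFrame *fp) { // phase 2: make it current
            fp->down = current;
            current = fp;
        }
        ToyFrame *currentFrame() const { return current; }
    };

    int main() {
        ToyStack stack;
        if (ToyFrame *fp = stack.getInlineFrame()) {
            fp->pc = 42;                     // initialize before it goes live
            stack.pushInlineFrame(fp);
        }
        std::printf("current pc = %d\n", stack.currentFrame()->pc);
        return 0;
    }
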
if (fun->flags & JSFUN_FAST_NATIVE) { if (fun->flags & JSFUN_FAST_NATIVE) {
@@ -2248,7 +2190,7 @@ BEGIN_CASE(JSOP_APPLY)
} }
} }
ok = js_Invoke(cx, argc, vp, 0); ok = js_Invoke(cx, InvokeArgsGuard(vp, argc), 0);
regs.sp = vp + 1; regs.sp = vp + 1;
CHECK_INTERRUPT_HANDLER(); CHECK_INTERRUPT_HANDLER();
if (!ok) if (!ok)
@@ -2262,7 +2204,8 @@ END_CASE(JSOP_CALL)
BEGIN_CASE(JSOP_SETCALL) BEGIN_CASE(JSOP_SETCALL)
argc = GET_ARGC(regs.pc); argc = GET_ARGC(regs.pc);
vp = regs.sp - argc - 2; vp = regs.sp - argc - 2;
if (js_Invoke(cx, argc, vp, 0)) ok = js_Invoke(cx, InvokeArgsGuard(vp, argc), 0);
if (ok)
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_LEFTSIDE_OF_ASS); JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_LEFTSIDE_OF_ASS);
goto error; goto error;
END_CASE(JSOP_SETCALL) END_CASE(JSOP_SETCALL)
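
Editor's note: js_Invoke and js_InvokeConstructor now take a js::InvokeArgsGuard instead of a raw (argc, vp) pair. In the interpreter the guard is simply wrapped around the callee, |this|, and actuals that already sit contiguously on the operand stack; callers that do not have them laid out ask the context's StackSpace for fresh rooted slots, and the guard releases them when it leaves scope. A hedged sketch of that external-caller shape, using only calls that appear in this patch (the wrapper function itself and its error handling are illustrative):

    #include <string.h>          /* memcpy */
    #include "jsapi.h"
    #include "jscntxt.h"
    #include "jscntxtinlines.h"  /* cx->stack(), pushInvokeArgs */
    #include "jsinterp.h"        /* js_Invoke */

    static JSBool
    CallFunctionValue(JSContext *cx, jsval fval, jsval thisv,
                      uintN argc, jsval *argv, jsval *rval)
    {
        js::InvokeArgsGuard args;
        if (!cx->stack().pushInvokeArgs(cx, argc, args))   /* 2 + argc rooted slots */
            return JS_FALSE;

        jsval *vp = args.getvp();
        vp[0] = fval;                                      /* callee */
        vp[1] = thisv;                                     /* |this| */
        memcpy(vp + 2, argv, argc * sizeof(jsval));        /* actual arguments */

        JSBool ok = js_Invoke(cx, args, 0);                /* guard pops at scope exit */
        if (ok)
            *rval = vp[0];                                 /* result is left in vp[0] */
        return ok;
    }
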
@@ -2629,20 +2572,20 @@ END_SET_CASE(JSOP_SETARG)
BEGIN_CASE(JSOP_GETLOCAL) BEGIN_CASE(JSOP_GETLOCAL)
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots); JS_ASSERT(slot < script->nslots);
PUSH_OPND(fp->slots[slot]); PUSH_OPND(fp->slots()[slot]);
END_CASE(JSOP_GETLOCAL) END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL) BEGIN_CASE(JSOP_CALLLOCAL)
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots); JS_ASSERT(slot < script->nslots);
PUSH_OPND(fp->slots[slot]); PUSH_OPND(fp->slots()[slot]);
PUSH_OPND(JSVAL_NULL); PUSH_OPND(JSVAL_NULL);
END_CASE(JSOP_CALLLOCAL) END_CASE(JSOP_CALLLOCAL)
BEGIN_CASE(JSOP_SETLOCAL) BEGIN_CASE(JSOP_SETLOCAL)
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots); JS_ASSERT(slot < script->nslots);
vp = &fp->slots[slot]; vp = &fp->slots()[slot];
*vp = FETCH_OPND(-1); *vp = FETCH_OPND(-1);
END_SET_CASE(JSOP_SETLOCAL) END_SET_CASE(JSOP_SETLOCAL)
@@ -2721,7 +2664,7 @@ BEGIN_CASE(JSOP_CALLGVAR)
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < GlobalVarCount(fp)); JS_ASSERT(slot < GlobalVarCount(fp));
METER_SLOT_OP(op, slot); METER_SLOT_OP(op, slot);
lval = fp->slots[slot]; lval = fp->slots()[slot];
if (JSVAL_IS_NULL(lval)) { if (JSVAL_IS_NULL(lval)) {
op = (op == JSOP_GETGVAR) ? JSOP_NAME : JSOP_CALLNAME; op = (op == JSOP_GETGVAR) ? JSOP_NAME : JSOP_CALLNAME;
DO_OP(); DO_OP();
@@ -2742,7 +2685,7 @@ BEGIN_CASE(JSOP_SETGVAR)
rval = FETCH_OPND(-1); rval = FETCH_OPND(-1);
JS_ASSERT(fp->varobj(cx) == cx->activeCallStack()->getInitialVarObj()); JS_ASSERT(fp->varobj(cx) == cx->activeCallStack()->getInitialVarObj());
obj = cx->activeCallStack()->getInitialVarObj(); obj = cx->activeCallStack()->getInitialVarObj();
lval = fp->slots[slot]; lval = fp->slots()[slot];
if (JSVAL_IS_NULL(lval)) { if (JSVAL_IS_NULL(lval)) {
/* /*
* Inline-clone and deoptimize JSOP_SETNAME code here because * Inline-clone and deoptimize JSOP_SETNAME code here because
@@ -2834,7 +2777,7 @@ BEGIN_CASE(JSOP_DEFVAR)
* The atom index for the global's name literal is identical to its * The atom index for the global's name literal is identical to its
* variable index. * variable index.
*/ */
fp->slots[index] = INT_TO_JSVAL(sprop->slot); fp->slots()[index] = INT_TO_JSVAL(sprop->slot);
} }
} }
@@ -3076,7 +3019,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN)
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
TRACE_2(DefLocalFunSetSlot, slot, obj); TRACE_2(DefLocalFunSetSlot, slot, obj);
fp->slots[slot] = OBJECT_TO_JSVAL(obj); fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
END_CASE(JSOP_DEFLOCALFUN) END_CASE(JSOP_DEFLOCALFUN)
BEGIN_CASE(JSOP_DEFLOCALFUN_FC) BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
@@ -3089,7 +3032,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
TRACE_2(DefLocalFunSetSlot, slot, obj); TRACE_2(DefLocalFunSetSlot, slot, obj);
fp->slots[slot] = OBJECT_TO_JSVAL(obj); fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
END_CASE(JSOP_DEFLOCALFUN_FC) END_CASE(JSOP_DEFLOCALFUN_FC)
BEGIN_CASE(JSOP_DEFLOCALFUN_DBGFC) BEGIN_CASE(JSOP_DEFLOCALFUN_DBGFC)
@@ -3100,7 +3043,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN_DBGFC)
goto error; goto error;
slot = GET_SLOTNO(regs.pc); slot = GET_SLOTNO(regs.pc);
fp->slots[slot] = OBJECT_TO_JSVAL(obj); fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
END_CASE(JSOP_DEFLOCALFUN_DBGFC) END_CASE(JSOP_DEFLOCALFUN_DBGFC)
BEGIN_CASE(JSOP_LAMBDA) BEGIN_CASE(JSOP_LAMBDA)
@@ -3479,7 +3422,7 @@ END_CASE(JSOP_INITELEM)
BEGIN_CASE(JSOP_DEFSHARP) BEGIN_CASE(JSOP_DEFSHARP)
slot = GET_UINT16(regs.pc); slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < fp->script->nfixed); JS_ASSERT(slot + 1 < fp->script->nfixed);
lval = fp->slots[slot]; lval = fp->slots()[slot];
if (!JSVAL_IS_PRIMITIVE(lval)) { if (!JSVAL_IS_PRIMITIVE(lval)) {
obj = JSVAL_TO_OBJECT(lval); obj = JSVAL_TO_OBJECT(lval);
} else { } else {
@@ -3487,7 +3430,7 @@ BEGIN_CASE(JSOP_DEFSHARP)
obj = js_NewArrayObject(cx, 0, NULL); obj = js_NewArrayObject(cx, 0, NULL);
if (!obj) if (!obj)
goto error; goto error;
fp->slots[slot] = OBJECT_TO_JSVAL(obj); fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
} }
i = (jsint) GET_UINT16(regs.pc + UINT16_LEN); i = (jsint) GET_UINT16(regs.pc + UINT16_LEN);
id = INT_TO_JSID(i); id = INT_TO_JSID(i);
@@ -3506,12 +3449,12 @@ END_CASE(JSOP_DEFSHARP)
BEGIN_CASE(JSOP_USESHARP) BEGIN_CASE(JSOP_USESHARP)
slot = GET_UINT16(regs.pc); slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < fp->script->nfixed); JS_ASSERT(slot + 1 < fp->script->nfixed);
lval = fp->slots[slot]; lval = fp->slots()[slot];
i = (jsint) GET_UINT16(regs.pc + UINT16_LEN); i = (jsint) GET_UINT16(regs.pc + UINT16_LEN);
if (JSVAL_IS_VOID(lval)) { if (JSVAL_IS_VOID(lval)) {
rval = JSVAL_VOID; rval = JSVAL_VOID;
} else { } else {
obj = JSVAL_TO_OBJECT(fp->slots[slot]); obj = JSVAL_TO_OBJECT(fp->slots()[slot]);
id = INT_TO_JSID(i); id = INT_TO_JSID(i);
if (!obj->getProperty(cx, id, &rval)) if (!obj->getProperty(cx, id, &rval))
goto error; goto error;
@@ -3530,7 +3473,7 @@ END_CASE(JSOP_USESHARP)
BEGIN_CASE(JSOP_SHARPINIT) BEGIN_CASE(JSOP_SHARPINIT)
slot = GET_UINT16(regs.pc); slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < fp->script->nfixed); JS_ASSERT(slot + 1 < fp->script->nfixed);
vp = &fp->slots[slot]; vp = &fp->slots()[slot];
rval = vp[1]; rval = vp[1];
/* /*
@@ -3620,7 +3563,7 @@ BEGIN_CASE(JSOP_SETLOCALPOP)
JS_ASSERT((size_t) (regs.sp - StackBase(fp)) >= 2); JS_ASSERT((size_t) (regs.sp - StackBase(fp)) >= 2);
slot = GET_UINT16(regs.pc); slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < script->nslots); JS_ASSERT(slot + 1 < script->nslots);
fp->slots[slot] = POP_OPND(); fp->slots()[slot] = POP_OPND();
END_CASE(JSOP_SETLOCALPOP) END_CASE(JSOP_SETLOCALPOP)
BEGIN_CASE(JSOP_IFPRIMTOP) BEGIN_CASE(JSOP_IFPRIMTOP)
@@ -3937,7 +3880,7 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
JS_ASSERT(StackBase(fp) + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp); JS_ASSERT(StackBase(fp) + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj); vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
JS_ASSERT(regs.sp < vp); JS_ASSERT(regs.sp < vp);
JS_ASSERT(vp <= fp->slots + script->nslots); JS_ASSERT(vp <= fp->slots() + script->nslots);
while (regs.sp < vp) { while (regs.sp < vp) {
STORE_OPND(0, JSVAL_VOID); STORE_OPND(0, JSVAL_VOID);
regs.sp++; regs.sp++;
@@ -3957,7 +3900,7 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
while ((clasp = obj2->getClass()) == &js_WithClass) while ((clasp = obj2->getClass()) == &js_WithClass)
obj2 = obj2->getParent(); obj2 = obj2->getParent();
if (clasp == &js_BlockClass && if (clasp == &js_BlockClass &&
obj2->getPrivate() == fp) { obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) {
JSObject *youngestProto = obj2->getProto(); JSObject *youngestProto = obj2->getProto();
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto)); JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto));
parent = obj; parent = obj;
@@ -4035,7 +3978,7 @@ BEGIN_CASE(JSOP_GENERATOR)
BEGIN_CASE(JSOP_YIELD) BEGIN_CASE(JSOP_YIELD)
ASSERT_NOT_THROWING(cx); ASSERT_NOT_THROWING(cx);
if (FRAME_TO_GENERATOR(fp)->state == JSGEN_CLOSING) { if (cx->generatorFor(fp)->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD, js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD,
JSDVG_SEARCH_STACK, fp->argv[-2], NULL); JSDVG_SEARCH_STACK, fp->argv[-2], NULL);
goto error; goto error;
@@ -4050,7 +3993,7 @@ BEGIN_CASE(JSOP_ARRAYPUSH)
slot = GET_UINT16(regs.pc); slot = GET_UINT16(regs.pc);
JS_ASSERT(script->nfixed <= slot); JS_ASSERT(script->nfixed <= slot);
JS_ASSERT(slot < script->nslots); JS_ASSERT(slot < script->nslots);
lval = fp->slots[slot]; lval = fp->slots()[slot];
obj = JSVAL_TO_OBJECT(lval); obj = JSVAL_TO_OBJECT(lval);
rval = FETCH_OPND(-1); rval = FETCH_OPND(-1);
if (!js_ArrayCompPush(cx, obj, rval)) if (!js_ArrayCompPush(cx, obj, rval))

@@ -140,8 +140,12 @@ extern "C++" {
namespace js { namespace js {
class ExecuteArgsGuard;
class InvokeFrameGuard;
class InvokeArgsGuard;
class TraceRecorder; class TraceRecorder;
struct TraceMonitor; class TraceMonitor;
class StackSpace;
class CallStack; class CallStack;
class TokenStream; class TokenStream;

@@ -245,7 +245,7 @@ TraceRecorder::upRecursion()
* Need to compute this from the down frame, since the stack could have * Need to compute this from the down frame, since the stack could have
* moved on this one. * moved on this one.
*/ */
fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots; fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots();
JS_ASSERT(cx->fp->argc == cx->fp->down->argc); JS_ASSERT(cx->fp->argc == cx->fp->down->argc);
fi->set_argc(uint16(cx->fp->argc), false); fi->set_argc(uint16(cx->fp->argc), false);
fi->callerHeight = downPostSlots; fi->callerHeight = downPostSlots;
@@ -599,12 +599,11 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
&fp->scopeChainVal, &fp->scopeChainVal,
&info); &info);
/* vars */ /* vars */
LIns* slots_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, slots), LIns* slots_ins = addName(lir->ins2(LIR_piadd, fp_ins, INS_CONSTWORD(sizeof(JSStackFrame))),
ACC_OTHER),
"slots"); "slots");
for (unsigned i = 0; i < fp->script->nfixed; i++) for (unsigned i = 0; i < fp->script->nfixed; i++)
slurpSlot(lir->insLoad(LIR_ldp, slots_ins, i * sizeof(jsval), ACC_OTHER), slurpSlot(lir->insLoad(LIR_ldp, slots_ins, i * sizeof(jsval), ACC_OTHER),
&fp->slots[i], &fp->slots()[i],
&info); &info);
/* stack vals */ /* stack vals */
unsigned nfixed = fp->script->nfixed; unsigned nfixed = fp->script->nfixed;
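
Editor's note: the hunk just above is the clearest statement of the new frame layout. On trace, fp->slots() is no longer a pointer loaded out of the frame; it is fp plus sizeof(JSStackFrame), that is, a frame's jsvals sit contiguously right behind the frame header, which is what lets getInlineFrame hand out frame and slots in a single reservation. A toy illustration of that layout trick, with stand-in types:

    // Toy illustration only: slots live right after the frame header, so
    // slots() needs no stored pointer, just pointer arithmetic past *this.
    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>
    #include <new>

    typedef size_t toyval;                   // stands in for a jsval-sized word

    struct ToyFrame {
        size_t nslots;
        toyval *slots() { return reinterpret_cast<toyval *>(this + 1); }
    };

    static ToyFrame *
    NewFrameWithSlots(size_t nslots)
    {
        void *p = std::malloc(sizeof(ToyFrame) + nslots * sizeof(toyval));
        ToyFrame *fp = new (p) ToyFrame;     // header first, slots right behind it
        fp->nslots = nslots;
        for (size_t i = 0; i < nslots; ++i)
            fp->slots()[i] = 0;              // cf. pushing JSVAL_VOID for each local
        return fp;
    }

    int main() {
        ToyFrame *fp = NewFrameWithSlots(4);
        fp->slots()[2] = 314;
        std::printf("slot 2 = %lu\n", (unsigned long) fp->slots()[2]);
        std::free(fp);
        return 0;
    }
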

@@ -77,7 +77,9 @@
#include "jsbit.h" #include "jsbit.h"
#include "jsvector.h" #include "jsvector.h"
#include "jsversion.h" #include "jsversion.h"
#include "jsstrinlines.h" #include "jsstrinlines.h"
#include "jscntxtinlines.h"
using namespace js; using namespace js;
@@ -1671,30 +1673,21 @@ str_search(JSContext *cx, uintN argc, jsval *vp)
struct ReplaceData struct ReplaceData
{ {
ReplaceData(JSContext *cx) ReplaceData(JSContext *cx)
: g(cx), invokevp(NULL), cb(cx) : g(cx), cb(cx)
{} {}
~ReplaceData() { JSString *str; /* 'this' parameter object as a string */
if (invokevp) { RegExpGuard g; /* regexp parameter object and private data */
/* If we set invokevp, we already left trace. */ JSObject *lambda; /* replacement function object or null */
VOUCH_HAVE_STACK(); JSString *repstr; /* replacement string */
js_FreeStack(g.cx(), invokevpMark); jschar *dollar; /* null or pointer to first $ in repstr */
} jschar *dollarEnd; /* limit pointer for js_strchr_limit */
} jsint index; /* index in result of next replacement */
jsint leftIndex; /* left context index in str->chars */
JSString *str; /* 'this' parameter object as a string */ JSSubString dollarStr; /* for "$$" InterpretDollar result */
RegExpGuard g; /* regexp parameter object and private data */ bool calledBack; /* record whether callback has been called */
JSObject *lambda; /* replacement function object or null */ InvokeArgsGuard args; /* arguments for lambda's js_Invoke call */
JSString *repstr; /* replacement string */ JSCharBuffer cb; /* buffer built during DoMatch */
jschar *dollar; /* null or pointer to first $ in repstr */
jschar *dollarEnd; /* limit pointer for js_strchr_limit */
jsint index; /* index in result of next replacement */
jsint leftIndex; /* left context index in str->chars */
JSSubString dollarStr; /* for "$$" InterpretDollar result */
bool calledBack; /* record whether callback has been called */
jsval *invokevp; /* reusable allocation from js_AllocStack */
void *invokevpMark; /* the mark to return */
JSCharBuffer cb; /* buffer built during DoMatch */
}; };
static JSSubString * static JSSubString *
@@ -1793,12 +1786,9 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
uintN p = rdata.g.re()->parenCount; uintN p = rdata.g.re()->parenCount;
uintN argc = 1 + p + 2; uintN argc = 1 + p + 2;
if (!rdata.invokevp) { if (!rdata.args.getvp() &&
rdata.invokevp = js_AllocStack(cx, 2 + argc, &rdata.invokevpMark); !cx->stack().pushInvokeArgs(cx, argc, rdata.args))
if (!rdata.invokevp) return false;
return false;
}
jsval* invokevp = rdata.invokevp;
MUST_FLOW_THROUGH("lambda_out"); MUST_FLOW_THROUGH("lambda_out");
bool ok = false; bool ok = false;
@@ -1813,7 +1803,7 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
JSRegExpStatics save = cx->regExpStatics; JSRegExpStatics save = cx->regExpStatics;
/* Push lambda and its 'this' parameter. */ /* Push lambda and its 'this' parameter. */
jsval *sp = invokevp; jsval *sp = rdata.args.getvp();
*sp++ = OBJECT_TO_JSVAL(lambda); *sp++ = OBJECT_TO_JSVAL(lambda);
*sp++ = OBJECT_TO_JSVAL(lambda->getParent()); *sp++ = OBJECT_TO_JSVAL(lambda->getParent());
@@ -1849,7 +1839,7 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
*sp++ = INT_TO_JSVAL((jsint)cx->regExpStatics.leftContext.length); *sp++ = INT_TO_JSVAL((jsint)cx->regExpStatics.leftContext.length);
*sp++ = STRING_TO_JSVAL(rdata.str); *sp++ = STRING_TO_JSVAL(rdata.str);
if (!js_Invoke(cx, argc, invokevp, 0)) if (!js_Invoke(cx, rdata.args, 0))
goto lambda_out; goto lambda_out;
/* /*
@@ -1857,7 +1847,7 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
* created by this js_ValueToString that would otherwise be GC- * created by this js_ValueToString that would otherwise be GC-
* able, until we use rdata.repstr in DoReplace. * able, until we use rdata.repstr in DoReplace.
*/ */
repstr = js_ValueToString(cx, *invokevp); repstr = js_ValueToString(cx, *rdata.args.getvp());
if (!repstr) if (!repstr)
goto lambda_out; goto lambda_out;
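
Editor's note: ReplaceData previously freed its js_AllocStack block by hand in a destructor; the InvokeArgsGuard member now owns those slots, and FindReplaceLength pushes them at most once, on the first lambda replacement, then reuses the same rooted vp for every later match. A condensed sketch of the resulting pattern (the struct and helper are illustrative; pushInvokeArgs, getvp and js_Invoke are the patch's API):

    /* Illustrative condensation of the lazy push-once, reuse-per-match pattern. */
    struct ReplaceState {
        js::InvokeArgsGuard args;    /* slots stay rooted until this dies */
        /* ... other per-replace state ... */
    };

    static JSBool
    CallLambdaForMatch(JSContext *cx, ReplaceState &st, JSObject *lambda, uintN argc)
    {
        if (!st.args.getvp() &&                            /* not pushed yet? */
            !cx->stack().pushInvokeArgs(cx, argc, st.args))
            return JS_FALSE;

        jsval *vp = st.args.getvp();
        vp[0] = OBJECT_TO_JSVAL(lambda);
        vp[1] = OBJECT_TO_JSVAL(lambda->getParent());
        /* ... vp[2..] gets $&, $1..$n, the match offset and the subject ... */
        return js_Invoke(cx, st.args, 0);                  /* result left in vp[0] */
    }
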

@@ -82,6 +82,7 @@
#include "jsobjinlines.h" #include "jsobjinlines.h"
#include "jsscopeinlines.h" #include "jsscopeinlines.h"
#include "jsscriptinlines.h" #include "jsscriptinlines.h"
#include "jscntxtinlines.h"
#include "jsautooplen.h" // generated headers last #include "jsautooplen.h" // generated headers last
#include "imacros.c.out" #include "imacros.c.out"
@@ -1761,7 +1762,7 @@ VisitFrameSlots(Visitor &visitor, unsigned depth, JSStackFrame *fp,
if (!visitor.visitStackSlots(&fp->scopeChainVal, 1, fp)) if (!visitor.visitStackSlots(&fp->scopeChainVal, 1, fp))
return false; return false;
visitor.setStackSlotKind("var"); visitor.setStackSlotKind("var");
if (!visitor.visitStackSlots(fp->slots, fp->script->nfixed, fp)) if (!visitor.visitStackSlots(fp->slots(), fp->script->nfixed, fp))
return false; return false;
} }
visitor.setStackSlotKind("stack"); visitor.setStackSlotKind("stack");
@@ -2530,10 +2531,10 @@ TraceRecorder::nativeStackOffset(jsval* p) const
/* /*
* If it's not in a pending frame, it must be on the stack of the current * If it's not in a pending frame, it must be on the stack of the current
* frame above sp but below fp->slots + script->nslots. * frame above sp but below fp->slots() + script->nslots.
*/ */
if (!visitor.stopped()) { if (!visitor.stopped()) {
JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots); JS_ASSERT(size_t(p - cx->fp->slots()) < cx->fp->script->nslots);
offset += size_t(p - cx->fp->regs->sp) * sizeof(double); offset += size_t(p - cx->fp->regs->sp) * sizeof(double);
} }
return offset; return offset;
@@ -3062,7 +3063,7 @@ GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDept
// For this traits type, 'slot' is an index into the local slots array. // For this traits type, 'slot' is an index into the local slots array.
struct UpvarVarTraits { struct UpvarVarTraits {
static jsval interp_get(JSStackFrame* fp, int32 slot) { static jsval interp_get(JSStackFrame* fp, int32 slot) {
return fp->slots[slot]; return fp->slots()[slot];
} }
static uint32 native_slot(uint32 argc, int32 slot) { static uint32 native_slot(uint32 argc, int32 slot) {
@@ -3083,7 +3084,7 @@ GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDept
*/ */
struct UpvarStackTraits { struct UpvarStackTraits {
static jsval interp_get(JSStackFrame* fp, int32 slot) { static jsval interp_get(JSStackFrame* fp, int32 slot) {
return fp->slots[slot + fp->script->nfixed]; return fp->slots()[slot + fp->script->nfixed];
} }
static uint32 native_slot(uint32 argc, int32 slot) { static uint32 native_slot(uint32 argc, int32 slot) {
@@ -3233,7 +3234,7 @@ struct VarClosureTraits
} }
// See also UpvarVarTraits. // See also UpvarVarTraits.
static inline jsval* slots(JSStackFrame* fp) { return fp->slots; } static inline jsval* slots(JSStackFrame* fp) { return fp->slots(); }
static inline jsval* slots(JSObject* obj) { static inline jsval* slots(JSObject* obj) {
// We know Call objects use dslots. // We know Call objects use dslots.
return obj->dslots + slot_offset(obj); return obj->dslots + slot_offset(obj);
@@ -3635,7 +3636,7 @@ TraceRecorder::attemptImport(jsval* p)
CountSlotsVisitor countVisitor(p); CountSlotsVisitor countVisitor(p);
VisitStackSlots(countVisitor, cx, callDepth); VisitStackSlots(countVisitor, cx, callDepth);
if (countVisitor.stopped() || size_t(p - cx->fp->slots) < cx->fp->script->nslots) if (countVisitor.stopped() || size_t(p - cx->fp->slots()) < cx->fp->script->nslots)
return get(p); return get(p);
return NULL; return NULL;
@@ -5275,7 +5276,7 @@ TraceRecorder::hasMethod(JSObject* obj, jsid id, bool& found)
JS_REQUIRES_STACK RecordingStatus JS_REQUIRES_STACK RecordingStatus
TraceRecorder::hasIteratorMethod(JSObject* obj, bool& found) TraceRecorder::hasIteratorMethod(JSObject* obj, bool& found)
{ {
JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots + cx->fp->script->nslots); JS_ASSERT(cx->fp->regs->sp + 2 <= cx->fp->slots() + cx->fp->script->nslots);
return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom), found); return hasMethod(obj, ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom), found);
} }
@@ -5423,122 +5424,89 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
js_ReconstructStackDepth(cx, fp->script, fi.pc) == js_ReconstructStackDepth(cx, fp->script, fi.pc) ==
uintN(fi.spdist - fp->script->nfixed)); uintN(fi.spdist - fp->script->nfixed));
uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval)); /* Simulate js_Interpret locals for when |cx->fp == fp|. */
JSScript* script = fun->u.i.script; JSScript* newscript = fun->u.i.script;
size_t nbytes = (nframeslots + script->nslots) * sizeof(jsval); jsval* sp = fp->slots() + fi.spdist;
/* Code duplicated from inline_call: case in js_Interpret (FIXME). */
JSArena* a = cx->stackPool.current;
void* newmark = (void*) a->avail;
uintN argc = fi.get_argc(); uintN argc = fi.get_argc();
jsval* vp = fp->slots + fi.spdist - (2 + argc); jsval* vp = sp - (2 + argc);
uintN missing = 0;
jsval* newsp;
if (fun->nargs > argc) { /* Fixup |fp| using |fi|. */
const JSFrameRegs& regs = *fp->regs; fp->regs->sp = sp;
fp->regs->pc = fi.pc;
newsp = vp + 2 + fun->nargs;
JS_ASSERT(newsp > regs.sp);
if ((jsuword) newsp <= a->limit) {
if ((jsuword) newsp > a->avail)
a->avail = (jsuword) newsp;
jsval* argsp = newsp;
do {
*--argsp = JSVAL_VOID;
} while (argsp != regs.sp);
missing = 0;
} else {
missing = fun->nargs - argc;
nbytes += (2 + fun->nargs) * sizeof(jsval);
}
}
/* Allocate the inline frame with its vars and operands. */
if (a->avail + nbytes <= a->limit) {
newsp = (jsval *) a->avail;
a->avail += nbytes;
JS_ASSERT(missing == 0);
} else {
JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
if (!newsp)
OutOfMemoryAbort();
/*
* Move args if the missing ones overflow arena a, then push
* undefined for the missing args.
*/
if (missing) {
memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
vp = newsp;
newsp = vp + 2 + argc;
do {
*newsp++ = JSVAL_VOID;
} while (--missing != 0);
}
}
/* Claim space for the stack frame and initialize it. */
JSInlineFrame* newifp = (JSInlineFrame *) newsp;
newsp += nframeslots;
newifp->frame.callobj = NULL;
newifp->frame.argsobj = NULL;
newifp->frame.script = script;
newifp->frame.fun = fun;
bool constructing = fi.is_constructing();
newifp->frame.argc = argc;
newifp->callerRegs.pc = fi.pc;
newifp->callerRegs.sp = fp->slots + fi.spdist;
fp->imacpc = fi.imacpc; fp->imacpc = fi.imacpc;
fp->blockChain = fi.block;
#ifdef DEBUG #ifdef DEBUG
if (fi.block != fp->blockChain) { if (fi.block != fp->blockChain) {
for (JSObject* obj = fi.block; obj != fp->blockChain; obj = obj->getParent()) for (JSObject* obj = fi.block; obj != fp->blockChain; obj = obj->getParent())
JS_ASSERT(obj); JS_ASSERT(obj);
} }
#endif #endif
fp->blockChain = fi.block;
newifp->frame.argv = newifp->callerRegs.sp - argc; /*
JS_ASSERT(newifp->frame.argv); * Get pointer to new frame/slots, without changing global state.
* Initialize missing args if there are any. (Copied from js_Interpret.)
*
* StackSpace::getInlineFrame calls js_ReportOutOfScriptQuota if there is
* no space (which will try to deep bail, which is bad), however we already
* check on entry to ExecuteTree that there is enough space.
*/
StackSpace &stack = cx->stack();
uintN nslots = newscript->nslots;
uintN funargs = fun->nargs;
jsval *argv = vp + 2;
JSStackFrame *newfp;
if (argc < funargs) {
uintN missing = funargs - argc;
newfp = stack.getInlineFrame(cx, sp, missing, nslots);
for (jsval *v = argv + argc, *end = v + missing; v != end; ++v)
*v = JSVAL_VOID;
} else {
newfp = stack.getInlineFrame(cx, sp, 0, nslots);
}
/* Initialize the new stack frame. */
newfp->callobj = NULL;
newfp->argsobj = NULL;
newfp->script = newscript;
newfp->fun = fun;
newfp->argc = argc;
newfp->argv = argv;
#ifdef DEBUG #ifdef DEBUG
// Initialize argv[-1] to a known-bogus value so we'll catch it if // Initialize argv[-1] to a known-bogus value so we'll catch it if
// someone forgets to initialize it later. // someone forgets to initialize it later.
newifp->frame.argv[-1] = JSVAL_HOLE; newfp->argv[-1] = JSVAL_HOLE;
#endif #endif
JS_ASSERT(newifp->frame.argv >= StackBase(fp) + 2); newfp->rval = JSVAL_VOID;
newfp->annotation = NULL;
newifp->frame.rval = JSVAL_VOID; newfp->scopeChain = NULL; // will be updated in FlushNativeStackFrame
newifp->frame.down = fp; newfp->flags = fi.is_constructing() ? JSFRAME_CONSTRUCTING : 0;
newifp->frame.annotation = NULL; newfp->blockChain = NULL;
newifp->frame.scopeChain = NULL; // will be updated in FlushNativeStackFrame newfp->thisv = JSVAL_NULL; // will be updated in FlushNativeStackFrame
newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0; newfp->imacpc = NULL;
newifp->frame.blockChain = NULL; if (newscript->staticLevel < JS_DISPLAY_SIZE) {
newifp->mark = newmark; JSStackFrame **disp = &cx->display[newscript->staticLevel];
newifp->frame.thisv = JSVAL_NULL; // will be updated in FlushNativeStackFrame newfp->displaySave = *disp;
*disp = newfp;
newifp->frame.regs = fp->regs;
newifp->frame.regs->pc = script->code;
newifp->frame.regs->sp = newsp + script->nfixed;
newifp->frame.imacpc = NULL;
newifp->frame.slots = newsp;
if (script->staticLevel < JS_DISPLAY_SIZE) {
JSStackFrame **disp = &cx->display[script->staticLevel];
newifp->frame.displaySave = *disp;
*disp = &newifp->frame;
} }
/* /*
* Note that fp->script is still the caller's script; set the callee * Note that fp->script is still the caller's script; set the callee
* inline frame's idea of caller version from its version. * inline frame's idea of caller version from its version.
*/ */
newifp->callerVersion = (JSVersion) fp->script->version; newfp->callerVersion = (JSVersion) fp->script->version;
// After this paragraph, fp and cx->fp point to the newly synthesized frame. /*
fp->regs = &newifp->callerRegs; * Weave regs like JSOP_CALL/JSOP_STOP. |fp->regs| should point to the
fp = cx->fp = &newifp->frame; * |regs| variable in the innermost js_Interpret activation.
*/
newfp->callerRegs = *fp->regs;
newfp->regs = fp->regs;
fp->regs = &newfp->callerRegs;
newfp->regs->pc = newscript->code;
newfp->regs->sp = newfp->slots() + newscript->nfixed;
/* Push inline frame. (Copied from js_Interpret.) */
stack.pushInlineFrame(cx, fp, newfp);
/* /*
* If there's a call hook, invoke it to compute the hookData used by * If there's a call hook, invoke it to compute the hookData used by
@@ -5546,9 +5514,9 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
*/ */
JSInterpreterHook hook = cx->debugHooks->callHook; JSInterpreterHook hook = cx->debugHooks->callHook;
if (hook) { if (hook) {
newifp->hookData = hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData); newfp->hookData = hook(cx, newfp, JS_TRUE, 0, cx->debugHooks->callHookData);
} else { } else {
newifp->hookData = NULL; newfp->hookData = NULL;
} }
/* /*
@@ -5559,29 +5527,30 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
* everything down to the caller's fp->slots (where vars start) and avoid * everything down to the caller's fp->slots (where vars start) and avoid
* some of the complexity? * some of the complexity?
*/ */
return (fi.spdist - fp->down->script->nfixed) + return (fi.spdist - newfp->down->script->nfixed) +
((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) + ((fun->nargs > newfp->argc) ? fun->nargs - newfp->argc : 0) +
script->nfixed + SPECIAL_FRAME_SLOTS; newscript->nfixed + SPECIAL_FRAME_SLOTS;
} }
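
Editor's note: SynthesizeFrame now mirrors the interpreter's inline_call directly: the frame comes from StackSpace::getInlineFrame (whose quota check cannot fire here, since ExecuteTree already reserved enough space), and the register state is "woven" the same way JSOP_CALL does it. The weave is the part that is easy to misread in the flattened diff, so here is a toy of just that hand-off, with stand-in types:

    // Toy of the regs weave: the single live Regs object (a local of the
    // interpreter loop) is handed to the callee, while the caller's view is
    // redirected to a snapshot stored inside the callee's frame.
    #include <cassert>

    struct Regs  { int pc; int sp; };
    struct Frame { Regs *regs; Regs callerRegs; };

    static void
    WeaveRegs(Frame *caller, Frame *callee, Regs *liveRegs, int calleeEntryPc)
    {
        callee->callerRegs = *liveRegs;       // remember where the caller was
        callee->regs = liveRegs;              // callee drives the live registers
        caller->regs = &callee->callerRegs;   // caller now reads the snapshot
        liveRegs->pc = calleeEntryPc;         // enter the callee's code
    }

    int main() {
        Regs live = { 10, 3 };
        Frame caller = { &live, { 0, 0 } };
        Frame callee = { 0, { 0, 0 } };
        WeaveRegs(&caller, &callee, &live, 0);
        assert(caller.regs->pc == 10);        // caller still sees its old pc
        assert(callee.regs == &live && live.pc == 0);
        return 0;
    }
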
static void JS_REQUIRES_STACK static void
SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit) SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit)
{ {
VOUCH_DOES_NOT_REQUIRE_STACK(); /*
* StackSpace::getInlineFrame calls js_ReportOutOfScriptQuota if there is
* no space (which will try to deep bail, which is bad), however we already
* check on entry to ExecuteTree that there is enough space.
*/
CallStack *cs;
JSStackFrame *fp;
cx->stack().getSynthesizedSlowNativeFrame(cx, cs, fp);
void *mark; JSObject *callee = JSVAL_TO_OBJECT(state.nativeVp[0]);
JSInlineFrame *ifp; JSFunction *fun = GET_FUNCTION_PRIVATE(cx, callee);
JS_ASSERT(!fun->isInterpreted() && !fun->isFastNative());
JS_ASSERT(fun->u.n.extra == 0);
/* This allocation is infallible: ExecuteTree reserved enough stack. */
mark = JS_ARENA_MARK(&cx->stackPool);
JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
if (!ifp)
OutOfMemoryAbort();
JSStackFrame *fp = &ifp->frame;
fp->regs = NULL; fp->regs = NULL;
fp->imacpc = NULL; fp->imacpc = NULL;
fp->slots = NULL;
fp->callobj = NULL; fp->callobj = NULL;
fp->argsobj = NULL; fp->argsobj = NULL;
fp->script = NULL; fp->script = NULL;
@@ -5590,7 +5559,6 @@ SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit)
fp->argv = state.nativeVp + 2; fp->argv = state.nativeVp + 2;
fp->fun = GET_FUNCTION_PRIVATE(cx, fp->calleeObject()); fp->fun = GET_FUNCTION_PRIVATE(cx, fp->calleeObject());
fp->rval = JSVAL_VOID; fp->rval = JSVAL_VOID;
fp->down = cx->fp;
fp->annotation = NULL; fp->annotation = NULL;
JS_ASSERT(cx->fp->scopeChain); JS_ASSERT(cx->fp->scopeChain);
fp->scopeChain = cx->fp->scopeChain; fp->scopeChain = cx->fp->scopeChain;
@@ -5598,8 +5566,7 @@ SynthesizeSlowNativeFrame(InterpState& state, JSContext *cx, VMSideExit *exit)
fp->flags = exit->constructing() ? JSFRAME_CONSTRUCTING : 0; fp->flags = exit->constructing() ? JSFRAME_CONSTRUCTING : 0;
fp->displaySave = NULL; fp->displaySave = NULL;
ifp->mark = mark; cx->stack().pushSynthesizedSlowNativeFrame(cx, cs, fp);
cx->fp = fp;
} }
static JS_REQUIRES_STACK bool static JS_REQUIRES_STACK bool
@@ -6462,7 +6429,7 @@ ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
JS_ASSERT(f->root == f && f->code()); JS_ASSERT(f->root == f && f->code());
TraceMonitor* tm = &JS_TRACE_MONITOR(cx); TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
if (!ScopeChainCheck(cx, f)) if (!ScopeChainCheck(cx, f) || !cx->stack().ensureEnoughSpaceToEnterTrace())
return NULL; return NULL;
/* Initialize trace state. */ /* Initialize trace state. */
@@ -6594,14 +6561,8 @@ LeaveTree(TraceMonitor *tm, InterpState& state, VMSideExit* lr)
* *
* First, if we just returned from a slow native, pop its stack frame. * First, if we just returned from a slow native, pop its stack frame.
*/ */
if (!cx->fp->script) { if (!cx->fp->script)
JSStackFrame *fp = cx->fp; cx->stack().popSynthesizedSlowNativeFrame(cx);
JS_ASSERT(FUN_SLOW_NATIVE(fp->fun));
JS_ASSERT(!fp->regs);
JS_ASSERT(fp->down->regs != &((JSInlineFrame *) fp)->callerRegs);
cx->fp = fp->down;
JS_ARENA_RELEASE(&cx->stackPool, ((JSInlineFrame *) fp)->mark);
}
JS_ASSERT(cx->fp->script); JS_ASSERT(cx->fp->script);
if (!(bs & BUILTIN_ERROR)) { if (!(bs & BUILTIN_ERROR)) {
@@ -6644,7 +6605,7 @@ LeaveTree(TraceMonitor *tm, InterpState& state, VMSideExit* lr)
regs->sp += cs.ndefs; regs->sp += cs.ndefs;
regs->pc += cs.length; regs->pc += cs.length;
JS_ASSERT_IF(!cx->fp->imacpc, JS_ASSERT_IF(!cx->fp->imacpc,
cx->fp->slots + cx->fp->script->nfixed + cx->fp->slots() + cx->fp->script->nfixed +
js_ReconstructStackDepth(cx, cx->fp->script, regs->pc) == js_ReconstructStackDepth(cx, cx->fp->script, regs->pc) ==
regs->sp); regs->sp);
@@ -6766,7 +6727,7 @@ LeaveTree(TraceMonitor *tm, InterpState& state, VMSideExit* lr)
fp->imacpc = innermost->imacpc; fp->imacpc = innermost->imacpc;
fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots; fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots;
JS_ASSERT_IF(!fp->imacpc, JS_ASSERT_IF(!fp->imacpc,
fp->slots + fp->script->nfixed + fp->slots() + fp->script->nfixed +
js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp); js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
#ifdef EXECUTE_TREE_TIMER #ifdef EXECUTE_TREE_TIMER
@@ -7747,7 +7708,7 @@ JS_REQUIRES_STACK jsval&
TraceRecorder::varval(unsigned n) const TraceRecorder::varval(unsigned n) const
{ {
JS_ASSERT(n < cx->fp->script->nslots); JS_ASSERT(n < cx->fp->script->nslots);
return cx->fp->slots[n]; return cx->fp->slots()[n];
} }
JS_REQUIRES_STACK jsval& JS_REQUIRES_STACK jsval&
@@ -7918,7 +7879,7 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, jsval*& vp,
} else if (sprop->getterOp() == js_GetCallVar || } else if (sprop->getterOp() == js_GetCallVar ||
sprop->getterOp() == js_GetCallVarChecked) { sprop->getterOp() == js_GetCallVarChecked) {
JS_ASSERT(slot < cfp->script->nslots); JS_ASSERT(slot < cfp->script->nslots);
vp = &cfp->slots[slot]; vp = &cfp->slots()[slot];
upvar_slot = cx->fp->fun->nargs + slot; upvar_slot = cx->fp->fun->nargs + slot;
nr.v = *vp; nr.v = *vp;
} else { } else {
@@ -9609,8 +9570,8 @@ TraceRecorder::clearFrameSlotsFromTracker(Tracker& which, JSStackFrame* fp, unsi
which.set(&fp->argsobj, (LIns*)0); which.set(&fp->argsobj, (LIns*)0);
which.set(&fp->scopeChain, (LIns*)0); which.set(&fp->scopeChain, (LIns*)0);
} }
vp = &fp->slots[0]; vp = &fp->slots()[0];
vpstop = &fp->slots[nslots]; vpstop = &fp->slots()[nslots];
while (vp < vpstop) while (vp < vpstop)
which.set(vp++, (LIns*)0); which.set(vp++, (LIns*)0);
} }
@@ -9679,7 +9640,7 @@ TraceRecorder::putActivationObjects()
if (nslots) { if (nslots) {
slots_ins = lir->insAlloc(sizeof(jsval) * nslots); slots_ins = lir->insAlloc(sizeof(jsval) * nslots);
for (int i = 0; i < nslots; ++i) { for (int i = 0; i < nslots; ++i) {
LIns* slot_ins = box_jsval(cx->fp->slots[i], get(&cx->fp->slots[i])); LIns* slot_ins = box_jsval(cx->fp->slots()[i], get(&cx->fp->slots()[i]));
lir->insStorei(slot_ins, slots_ins, i * sizeof(jsval), ACC_OTHER); lir->insStorei(slot_ins, slots_ins, i * sizeof(jsval), ACC_OTHER);
} }
} else { } else {
@@ -9736,21 +9697,41 @@ TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
) )
LIns* void_ins = INS_VOID(); LIns* void_ins = INS_VOID();
// Before we enter this frame, we need to clear out any dangling insns left
// in the tracer. While we also clear when returning from a function, it is
// possible to have the following sequence of stack usage:
//
// [fp1]***************** push
// [fp1]***** pop
// [fp1]*****[fp2] call
// [fp1]*****[fp2]*** push
//
// Duplicate native stack layout computation: see VisitFrameSlots header comment. // Duplicate native stack layout computation: see VisitFrameSlots header comment.
// This doesn't do layout arithmetic, but it must initialize in the tracker all the // This doesn't do layout arithmetic, but it must initialize in the tracker all the
// slots defined as imported by VisitFrameSlots. // slots defined as imported by VisitFrameSlots.
jsval* vp = &fp->argv[fp->argc]; jsval* vp = &fp->argv[fp->argc];
jsval* vpstop = vp + ptrdiff_t(fp->fun->nargs) - ptrdiff_t(fp->argc); jsval* vpstop = vp + ptrdiff_t(fp->fun->nargs) - ptrdiff_t(fp->argc);
while (vp < vpstop) { for (; vp < vpstop; ++vp) {
nativeFrameTracker.set(vp, (LIns*)0); nativeFrameTracker.set(vp, NULL);
set(vp++, void_ins); set(vp, void_ins);
} }
vp = &fp->slots[0]; nativeFrameTracker.set(&fp->argsobj, NULL);
vpstop = vp + fp->script->nfixed;
while (vp < vpstop)
set(vp++, void_ins);
set(&fp->argsobj, INS_NULL()); set(&fp->argsobj, INS_NULL());
nativeFrameTracker.set(&fp->scopeChain, NULL);
vp = fp->slots();
vpstop = vp + fp->script->nfixed;
for (; vp < vpstop; ++vp) {
nativeFrameTracker.set(vp, NULL);
set(vp, void_ins);
}
vp = vpstop;
vpstop = vp + (fp->script->nslots - fp->script->nfixed);
for (; vp < vpstop; ++vp)
nativeFrameTracker.set(vp, NULL);
LIns* callee_ins = get(&cx->fp->argv[-2]); LIns* callee_ins = get(&cx->fp->argv[-2]);
LIns* scopeChain_ins = stobj_get_parent(callee_ins); LIns* scopeChain_ins = stobj_get_parent(callee_ins);
@@ -11317,7 +11298,7 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty
if (sprop->setterOp() == SetCallVar) { if (sprop->setterOp() == SetCallVar) {
JS_ASSERT(sprop->hasShortID()); JS_ASSERT(sprop->hasShortID());
uintN slot = uint16(sprop->shortid); uintN slot = uint16(sprop->shortid);
jsval *vp2 = &fp->slots[slot]; jsval *vp2 = &fp->slots()[slot];
set(vp2, v_ins); set(vp2, v_ins);
return RECORD_CONTINUE; return RECORD_CONTINUE;
} }
@@ -12445,12 +12426,6 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc,
JSStackFrame* fp = cx->fp; JSStackFrame* fp = cx->fp;
// TODO: track the copying via the tracker...
if (argc < fun->nargs &&
jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
RETURN_STOP("can't trace calls with too few args requiring argv move");
}
// Generate a type map for the outgoing frame and stash it in the LIR // Generate a type map for the outgoing frame and stash it in the LIR
unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */); unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
FrameInfo* fi = (FrameInfo*) FrameInfo* fi = (FrameInfo*)
@@ -12468,7 +12443,7 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc,
tree->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain)); tree->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
fi->pc = fp->regs->pc; fi->pc = fp->regs->pc;
fi->imacpc = fp->imacpc; fi->imacpc = fp->imacpc;
fi->spdist = fp->regs->sp - fp->slots; fi->spdist = fp->regs->sp - fp->slots();
fi->set_argc(uint16(argc), constructing); fi->set_argc(uint16(argc), constructing);
fi->callerHeight = stackSlots - (2 + argc); fi->callerHeight = stackSlots - (2 + argc);
fi->callerArgc = fp->argc; fi->callerArgc = fp->argc;
@@ -13665,7 +13640,7 @@ TraceRecorder::record_JSOP_BINDNAME()
// are still on the stack. We never use BINDNAME to refer to these. // are still on the stack. We never use BINDNAME to refer to these.
while (obj->getClass() == &js_BlockClass) { while (obj->getClass() == &js_BlockClass) {
// The block's values are still on the stack. // The block's values are still on the stack.
JS_ASSERT(obj->getPrivate() == fp); JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, fp));
obj = obj->getParent(); obj = obj->getParent();
// Blocks always have parents. // Blocks always have parents.
JS_ASSERT(obj); JS_ASSERT(obj);
@@ -14262,7 +14237,7 @@ TraceRecorder::record_JSOP_RETRVAL()
JS_REQUIRES_STACK AbortableRecordingStatus JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETGVAR() TraceRecorder::record_JSOP_GETGVAR()
{ {
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)]; jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval)) if (JSVAL_IS_NULL(slotval))
return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here. return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
@@ -14278,7 +14253,7 @@ TraceRecorder::record_JSOP_GETGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETGVAR() TraceRecorder::record_JSOP_SETGVAR()
{ {
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)]; jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval)) if (JSVAL_IS_NULL(slotval))
return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here. return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
@@ -14294,7 +14269,7 @@ TraceRecorder::record_JSOP_SETGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCGVAR() TraceRecorder::record_JSOP_INCGVAR()
{ {
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)]; jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval)) if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here. // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE; return ARECORD_CONTINUE;
@@ -14310,7 +14285,7 @@ TraceRecorder::record_JSOP_INCGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECGVAR() TraceRecorder::record_JSOP_DECGVAR()
{ {
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)]; jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval)) if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here. // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE; return ARECORD_CONTINUE;
@@ -14326,7 +14301,7 @@ TraceRecorder::record_JSOP_DECGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARINC() TraceRecorder::record_JSOP_GVARINC()
{ {
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)]; jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval)) if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here. // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE; return ARECORD_CONTINUE;
@@ -14342,7 +14317,7 @@ TraceRecorder::record_JSOP_GVARINC()
JS_REQUIRES_STACK AbortableRecordingStatus JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARDEC() TraceRecorder::record_JSOP_GVARDEC()
{ {
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)]; jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval)) if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here. // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE; return ARECORD_CONTINUE;
@@ -14752,8 +14727,8 @@ TraceRecorder::record_JSOP_ARRAYPUSH()
{ {
uint32_t slot = GET_UINT16(cx->fp->regs->pc); uint32_t slot = GET_UINT16(cx->fp->regs->pc);
JS_ASSERT(cx->fp->script->nfixed <= slot); JS_ASSERT(cx->fp->script->nfixed <= slot);
JS_ASSERT(cx->fp->slots + slot < cx->fp->regs->sp - 1); JS_ASSERT(cx->fp->slots() + slot < cx->fp->regs->sp - 1);
jsval &arrayval = cx->fp->slots[slot]; jsval &arrayval = cx->fp->slots()[slot];
JS_ASSERT(JSVAL_IS_OBJECT(arrayval)); JS_ASSERT(JSVAL_IS_OBJECT(arrayval));
JS_ASSERT(JSVAL_TO_OBJECT(arrayval)->isDenseArray()); JS_ASSERT(JSVAL_TO_OBJECT(arrayval)->isDenseArray());
LIns *array_ins = get(&arrayval); LIns *array_ins = get(&arrayval);
@@ -14833,7 +14808,7 @@ TraceRecorder::record_JSOP_INDEXBASE3()
JS_REQUIRES_STACK AbortableRecordingStatus JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLGVAR() TraceRecorder::record_JSOP_CALLGVAR()
{ {
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)]; jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval)) if (JSVAL_IS_NULL(slotval))
// We will see JSOP_CALLNAME from the interpreter's jump, so no-op here. // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE; return ARECORD_CONTINUE;

@@ -74,6 +74,7 @@ JS_PUBLIC_API(void) JS_Assert(const char *s, const char *file, JSIntn ln)
* trapped. * trapped.
*/ */
*((int *) NULL) = 0; /* To continue from here in GDB: "return" then "continue". */ *((int *) NULL) = 0; /* To continue from here in GDB: "return" then "continue". */
raise(SIGABRT); /* In case above statement gets nixed by the optimizer. */
#else #else
raise(SIGABRT); /* To continue from here in GDB: "signal 0". */ raise(SIGABRT); /* To continue from here in GDB: "signal 0". */
#endif #endif

@@ -1406,7 +1406,7 @@ ValueToScript(JSContext *cx, jsval v)
script = (JSScript *) JS_GetPrivate(cx, obj); script = (JSScript *) JS_GetPrivate(cx, obj);
} else if (clasp == &js_GeneratorClass) { } else if (clasp == &js_GeneratorClass) {
JSGenerator *gen = (JSGenerator *) JS_GetPrivate(cx, obj); JSGenerator *gen = (JSGenerator *) JS_GetPrivate(cx, obj);
fun = gen->frame.fun; fun = gen->getFloatingFrame()->fun;
script = FUN_SCRIPT(fun); script = FUN_SCRIPT(fun);
} }
} }

@@ -38,13 +38,13 @@
var gTestfile = 'regress-350256-02.js'; var gTestfile = 'regress-350256-02.js';
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
var BUGNUMBER = 350256; var BUGNUMBER = 350256;
var summary = 'Array.apply maximum arguments: 2^20'; var summary = 'Array.apply maximum arguments: 2^19 - 1024';
var actual = ''; var actual = '';
var expect = ''; var expect = '';
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
test(Math.pow(2, 20)); test(Math.pow(2, 19) - 1024);
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
function test(length) function test(length)

@@ -38,13 +38,13 @@
var gTestfile = 'regress-350256-03.js'; var gTestfile = 'regress-350256-03.js';
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
var BUGNUMBER = 350256; var BUGNUMBER = 350256;
var summary = 'Array.apply maximum arguments: 2^24-1'; var summary = 'Array.apply maximum arguments: 2^19-1024';
var actual = ''; var actual = '';
var expect = ''; var expect = '';
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
test(Math.pow(2, 24)-1); test(Math.pow(2, 19)-1024);
//----------------------------------------------------------------------------- //-----------------------------------------------------------------------------
function test(length) function test(length)

@@ -0,0 +1,16 @@
function g(e) {
return ("" + e)
}
function blah() {
do {
yield
} while ({}(p = arguments))
}
rv = blah();
try {
for (a in rv) ;
} catch (e) {
print("" + g(e))
}
gc()

@@ -0,0 +1,41 @@
// Fun.apply ignores arguments past JS_ARGS_LENGTH_MAX = 2^19 - 1024
const numFatArgs = Math.pow(2,19) - 1024;
// Recursion on trace is limited to MAX_CALL_STACK_ENTRIES = 500
const traceDepth = 490;
var trace = true;
function maybeTrace(x) {
if (!trace)
eval("");
if (x <= 0)
return 0;
return maybeTrace(x-1);
}
function fatStack() {
return maybeTrace(traceDepth);
}
// This tests that we conservatively guard against stack space exhaustion
// before entering trace.
exception = false;
try {
fatStack.apply(null, new Array(numFatArgs));
} catch (e) {
assertEq(e.toString(), "InternalError: script stack space quota is exhausted");
exception = true;
}
assertEq(exception, true);
checkStats({traceCompleted:1});
// This tests that, without tracing, we exhaust stack space.
trace = false;
var exception = false;
try {
fatStack.apply(null, new Array(numFatArgs));
} catch (e) {
assertEq(e.toString(), "InternalError: script stack space quota is exhausted");
exception = true;
}
assertEq(exception, true);

@@ -338,6 +338,8 @@ STDMETHODIMP XPCDispatchTearOff::Invoke(DISPID dispIdMember, REFIID riid,
XPCJSRuntime* rt = ccx.GetRuntime(); XPCJSRuntime* rt = ccx.GetRuntime();
int j; int j;
js::InvokeArgsGuard args;
thisObj = obj = GetJSObject();; thisObj = obj = GetJSObject();;
if(!cx || !xpcc) if(!cx || !xpcc)
@@ -349,16 +351,12 @@ STDMETHODIMP XPCDispatchTearOff::Invoke(DISPID dispIdMember, REFIID riid,
xpcc->SetException(nsnull); xpcc->SetException(nsnull);
ccx.GetThreadData()->SetException(nsnull); ccx.GetThreadData()->SetException(nsnull);
// We use js_AllocStack, js_Invoke, and js_FreeStack so that the gcthings // We use js_Invoke so that the gcthings we use as args will be rooted
// we use as args will be rooted by the engine as we do conversions and // by the engine as we do conversions and prepare to do the function
// prepare to do the function call. This adds a fair amount of complexity, // call. This adds a fair amount of complexity, but is a good
// but is a good optimization compared to calling JS_AddRoot for each item. // optimization compared to calling JS_AddRoot for each item.
// setup stack
// allocate extra space for function and 'this'
stack_size = argc + 2;
js::LeaveTrace(cx);
// In the xpidl [function] case we are making sure now that the // In the xpidl [function] case we are making sure now that the
// JSObject is callable. If it is *not* callable then we silently // JSObject is callable. If it is *not* callable then we silently
@@ -382,19 +380,17 @@ STDMETHODIMP XPCDispatchTearOff::Invoke(DISPID dispIdMember, REFIID riid,
goto pre_call_clean_up; goto pre_call_clean_up;
} }
// if stack_size is zero then we won't be needing a stack if (!cx->stack().pushInvokeArgsFriendAPI(cx, argc, args))
if(stack_size && !(stackbase = sp = js_AllocStack(cx, stack_size, &mark)))
{ {
retval = NS_ERROR_OUT_OF_MEMORY; retval = NS_ERROR_OUT_OF_MEMORY;
goto pre_call_clean_up; goto pre_call_clean_up;
} }
sp = stackbase = args.getvp();
// this is a function call, so push function and 'this' // this is a function call, so push function and 'this'
if(stack_size != argc) *sp++ = fval;
{ *sp++ = OBJECT_TO_JSVAL(thisObj);
*sp++ = fval;
*sp++ = OBJECT_TO_JSVAL(thisObj);
}
// make certain we leave no garbage in the stack // make certain we leave no garbage in the stack
for(i = 0; i < argc; i++) for(i = 0; i < argc; i++)
@@ -445,7 +441,7 @@ pre_call_clean_up:
if(!JSVAL_IS_PRIMITIVE(fval)) if(!JSVAL_IS_PRIMITIVE(fval))
{ {
success = js_Invoke(cx, argc, stackbase, 0); success = js_Invoke(cx, args, 0);
result = stackbase[0]; result = stackbase[0];
} }
else else
@@ -527,9 +523,6 @@ pre_call_clean_up:
} }
done: done:
if(sp)
js_FreeStack(cx, mark);
// TODO: I think we may need to translate this error, // TODO: I think we may need to translate this error,
// for now we'll pass through // for now we'll pass through
return retval; return retval;
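
Editor's note: outside the engine, XPConnect reaches the same machinery through a friend-API entry point. One consequence worth calling out: the guard keeps vp[0..argc+1] rooted until it goes out of scope, so the callee's return value can be read straight back out of vp[0], which is why the separately rooted result/AUTO_MARK_JSVAL dance also disappears in nsXPCWrappedJSClass::CallMethod below. A hedged sketch of the embedder-side shape (the wrapper and its nsresult plumbing are illustrative; the js:: calls are the ones this patch uses):

    static nsresult
    CallWrappedFunctionValue(JSContext *cx, jsval fval, JSObject *thisObj,
                             uintN argc, const jsval *argv, jsval *rval)
    {
        js::LeaveTrace(cx);
        js::InvokeArgsGuard args;
        if (!cx->stack().pushInvokeArgsFriendAPI(cx, argc, args))
            return NS_ERROR_OUT_OF_MEMORY;

        jsval *sp = args.getvp();
        *sp++ = fval;                        /* callee */
        *sp++ = OBJECT_TO_JSVAL(thisObj);    /* |this| */
        for (uintN i = 0; i < argc; ++i)
            *sp++ = argv[i];                 /* args, rooted via the guard */

        if (!js_Invoke(cx, args, 0))
            return NS_ERROR_FAILURE;
        *rval = *args.getvp();               /* result, still rooted by the guard */
        return NS_OK;
    }
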

@@ -1259,8 +1259,6 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
jsval* sp = nsnull; jsval* sp = nsnull;
uint8 i; uint8 i;
uint8 argc=0; uint8 argc=0;
uint8 stack_size;
jsval result;
uint8 paramCount=0; uint8 paramCount=0;
nsresult retval = NS_ERROR_FAILURE; nsresult retval = NS_ERROR_FAILURE;
nsresult pending_result = NS_OK; nsresult pending_result = NS_OK;
@@ -1270,13 +1268,13 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
JSObject* obj; JSObject* obj;
const char* name = info->name; const char* name = info->name;
jsval fval; jsval fval;
void* mark;
JSBool foundDependentParam; JSBool foundDependentParam;
XPCContext* xpcc; XPCContext* xpcc;
JSContext* cx; JSContext* cx;
JSObject* thisObj; JSObject* thisObj;
JSBool popPrincipal = JS_FALSE; JSBool popPrincipal = JS_FALSE;
nsIScriptSecurityManager* ssm = nsnull; nsIScriptSecurityManager* ssm = nsnull;
bool invokeCall;
// Make sure not to set the callee on ccx until after we've gone through // Make sure not to set the callee on ccx until after we've gone through
// the whole nsIXPCFunctionThisTranslator bit. That code uses ccx to // the whole nsIXPCFunctionThisTranslator bit. That code uses ccx to
@@ -1296,17 +1294,7 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
} }
AutoScriptEvaluate scriptEval(cx); AutoScriptEvaluate scriptEval(cx);
#ifdef DEBUG_stats_jband js::InvokeArgsGuard args;
PRIntervalTime startTime = PR_IntervalNow();
PRIntervalTime endTime = 0;
static int totalTime = 0;
static int count = 0;
static const int interval = 10;
if(0 == (++count % interval))
printf("<<<<<<<< %d calls on nsXPCWrappedJSs made. (%d)\n", count, PR_IntervalToMilliseconds(totalTime));
#endif
obj = thisObj = wrapper->GetJSObject(); obj = thisObj = wrapper->GetJSObject();
@@ -1324,25 +1312,19 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
xpcc->SetException(nsnull); xpcc->SetException(nsnull);
ccx.GetThreadData()->SetException(nsnull); ccx.GetThreadData()->SetException(nsnull);
// We use js_AllocStack, js_Invoke, and js_FreeStack so that the gcthings // We use js_Invoke so that the gcthings we use as args will be rooted by
// we use as args will be rooted by the engine as we do conversions and // the engine as we do conversions and prepare to do the function call.
// prepare to do the function call. This adds a fair amount of complexity, // This adds a fair amount of complexity, but is a good optimization
// but is a good optimization compared to calling JS_AddRoot for each item. // compared to calling JS_AddRoot for each item.
js::LeaveTrace(cx); js::LeaveTrace(cx);
// setup stack // setup stack
// if this isn't a function call then we don't need to push extra stuff // if this isn't a function call then we don't need to push extra stuff
if(XPT_MD_IS_GETTER(info->flags) || XPT_MD_IS_SETTER(info->flags)) invokeCall = !(XPT_MD_IS_SETTER(info->flags) || XPT_MD_IS_GETTER(info->flags));
if (invokeCall)
{ {
stack_size = argc;
}
else
{
// allocate extra space for function and 'this'
stack_size = argc + 2;
// We get fval before allocating the stack to avoid gc badness that can // We get fval before allocating the stack to avoid gc badness that can
// happen if the GetProperty call leaves our request and the gc runs // happen if the GetProperty call leaves our request and the gc runs
// while the stack we allocate contains garbage. // while the stack we allocate contains garbage.
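Not part of the patch: a back-of-the-envelope on the slot accounting behind the new invokeCall flag. The guard pushed in the next hunk reserves |2 + argc| rooted jsvals, so the getter/setter path simply requests zero arguments and reuses one of the two mandatory slots for the property value, while the call path requests the real argc. The helper name below is hypothetical.

    // Sketch only, assuming the |2 + argc| reservation described in the next hunk.
    static inline uintN
    RootedSlotsRequested(JSBool invokeCall, uintN argc)
    {
        // getter/setter: request 0 args -> 2 slots reserved, only slot 0 used
        // function call: request argc   -> [callee, this, arg0 ... argN-1]
        return 2 + (invokeCall ? argc : 0);
    }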
@@ -1442,18 +1424,21 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
} }
} }
// if stack_size is zero then we won't be needing a stack /*
if(stack_size && !(stackbase = sp = js_AllocStack(cx, stack_size, &mark))) * pushInvokeArgs allocates |2 + argc| slots, but getters and setters
* require only one rooted jsval, so waste one value.
*/
JS_ASSERT_IF(!invokeCall, argc < 2);
if (!cx->stack().pushInvokeArgsFriendAPI(cx, invokeCall ? argc : 0, args))
{ {
retval = NS_ERROR_OUT_OF_MEMORY; retval = NS_ERROR_OUT_OF_MEMORY;
goto pre_call_clean_up; goto pre_call_clean_up;
} }
NS_ASSERTION(XPT_MD_IS_GETTER(info->flags) || sp, sp = stackbase = args.getvp();
"Only a getter needs no stack.");
// this is a function call, so push function and 'this' // this is a function call, so push function and 'this'
if(stack_size != argc) if(invokeCall)
{ {
*sp++ = fval; *sp++ = fval;
*sp++ = OBJECT_TO_JSVAL(thisObj); *sp++ = OBJECT_TO_JSVAL(thisObj);
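Not part of the patch: the overall shape the hunk above converges on, condensed into one hypothetical helper. Error handling is minimal, and the pushInvokeArgsFriendAPI / js_Invoke signatures are assumed to be exactly those used elsewhere in this file; the guard releases its slots when it goes out of scope, which is what lets the explicit js_FreeStack calls disappear.

    // Minimal sketch of the InvokeArgsGuard pattern replacing js_AllocStack/js_FreeStack.
    static JSBool
    CallFunctionValue(JSContext *cx, JSObject *thisObj, jsval fval,
                      uintN argc, const jsval *argv, jsval *rval)
    {
        js::LeaveTrace(cx);                              // as above: leave trace before touching the stack
        js::InvokeArgsGuard args;
        if (!cx->stack().pushInvokeArgsFriendAPI(cx, argc, args))
            return JS_FALSE;                             // OOM: nothing to clean up, guard is inert
        jsval *vp = args.getvp();                        // |2 + argc| rooted slots
        vp[0] = fval;                                    // callee
        vp[1] = OBJECT_TO_JSVAL(thisObj);                // |this|
        for (uintN i = 0; i < argc; ++i)
            vp[2 + i] = argv[i];                         // actual arguments
        if (!js_Invoke(cx, args, 0))
            return JS_FALSE;                             // guard pops the slots on the way out
        *rval = vp[0];                                   // on success the return value is in *vp
        return JS_TRUE;
    }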
@@ -1650,11 +1635,8 @@ pre_call_clean_up:
// Make sure "this" doesn't get deleted during this call. // Make sure "this" doesn't get deleted during this call.
nsCOMPtr<nsIXPCWrappedJSClass> kungFuDeathGrip(this); nsCOMPtr<nsIXPCWrappedJSClass> kungFuDeathGrip(this);
result = JSVAL_NULL;
AUTO_MARK_JSVAL(ccx, &result);
if(!readyToDoTheCall) if(!readyToDoTheCall)
goto done; return retval;
// do the deed - note exceptions // do the deed - note exceptions
@@ -1676,8 +1658,7 @@ pre_call_clean_up:
if(NS_FAILED(rv)) if(NS_FAILED(rv))
{ {
JS_ReportOutOfMemory(ccx); JS_ReportOutOfMemory(ccx);
retval = NS_ERROR_OUT_OF_MEMORY; return NS_ERROR_OUT_OF_MEMORY;
goto done;
} }
popPrincipal = JS_TRUE; popPrincipal = JS_TRUE;
@@ -1685,16 +1666,16 @@ pre_call_clean_up:
} }
} }
/* On success, the return value is placed in |*stackbase|. */
if(XPT_MD_IS_GETTER(info->flags)) if(XPT_MD_IS_GETTER(info->flags))
success = JS_GetProperty(cx, obj, name, &result); success = JS_GetProperty(cx, obj, name, stackbase);
else if(XPT_MD_IS_SETTER(info->flags)) else if(XPT_MD_IS_SETTER(info->flags))
success = JS_SetProperty(cx, obj, name, sp-1); success = JS_SetProperty(cx, obj, name, stackbase);
else else
{ {
if(!JSVAL_IS_PRIMITIVE(fval)) if(!JSVAL_IS_PRIMITIVE(fval))
{ {
success = js_Invoke(cx, argc, stackbase, 0); success = js_Invoke(cx, args, 0);
result = *stackbase;
} }
else else
{ {
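Not part of the patch: the dispatch above, flattened out of the diff for readability. The point of the "On success, the return value is placed in |*stackbase|" comment is that every branch now reads and writes slot 0 of the guard, which is why the separately rooted AUTO_MARK_JSVAL |result| could be dropped. Sketch only; |info|, |obj|, |name|, |fval|, |args|, and |stackbase| are the surrounding function's locals.

    // Sketch: all three paths leave the interesting value in stackbase[0].
    JSBool success;
    if (XPT_MD_IS_GETTER(info->flags))
        success = JS_GetProperty(cx, obj, name, stackbase);   // property value -> stackbase[0]
    else if (XPT_MD_IS_SETTER(info->flags))
        success = JS_SetProperty(cx, obj, name, stackbase);   // value to set was staged in stackbase[0]
    else if (!JSVAL_IS_PRIMITIVE(fval))
        success = js_Invoke(cx, args, 0);                     // return value -> stackbase[0]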
@@ -1733,8 +1714,7 @@ pre_call_clean_up:
// May also want to check if we're moving from content->chrome and force // May also want to check if we're moving from content->chrome and force
// a report in that case. // a report in that case.
retval = CheckForException(ccx, name, GetInterfaceName(), forceReport); return CheckForException(ccx, name, GetInterfaceName(), forceReport);
goto done;
} }
ccx.GetThreadData()->SetException(nsnull); // XXX necessary? ccx.GetThreadData()->SetException(nsnull); // XXX necessary?
@@ -1771,7 +1751,7 @@ pre_call_clean_up:
pv = (nsXPTCMiniVariant*) nativeParams[i].val.p; pv = (nsXPTCMiniVariant*) nativeParams[i].val.p;
if(param.IsRetval()) if(param.IsRetval())
val = result; val = *stackbase;
else if(JSVAL_IS_PRIMITIVE(stackbase[i+2]) || else if(JSVAL_IS_PRIMITIVE(stackbase[i+2]) ||
!JS_GetPropertyById(cx, JSVAL_TO_OBJECT(stackbase[i+2]), !JS_GetPropertyById(cx, JSVAL_TO_OBJECT(stackbase[i+2]),
mRuntime->GetStringID(XPCJSRuntime::IDX_VALUE), mRuntime->GetStringID(XPCJSRuntime::IDX_VALUE),
@@ -1822,7 +1802,7 @@ pre_call_clean_up:
pv = (nsXPTCMiniVariant*) nativeParams[i].val.p; pv = (nsXPTCMiniVariant*) nativeParams[i].val.p;
if(param.IsRetval()) if(param.IsRetval())
val = result; val = *stackbase;
else if(!JS_GetPropertyById(cx, JSVAL_TO_OBJECT(stackbase[i+2]), else if(!JS_GetPropertyById(cx, JSVAL_TO_OBJECT(stackbase[i+2]),
mRuntime->GetStringID(XPCJSRuntime::IDX_VALUE), mRuntime->GetStringID(XPCJSRuntime::IDX_VALUE),
&val)) &val))
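Not part of the patch: the indexing both retval-conversion hunks above rely on. For a function call the guard's slots were laid out as [fval, thisObj, arg0, arg1, ...], so after the call the retval sits in *stackbase and the object wrapping the i-th dependent out-param sits at stackbase[i + 2] (its actual value is then read through the IDX_VALUE property, as in the code above). The helper below is hypothetical.

    // Sketch: where the i-th parameter's JS value lives after js_Invoke.
    //   stackbase[0]      callee slot, overwritten with the return value on success
    //   stackbase[1]      |this|
    //   stackbase[i + 2]  the i-th argument (for out-params, an object whose
    //                     "value" property holds the result)
    static inline jsval
    ParamSlot(jsval *stackbase, JSBool isRetval, uint8 i)
    {
        return isRetval ? *stackbase : stackbase[i + 2];
    }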
@@ -1935,15 +1915,6 @@ pre_call_clean_up:
retval = pending_result; retval = pending_result;
} }
done:
if(sp)
js_FreeStack(cx, mark);
#ifdef DEBUG_stats_jband
endTime = PR_IntervalNow();
printf("%s::%s %d ( c->js ) \n", GetInterfaceName(), info->GetName(), PR_IntervalToMilliseconds(endTime-startTime));
totalTime += endTime-startTime;
#endif
return retval; return retval;
} }