/*
 * Provenance: js/src/jscntxtinlines.h from mozilla-central
 * (merge of mozilla-central and tracemonkey, a=blockers).
 */
1 /* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=4 sw=4 et tw=78:
4 * ***** BEGIN LICENSE BLOCK *****
5 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
7 * The contents of this file are subject to the Mozilla Public License Version
8 * 1.1 (the "License"); you may not use this file except in compliance with
9 * the License. You may obtain a copy of the License at
10 * http://www.mozilla.org/MPL/
12 * Software distributed under the License is distributed on an "AS IS" basis,
13 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
14 * for the specific language governing rights and limitations under the
15 * License.
17 * The Original Code is SpiderMonkey code.
19 * The Initial Developer of the Original Code is
20 * Mozilla Corporation.
21 * Portions created by the Initial Developer are Copyright (C) 2010
22 * the Initial Developer. All Rights Reserved.
24 * Contributor(s):
25 * Jeff Walden <jwalden+code@mit.edu> (original author)
27 * Alternatively, the contents of this file may be used under the terms of
28 * either of the GNU General Public License Version 2 or later (the "GPL"),
29 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
30 * in which case the provisions of the GPL or the LGPL are applicable instead
31 * of those above. If you wish to allow use of your version of this file only
32 * under the terms of either the GPL or the LGPL, and not to allow others to
33 * use your version of this file under the terms of the MPL, indicate your
34 * decision by deleting the provisions above and replace them with the notice
35 * and other provisions required by the GPL or the LGPL. If you do not delete
36 * the provisions above, a recipient may use your version of this file under
37 * the terms of any one of the MPL, the GPL or the LGPL.
39 * ***** END LICENSE BLOCK ***** */
#ifndef jscntxtinlines_h___
#define jscntxtinlines_h___

#include <string.h>

#include "jscntxt.h"
#include "jscompartment.h"
#include "jsparse.h"
#include "jsstaticcheck.h"
#include "jsxml.h"
#include "jsregexp.h"
#include "jsgc.h"
52 namespace js {
54 static inline JSObject *
55 GetGlobalForScopeChain(JSContext *cx)
58 * This is essentially GetScopeChain(cx)->getGlobal(), but without
59 * falling off trace.
61 * This use of cx->fp, possibly on trace, is deliberate:
62 * cx->fp->scopeChain->getGlobal() returns the same object whether we're on
63 * trace or not, since we do not trace calls across global objects.
65 VOUCH_DOES_NOT_REQUIRE_STACK();
67 if (cx->hasfp())
68 return cx->fp()->scopeChain().getGlobal();
70 JSObject *scope = cx->globalObject;
71 if (!scope) {
72 JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_INACTIVE);
73 return NULL;
75 OBJ_TO_INNER_OBJECT(cx, scope);
76 return scope;
#ifdef JS_METHODJIT
/* Return the method-JIT (JaegerMonkey) data of this context's compartment. */
inline js::mjit::JaegerCompartment *JSContext::jaegerCompartment()
{
    return compartment->jaegerCompartment;
}
#endif
88 inline bool
89 JSContext::ensureGeneratorStackSpace()
91 bool ok = genStack.reserve(genStack.length() + 1);
92 if (!ok)
93 js_ReportOutOfMemory(this);
94 return ok;
97 JSStackFrame *
98 JSContext::computeNextFrame(JSStackFrame *fp)
100 JSStackFrame *next = NULL;
101 for (js::StackSegment *ss = currentSegment; ; ss = ss->getPreviousInContext()) {
102 JSStackFrame *end = ss->getInitialFrame()->prev();
103 for (JSStackFrame *f = ss->getCurrentFrame(); f != end; next = f, f = f->prev()) {
104 if (f == fp)
105 return next;
107 if (end != ss->getPreviousInContext()->getCurrentFrame())
108 next = NULL;
112 inline js::RegExpStatics *
113 JSContext::regExpStatics()
115 return js::RegExpStatics::extractFrom(js::GetGlobalForScopeChain(this));
118 namespace js {
120 JS_REQUIRES_STACK JS_ALWAYS_INLINE JSFrameRegs *
121 StackSegment::getCurrentRegs() const
123 JS_ASSERT(inContext());
124 return isActive() ? cx->regs : getSuspendedRegs();
127 JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
128 StackSegment::getCurrentFrame() const
130 return getCurrentRegs()->fp;
133 JS_REQUIRES_STACK inline Value *
134 StackSpace::firstUnused() const
136 StackSegment *seg = currentSegment;
137 if (!seg) {
138 JS_ASSERT(invokeArgEnd == NULL);
139 return base;
141 if (seg->inContext()) {
142 Value *sp = seg->getCurrentRegs()->sp;
143 if (invokeArgEnd > sp) {
144 JS_ASSERT(invokeSegment == currentSegment);
145 JS_ASSERT_IF(seg->maybeContext()->hasfp(),
146 invokeFrame == seg->maybeContext()->fp());
147 return invokeArgEnd;
149 return sp;
151 JS_ASSERT(invokeArgEnd);
152 JS_ASSERT(invokeSegment == currentSegment);
153 return invokeArgEnd;
157 /* Inline so we don't need the friend API. */
158 JS_ALWAYS_INLINE bool
159 StackSpace::isCurrentAndActive(JSContext *cx) const
161 #ifdef DEBUG
162 JS_ASSERT_IF(cx->getCurrentSegment(),
163 cx->getCurrentSegment()->maybeContext() == cx);
164 cx->assertSegmentsInSync();
165 #endif
166 return currentSegment &&
167 currentSegment->isActive() &&
168 currentSegment == cx->getCurrentSegment();
171 STATIC_POSTCONDITION(!return || ubound(from) >= nvals)
172 JS_ALWAYS_INLINE bool
173 StackSpace::ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const
175 JS_ASSERT(from >= firstUnused());
176 #ifdef XP_WIN
177 JS_ASSERT(from <= commitEnd);
178 if (commitEnd - from >= nvals)
179 goto success;
180 if (end - from < nvals) {
181 if (maybecx)
182 js_ReportOutOfScriptQuota(maybecx);
183 return false;
185 if (!bumpCommit(from, nvals)) {
186 if (maybecx)
187 js_ReportOutOfScriptQuota(maybecx);
188 return false;
190 goto success;
191 #else
192 if (end - from < nvals) {
193 if (maybecx)
194 js_ReportOutOfScriptQuota(maybecx);
195 return false;
197 goto success;
198 #endif
199 success:
200 #ifdef DEBUG
201 memset(from, 0xde, nvals * sizeof(js::Value));
202 #endif
203 return true;
206 JS_ALWAYS_INLINE bool
207 StackSpace::ensureEnoughSpaceToEnterTrace()
209 #ifdef XP_WIN
210 return ensureSpace(NULL, firstUnused(), MAX_TRACE_SPACE_VALS);
211 #endif
212 return end - firstUnused() > MAX_TRACE_SPACE_VALS;
215 JS_ALWAYS_INLINE bool
216 StackSpace::EnsureSpaceCheck::operator()(const StackSpace &stack, JSContext *cx,
217 Value *from, uintN nvals)
219 return stack.ensureSpace(cx, from, nvals);
222 JS_ALWAYS_INLINE bool
223 StackSpace::LimitCheck::operator()(const StackSpace &stack, JSContext *cx,
224 Value *from, uintN nvals)
226 JS_ASSERT(from == stack.firstUnused());
227 JS_ASSERT(from < *limit);
228 if (*limit - from >= ptrdiff_t(nvals))
229 return true;
230 if (stack.bumpCommitAndLimit(base, from, nvals, limit))
231 return true;
232 js_ReportOverRecursed(cx);
233 return false;
236 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
237 StackSpace::pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *ag)
239 if (JS_UNLIKELY(!isCurrentAndActive(cx)))
240 return pushSegmentForInvoke(cx, argc, ag);
242 Value *sp = cx->regs->sp;
243 Value *start = invokeArgEnd > sp ? invokeArgEnd : sp;
244 JS_ASSERT(start == firstUnused());
245 uintN nvals = 2 + argc;
246 if (!ensureSpace(cx, start, nvals))
247 return false;
249 Value *vp = start;
250 Value *vpend = vp + nvals;
251 /* Don't need to MakeRangeGCSafe: the VM stack is conservatively marked. */
253 /* Use invokeArgEnd to root [vp, vpend) until the frame is pushed. */
254 ag->prevInvokeArgEnd = invokeArgEnd;
255 invokeArgEnd = vpend;
256 #ifdef DEBUG
257 ag->prevInvokeSegment = invokeSegment;
258 invokeSegment = currentSegment;
259 ag->prevInvokeFrame = invokeFrame;
260 invokeFrame = cx->maybefp();
261 #endif
263 ag->cx = cx;
264 ag->argv_ = vp + 2;
265 ag->argc_ = argc;
266 return true;
269 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
270 StackSpace::popInvokeArgs(const InvokeArgsGuard &ag)
272 if (JS_UNLIKELY(ag.seg != NULL)) {
273 popSegmentForInvoke(ag);
274 return;
277 JS_ASSERT(isCurrentAndActive(ag.cx));
278 JS_ASSERT(invokeSegment == currentSegment);
279 JS_ASSERT(invokeFrame == ag.cx->maybefp());
280 JS_ASSERT(invokeArgEnd == ag.argv() + ag.argc());
282 #ifdef DEBUG
283 invokeSegment = ag.prevInvokeSegment;
284 invokeFrame = ag.prevInvokeFrame;
285 #endif
286 invokeArgEnd = ag.prevInvokeArgEnd;
289 JS_ALWAYS_INLINE
290 InvokeArgsGuard::~InvokeArgsGuard()
292 if (JS_UNLIKELY(!pushed()))
293 return;
294 cx->stack().popInvokeArgs(*this);
297 template <class Check>
298 JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
299 StackSpace::getCallFrame(JSContext *cx, Value *firstUnused, uintN nactual,
300 JSFunction *fun, JSScript *script, uint32 *flags,
301 Check check) const
303 JS_ASSERT(fun->script() == script);
305 /* Include an extra sizeof(JSStackFrame) for the method-jit. */
306 uintN nvals = VALUES_PER_STACK_FRAME + script->nslots;
307 uintN nformal = fun->nargs;
309 /* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. */
311 if (nactual == nformal) {
312 if (JS_UNLIKELY(!check(*this, cx, firstUnused, nvals)))
313 return NULL;
314 return reinterpret_cast<JSStackFrame *>(firstUnused);
317 if (nactual < nformal) {
318 *flags |= JSFRAME_UNDERFLOW_ARGS;
319 uintN nmissing = nformal - nactual;
320 if (JS_UNLIKELY(!check(*this, cx, firstUnused, nmissing + nvals)))
321 return NULL;
322 SetValueRangeToUndefined(firstUnused, nmissing);
323 return reinterpret_cast<JSStackFrame *>(firstUnused + nmissing);
326 *flags |= JSFRAME_OVERFLOW_ARGS;
327 uintN ncopy = 2 + nformal;
328 if (JS_UNLIKELY(!check(*this, cx, firstUnused, ncopy + nvals)))
329 return NULL;
331 Value *dst = firstUnused;
332 Value *src = firstUnused - (2 + nactual);
333 PodCopy(dst, src, ncopy);
334 Debug_SetValueRangeToCrashOnTouch(src, ncopy);
335 return reinterpret_cast<JSStackFrame *>(firstUnused + ncopy);
338 JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
339 StackSpace::getInvokeFrame(JSContext *cx, const CallArgs &args,
340 JSFunction *fun, JSScript *script,
341 uint32 *flags, InvokeFrameGuard *fg) const
343 JS_ASSERT(firstUnused() == args.argv() + args.argc());
345 Value *firstUnused = args.argv() + args.argc();
346 fg->regs_.fp = getCallFrame(cx, firstUnused, args.argc(), fun, script, flags,
347 EnsureSpaceCheck());
348 fg->regs_.sp = fg->regs_.fp->slots() + script->nfixed;
349 fg->regs_.pc = script->code;
351 return fg->regs_.fp != NULL;
354 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
355 StackSpace::pushInvokeFrame(JSContext *cx, const CallArgs &args,
356 InvokeFrameGuard *fg)
358 JS_ASSERT(firstUnused() == args.argv() + args.argc());
360 if (JS_UNLIKELY(!currentSegment->inContext())) {
361 cx->pushSegmentAndFrame(currentSegment, fg->regs_);
362 } else {
363 fg->prevRegs_ = cx->regs;
364 cx->setCurrentRegs(&fg->regs_);
367 fg->cx_ = cx;
368 JS_ASSERT(isCurrentAndActive(cx));
371 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
372 StackSpace::popInvokeFrame(const InvokeFrameGuard &fg)
374 JSContext *cx = fg.cx_;
375 JSStackFrame *fp = fg.regs_.fp;
377 JS_ASSERT(isCurrentAndActive(cx));
378 if (JS_UNLIKELY(currentSegment->getInitialFrame() == fp)) {
379 cx->popSegmentAndFrame();
380 } else {
381 JS_ASSERT(&fg.regs_ == cx->regs);
382 JS_ASSERT(fp->prev_ == fg.prevRegs_->fp);
383 JS_ASSERT(fp->prevpc() == fg.prevRegs_->pc);
384 cx->setCurrentRegs(fg.prevRegs_);
388 JS_ALWAYS_INLINE void
389 InvokeFrameGuard::pop()
391 JS_ASSERT(pushed());
392 cx_->stack().popInvokeFrame(*this);
393 cx_ = NULL;
396 JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
397 StackSpace::getInlineFrame(JSContext *cx, Value *sp, uintN nactual,
398 JSFunction *fun, JSScript *script, uint32 *flags) const
400 JS_ASSERT(isCurrentAndActive(cx));
401 JS_ASSERT(cx->hasActiveSegment());
402 JS_ASSERT(cx->regs->sp == sp);
404 return getCallFrame(cx, sp, nactual, fun, script, flags, EnsureSpaceCheck());
407 JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
408 StackSpace::getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual,
409 JSFunction *fun, JSScript *script, uint32 *flags,
410 JSStackFrame *base, Value **limit) const
412 JS_ASSERT(isCurrentAndActive(cx));
413 JS_ASSERT(cx->hasActiveSegment());
414 JS_ASSERT(cx->regs->sp == sp);
416 return getCallFrame(cx, sp, nactual, fun, script, flags, LimitCheck(base, limit));
419 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
420 StackSpace::pushInlineFrame(JSContext *cx, JSScript *script, JSStackFrame *fp,
421 JSFrameRegs *regs)
423 JS_ASSERT(isCurrentAndActive(cx));
424 JS_ASSERT(cx->regs == regs && script == fp->script());
426 regs->fp = fp;
427 regs->pc = script->code;
428 regs->sp = fp->slots() + script->nfixed;
431 JS_REQUIRES_STACK JS_ALWAYS_INLINE void
432 StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *prev, Value *newsp)
434 JS_ASSERT(isCurrentAndActive(cx));
435 JS_ASSERT(cx->hasActiveSegment());
436 JS_ASSERT(cx->regs->fp->prev_ == prev);
437 JS_ASSERT(!cx->regs->fp->hasImacropc());
438 JS_ASSERT(prev->base() <= newsp && newsp <= cx->regs->fp->formalArgsEnd());
440 JSFrameRegs *regs = cx->regs;
441 regs->pc = prev->pc(cx, regs->fp);
442 regs->fp = prev;
443 regs->sp = newsp;
446 JS_ALWAYS_INLINE Value *
447 StackSpace::getStackLimit(JSContext *cx)
449 Value *sp = cx->regs->sp;
450 JS_ASSERT(sp == firstUnused());
451 Value *limit = sp + STACK_QUOTA;
454 * Try to reserve the whole STACK_QUOTA. If that fails, though, just
455 * reserve the minimum required space: enough for the nslots + an
456 * additional stack frame.
458 #ifdef XP_WIN
459 if (JS_LIKELY(limit <= commitEnd))
460 return limit;
461 if (ensureSpace(NULL /* don't report error */, sp, STACK_QUOTA))
462 return limit;
463 uintN minimum = cx->fp()->numSlots() + VALUES_PER_STACK_FRAME;
464 return ensureSpace(cx, sp, minimum) ? sp + minimum : NULL;
465 #else
466 if (JS_LIKELY(limit <= end))
467 return limit;
468 uintN minimum = cx->fp()->numSlots() + VALUES_PER_STACK_FRAME;
469 return ensureSpace(cx, sp, minimum) ? sp + minimum : NULL;
470 #endif
473 JS_REQUIRES_STACK inline
474 FrameRegsIter::FrameRegsIter(JSContext *cx)
475 : cx(cx)
477 curseg = cx->getCurrentSegment();
478 if (JS_UNLIKELY(!curseg || !curseg->isActive())) {
479 initSlow();
480 return;
482 JS_ASSERT(cx->regs->fp);
483 curfp = cx->regs->fp;
484 cursp = cx->regs->sp;
485 curpc = cx->regs->pc;
486 return;
489 inline FrameRegsIter &
490 FrameRegsIter::operator++()
492 JSStackFrame *fp = curfp;
493 JSStackFrame *prev = curfp = curfp->prev();
494 if (!prev)
495 return *this;
497 curpc = curfp->pc(cx, fp);
499 if (JS_UNLIKELY(fp == curseg->getInitialFrame())) {
500 incSlow(fp, prev);
501 return *this;
504 cursp = fp->formalArgsEnd();
505 return *this;
508 class AutoNamespaceArray : protected AutoGCRooter {
509 public:
510 AutoNamespaceArray(JSContext *cx) : AutoGCRooter(cx, NAMESPACES) {
511 array.init();
514 ~AutoNamespaceArray() {
515 array.finish(context);
518 uint32 length() const { return array.length; }
520 public:
521 friend void AutoGCRooter::trace(JSTracer *trc);
523 JSXMLArray array;
#ifdef DEBUG
/*
 * Debug-only helper that accumulates the compartment of every value it is
 * shown and aborts on a mismatch (excluding the shared atoms compartment).
 */
class CompartmentChecker
{
  private:
    JSContext *context;
    JSCompartment *compartment;

  public:
    explicit CompartmentChecker(JSContext *cx) : context(cx), compartment(cx->compartment) {
        check(cx->hasfp() ? JS_GetGlobalForScopeChain(cx) : cx->globalObject);
        VOUCH_DOES_NOT_REQUIRE_STACK();
    }

    /*
     * Set a breakpoint here (break js::CompartmentChecker::fail) to debug
     * compartment mismatches.
     */
    static void fail(JSCompartment *c1, JSCompartment *c2) {
        printf("*** Compartment mismatch %p vs. %p\n", (void *) c1, (void *) c2);
        JS_NOT_REACHED("compartment mismatched");
    }

    /* Note: should only be used when neither c1 nor c2 may be the default compartment. */
    static void check(JSCompartment *c1, JSCompartment *c2) {
        JS_ASSERT(c1 != c1->rt->atomsCompartment);
        JS_ASSERT(c2 != c2->rt->atomsCompartment);
        if (c1 != c2)
            fail(c1, c2);
    }

    void check(JSCompartment *c) {
        if (c && c != context->runtime->atomsCompartment) {
            if (!compartment)
                compartment = c;
            else if (c != compartment)
                fail(compartment, c);
        }
    }

    void check(JSPrincipals *) { /* nothing for now */ }

    void check(JSObject *obj) {
        if (obj)
            check(obj->compartment());
    }

    void check(JSString *str) {
        /* Static and atomized strings are shared across compartments. */
        if (!JSString::isStatic(str) && !str->isAtomized())
            check(str->asCell()->compartment());
    }

    void check(const js::Value &v) {
        if (v.isObject())
            check(&v.toObject());
        else if (v.isString())
            check(v.toString());
    }

    void check(jsval v) {
        check(Valueify(v));
    }

    void check(const ValueArray &arr) {
        for (size_t i = 0; i < arr.length; i++)
            check(arr.array[i]);
    }

    void check(const JSValueArray &arr) {
        for (size_t i = 0; i < arr.length; i++)
            check(arr.array[i]);
    }

    void check(jsid id) {
        if (JSID_IS_OBJECT(id))
            check(JSID_TO_OBJECT(id));
    }

    void check(JSIdArray *ida) {
        if (ida) {
            for (jsint i = 0; i < ida->length; i++) {
                if (JSID_IS_OBJECT(ida->vector[i]))
                    check(ida->vector[i]);
            }
        }
    }

    void check(JSScript *script) {
        if (script) {
            check(script->compartment);
            if (script->u.object)
                check(script->u.object);
        }
    }

    void check(JSStackFrame *fp) {
        check(&fp->scopeChain());
    }
};
#endif
628 * Don't perform these checks when called from a finalizer. The checking
629 * depends on other objects not having been swept yet.
631 #define START_ASSERT_SAME_COMPARTMENT() \
632 if (cx->runtime->gcRunning) \
633 return; \
634 CompartmentChecker c(cx)
636 template <class T1> inline void
637 assertSameCompartment(JSContext *cx, T1 t1)
639 #ifdef DEBUG
640 START_ASSERT_SAME_COMPARTMENT();
641 c.check(t1);
642 #endif
645 template <class T1, class T2> inline void
646 assertSameCompartment(JSContext *cx, T1 t1, T2 t2)
648 #ifdef DEBUG
649 START_ASSERT_SAME_COMPARTMENT();
650 c.check(t1);
651 c.check(t2);
652 #endif
655 template <class T1, class T2, class T3> inline void
656 assertSameCompartment(JSContext *cx, T1 t1, T2 t2, T3 t3)
658 #ifdef DEBUG
659 START_ASSERT_SAME_COMPARTMENT();
660 c.check(t1);
661 c.check(t2);
662 c.check(t3);
663 #endif
666 template <class T1, class T2, class T3, class T4> inline void
667 assertSameCompartment(JSContext *cx, T1 t1, T2 t2, T3 t3, T4 t4)
669 #ifdef DEBUG
670 START_ASSERT_SAME_COMPARTMENT();
671 c.check(t1);
672 c.check(t2);
673 c.check(t3);
674 c.check(t4);
675 #endif
678 template <class T1, class T2, class T3, class T4, class T5> inline void
679 assertSameCompartment(JSContext *cx, T1 t1, T2 t2, T3 t3, T4 t4, T5 t5)
681 #ifdef DEBUG
682 START_ASSERT_SAME_COMPARTMENT();
683 c.check(t1);
684 c.check(t2);
685 c.check(t3);
686 c.check(t4);
687 c.check(t5);
688 #endif
691 #undef START_ASSERT_SAME_COMPARTMENT
693 STATIC_PRECONDITION_ASSUME(ubound(vp) >= argc + 2)
694 JS_ALWAYS_INLINE bool
695 CallJSNative(JSContext *cx, js::Native native, uintN argc, js::Value *vp)
697 #ifdef DEBUG
698 JSBool alreadyThrowing = cx->isExceptionPending();
699 #endif
700 assertSameCompartment(cx, ValueArray(vp, argc + 2));
701 JSBool ok = native(cx, argc, vp);
702 if (ok) {
703 assertSameCompartment(cx, vp[0]);
704 JS_ASSERT_IF(!alreadyThrowing, !cx->isExceptionPending());
706 return ok;
709 extern JSBool CallOrConstructBoundFunction(JSContext *, uintN, js::Value *);
711 STATIC_PRECONDITION(ubound(vp) >= argc + 2)
712 JS_ALWAYS_INLINE bool
713 CallJSNativeConstructor(JSContext *cx, js::Native native, uintN argc, js::Value *vp)
715 #ifdef DEBUG
716 JSObject *callee = &vp[0].toObject();
717 #endif
719 JS_ASSERT(vp[1].isMagic());
720 if (!CallJSNative(cx, native, argc, vp))
721 return false;
724 * Native constructors must return non-primitive values on success.
725 * Although it is legal, if a constructor returns the callee, there is a
726 * 99.9999% chance it is a bug. If any valid code actually wants the
727 * constructor to return the callee, the assertion can be removed or
728 * (another) conjunct can be added to the antecedent.
730 * Proxies are exceptions to both rules: they can return primitives and
731 * they allow content to return the callee.
733 * CallOrConstructBoundFunction is an exception as well because we
734 * might have used bind on a proxy function.
736 * (new Object(Object)) returns the callee.
738 extern JSBool proxy_Construct(JSContext *, uintN, Value *);
739 JS_ASSERT_IF(native != proxy_Construct && native != js::CallOrConstructBoundFunction &&
740 (!callee->isFunction() || callee->getFunctionPrivate()->u.n.clasp != &js_ObjectClass),
741 !vp->isPrimitive() && callee != &vp[0].toObject());
743 return true;
746 JS_ALWAYS_INLINE bool
747 CallJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
749 assertSameCompartment(cx, obj, id, *vp);
750 JSBool ok = op(cx, obj, id, vp);
751 if (ok)
752 assertSameCompartment(cx, obj, *vp);
753 return ok;
756 JS_ALWAYS_INLINE bool
757 CallJSPropertyOpSetter(JSContext *cx, js::StrictPropertyOp op, JSObject *obj, jsid id,
758 JSBool strict, js::Value *vp)
760 assertSameCompartment(cx, obj, id, *vp);
761 return op(cx, obj, id, strict, vp);
764 inline bool
765 CallSetter(JSContext *cx, JSObject *obj, jsid id, js::StrictPropertyOp op, uintN attrs,
766 uintN shortid, JSBool strict, js::Value *vp)
768 if (attrs & JSPROP_SETTER)
769 return ExternalGetOrSet(cx, obj, id, CastAsObjectJsval(op), JSACC_WRITE, 1, vp, vp);
771 if (attrs & JSPROP_GETTER)
772 return js_ReportGetterOnlyAssignment(cx);
774 if (attrs & JSPROP_SHORTID)
775 id = INT_TO_JSID(shortid);
776 return CallJSPropertyOpSetter(cx, op, obj, id, strict, vp);
779 #ifdef JS_TRACER
781 * Reconstruct the JS stack and clear cx->tracecx. We must be currently in a
782 * _FAIL builtin from trace on cx or another context on the same thread. The
783 * machine code for the trace remains on the C stack when js_DeepBail returns.
785 * Implemented in jstracer.cpp.
787 JS_FORCES_STACK JS_FRIEND_API(void)
788 DeepBail(JSContext *cx);
789 #endif
791 static JS_INLINE void
792 LeaveTraceIfGlobalObject(JSContext *cx, JSObject *obj)
794 if (!obj->parent)
795 LeaveTrace(cx);
798 static JS_INLINE void
799 LeaveTraceIfArgumentsObject(JSContext *cx, JSObject *obj)
801 if (obj->isArguments())
802 LeaveTrace(cx);
805 static JS_INLINE JSBool
806 CanLeaveTrace(JSContext *cx)
808 JS_ASSERT(JS_ON_TRACE(cx));
809 #ifdef JS_TRACER
810 return JS_TRACE_MONITOR_ON_TRACE(cx)->bailExit != NULL;
811 #else
812 return JS_FALSE;
813 #endif
816 } /* namespace js */
818 inline void
819 JSContext::setPendingException(js::Value v) {
820 this->throwing = true;
821 this->exception = v;
822 assertSameCompartment(this, v);
825 #endif /* jscntxtinlines_h___ */