/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jspropertytree.h"

#include "jscntxt.h"
#include "jsgc.h"
#include "jstypes.h"

#include "vm/Shape.h"

#include "jsgcinlines.h"

#include "vm/Shape-inl.h"

using namespace js;
using namespace js::gc;
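
/*
 * Hash policy for the KidsHash table: kids are keyed and looked up by
 * StackShape, and a stored Shape matches a lookup when Shape::matches()
 * says so.
 */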
inline HashNumber
ShapeHasher::hash(const Lookup &l)
{
    return l.hash();
}

inline bool
ShapeHasher::match(const Key k, const Lookup &l)
{
    return k->matches(l);
}
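
/* Allocate a fresh Shape cell from the GC, reporting OOM on failure. */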
Shape *
PropertyTree::newShape(ExclusiveContext *cx)
{
    Shape *shape = js_NewGCShape(cx);
    if (!shape)
        js_ReportOutOfMemory(cx);
    return shape;
}
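
/*
 * Build a KidsHash seeded with two kids. This is used when a parent that
 * currently holds a single kid gains a second one.
 */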
static KidsHash *
HashChildren(Shape *kid1, Shape *kid2)
{
    KidsHash *hash = js_new<KidsHash>();
    if (!hash || !hash->init(2)) {
        js_delete(hash);
        return nullptr;
    }

    JS_ALWAYS_TRUE(hash->putNew(StackShape(kid1), kid1));
    JS_ALWAYS_TRUE(hash->putNew(StackShape(kid2), kid2));
    return hash;
}
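
/*
 * Link |child| under |parent|. A parent's KidsPointer moves through three
 * states: null (no kids), a single Shape, and a KidsHash once there is more
 * than one kid.
 */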
bool
PropertyTree::insertChild(ExclusiveContext *cx, Shape *parent, Shape *child)
{
    JS_ASSERT(!parent->inDictionary());
    JS_ASSERT(!child->parent);
    JS_ASSERT(!child->inDictionary());
    JS_ASSERT(child->compartment() == parent->compartment());
    JS_ASSERT(cx->isInsideCurrentCompartment(this));

    KidsPointer *kidp = &parent->kids;

    if (kidp->isNull()) {
        child->setParent(parent);
        kidp->setShape(child);
        return true;
    }

    if (kidp->isShape()) {
        Shape *shape = kidp->toShape();
        JS_ASSERT(shape != child);
        JS_ASSERT(!shape->matches(child));

        KidsHash *hash = HashChildren(shape, child);
        if (!hash) {
            js_ReportOutOfMemory(cx);
            return false;
        }
        kidp->setHash(hash);
        child->setParent(parent);
        return true;
    }

    if (!kidp->toHash()->putNew(StackShape(child), child)) {
        js_ReportOutOfMemory(cx);
        return false;
    }

    child->setParent(parent);
    return true;
}
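
/*
 * Unlink |child| from this shape's kids, converting the KidsHash back to
 * single-shape form when only one kid remains.
 */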
void
Shape::removeChild(Shape *child)
{
    JS_ASSERT(!child->inDictionary());
    JS_ASSERT(child->parent == this);

    KidsPointer *kidp = &kids;

    if (kidp->isShape()) {
        JS_ASSERT(kidp->toShape() == child);
        kidp->setNull();
        child->parent = nullptr;
        return;
    }

    KidsHash *hash = kidp->toHash();
    JS_ASSERT(hash->count() >= 2);      /* otherwise kidp->isShape() should be true */

    hash->remove(StackShape(child));
    child->parent = nullptr;

    if (hash->count() == 1) {
        /* Convert from HASH form back to SHAPE form. */
        KidsHash::Range r = hash->all();
        Shape *otherChild = r.front();
        JS_ASSERT((r.popFront(), r.empty()));    /* No more elements! */
        kidp->setShape(otherChild);
        js_delete(hash);
    }
}
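
/*
 * Find a child of |parent| matching |unrootedChild|, or create and insert
 * one if there is no match. Kid pointers are weak, so an existing match is
 * read-barriered, or dropped if it is already unreachable and about to be
 * swept.
 */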
Shape *
PropertyTree::getChild(ExclusiveContext *cx, Shape *parentArg, StackShape &unrootedChild)
{
    RootedShape parent(cx, parentArg);
    JS_ASSERT(parent);

    Shape *existingShape = nullptr;

    /*
     * The property tree has extremely low fan-out below its root in
     * popular embeddings with real-world workloads. Patterns such as
     * defining closures that capture a constructor's environment as
     * getters or setters on the new object that is passed in as
     * |this| can significantly increase fan-out below the property
     * tree root -- see bug 335700 for details.
     */
    KidsPointer *kidp = &parent->kids;
    if (kidp->isShape()) {
        Shape *kid = kidp->toShape();
        if (kid->matches(unrootedChild))
            existingShape = kid;
    } else if (kidp->isHash()) {
        if (KidsHash::Ptr p = kidp->toHash()->lookup(unrootedChild))
            existingShape = *p;
    } else {
        /* If kidp->isNull(), we always insert. */
    }

#ifdef JSGC_INCREMENTAL
    if (existingShape) {
        JS::Zone *zone = existingShape->zone();
        if (zone->needsIncrementalBarrier()) {
            /*
             * We need a read barrier for the shape tree, since these are weak
             * pointers.
             */
            Shape *tmp = existingShape;
            MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
            JS_ASSERT(tmp == existingShape);
        } else if (zone->isGCSweeping() && !existingShape->isMarked() &&
                   !existingShape->arenaHeader()->allocatedDuringIncremental)
        {
            /*
             * The shape we've found is unreachable and due to be finalized, so
             * remove our weak reference to it and don't use it.
             */
            JS_ASSERT(parent->isMarked());
            parent->removeChild(existingShape);
            existingShape = nullptr;
        } else if (existingShape->isMarked(gc::GRAY)) {
            JS::UnmarkGrayGCThingRecursively(existingShape, JSTRACE_SHAPE);
        }
    }
#endif

    if (existingShape)
        return existingShape;

    RootedGeneric<StackShape*> child(cx, &unrootedChild);

    Shape *shape = newShape(cx);
    if (!shape)
        return nullptr;

    new (shape) Shape(*child, parent->numFixedSlots());

    if (!insertChild(cx, parent, shape))
        return nullptr;

    return shape;
}
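
/*
 * Read-only counterpart of getChild for ThreadSafeContext callers: look up
 * an existing child but never allocate or insert one.
 */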
Shape *
PropertyTree::lookupChild(ThreadSafeContext *cx, Shape *parent, const StackShape &child)
{
    /* Keep this in sync with the logic of getChild above. */
    Shape *shape = nullptr;

    JS_ASSERT(parent);

    KidsPointer *kidp = &parent->kids;
    if (kidp->isShape()) {
        Shape *kid = kidp->toShape();
        if (kid->matches(child))
            shape = kid;
    } else if (kidp->isHash()) {
        if (KidsHash::Ptr p = kidp->toHash()->readonlyThreadsafeLookup(child))
            shape = *p;
    } else {
        return nullptr;
    }

#if defined(JSGC_INCREMENTAL) && defined(DEBUG)
    if (shape) {
        JS::Zone *zone = shape->arenaHeader()->zone;
        JS_ASSERT(!zone->needsIncrementalBarrier());
        JS_ASSERT(!(zone->isGCSweeping() && !shape->isMarked() &&
                    !shape->arenaHeader()->allocatedDuringIncremental));
    }
#endif

    return shape;
}

void
Shape::sweep()
{
    if (inDictionary())
        return;

    /*
     * We detach the child from the parent if the parent is reachable.
     *
     * Note that due to incremental sweeping, the parent pointer may point
     * to the original reachable parent, or it may point to a new live
     * object allocated in the same cell that used to hold the parent.
     *
     * There are three cases:
     *
     * Case 1: parent is not marked - parent is unreachable, may have been
     *         finalized, and the cell may subsequently have been
     *         reallocated to a compartment that is not being marked (cells
     *         are marked when allocated in a compartment that is currently
     *         being marked by the collector).
     *
     * Case 2: parent is marked and is in a different compartment - parent
     *         has been freed and reallocated to a compartment that was being
     *         marked.
     *
     * Case 3: parent is marked and is in the same compartment - parent is
     *         still reachable and we need to detach from it.
     */
    if (parent && parent->isMarked() && parent->compartment() == compartment())
        parent->removeChild(this);
}
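
/* Release the KidsHash, if any, when a non-dictionary shape is finalized. */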
void
Shape::finalize(FreeOp *fop)
{
    if (!inDictionary() && kids.isHash())
        fop->delete_(kids.toHash());
}

#ifdef JSGC_COMPACTING
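
/*
 * |listp| points either at the parent field of the next dictionary shape or
 * at the shape_ field of the owning object; re-point it at the relocated
 * cell after a moving GC.
 */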
void
Shape::fixupDictionaryShapeAfterMovingGC()
{
    if (!listp)
        return;

    JS_ASSERT(!IsInsideNursery(reinterpret_cast<Cell *>(listp)));
    AllocKind kind = reinterpret_cast<Cell *>(listp)->tenuredGetAllocKind();
    JS_ASSERT(kind == FINALIZE_SHAPE || kind <= FINALIZE_OBJECT_LAST);
    if (kind == FINALIZE_SHAPE) {
        // listp points to the parent field of the next shape.
        Shape *next = reinterpret_cast<Shape *>(uintptr_t(listp) -
                                                offsetof(Shape, parent));
        listp = &gc::MaybeForwarded(next)->parent;
    } else {
        // listp points to the shape_ field of an object.
        JSObject *last = reinterpret_cast<JSObject *>(uintptr_t(listp) -
                                                      offsetof(JSObject, shape_));
        listp = &gc::MaybeForwarded(last)->shape_;
    }
}
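
/*
 * Update this shape's kids after a moving GC: forward a single-kid pointer
 * directly, or rekey every forwarded entry in the KidsHash so later lookups
 * still find it.
 */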
void
Shape::fixupShapeTreeAfterMovingGC()
{
    if (kids.isNull())
        return;

    if (kids.isShape()) {
        if (gc::IsForwarded(kids.toShape()))
            kids.setShape(gc::Forwarded(kids.toShape()));
        return;
    }

    JS_ASSERT(kids.isHash());
    KidsHash *kh = kids.toHash();
    for (KidsHash::Enum e(*kh); !e.empty(); e.popFront()) {
        Shape *key = e.front();
        if (!IsForwarded(key))
            continue;

        key = Forwarded(key);
        BaseShape *base = key->base();
        if (IsForwarded(base))
            base = Forwarded(base);
        UnownedBaseShape *unowned = base->unowned();
        if (IsForwarded(unowned))
            unowned = Forwarded(unowned);
        StackShape lookup(unowned,
                          const_cast<Shape *>(key)->propidRef(),
                          key->slotInfo & Shape::SLOT_MASK,
                          key->attrs,
                          key->flags);
        e.rekeyFront(lookup, key);
    }
}
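
/* Dispatch to the dictionary or tree fixup depending on this shape's kind. */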
void
Shape::fixupAfterMovingGC()
{
    if (inDictionary())
        fixupDictionaryShapeAfterMovingGC();
    else
        fixupShapeTreeAfterMovingGC();
}

#endif // JSGC_COMPACTING

#ifdef DEBUG
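
/*
 * Debug-only check that |aKid| really is recorded in this KidsPointer,
 * either as the single kid or as an entry in the KidsHash.
 */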
void
KidsPointer::checkConsistency(Shape *aKid) const
{
    if (isShape()) {
        JS_ASSERT(toShape() == aKid);
    } else {
        JS_ASSERT(isHash());
        KidsHash *hash = toHash();
        KidsHash::Ptr ptr = hash->lookup(StackShape(aKid));
        JS_ASSERT(*ptr == aKid);
    }
}
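
/*
 * Print this shape's property id, getter/setter, slot, attributes and flags
 * to |fp|.
 */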
void
Shape::dump(JSContext *cx, FILE *fp) const
{
    /* This is only used from gdb, so allowing GC here would just be confusing. */
    gc::AutoSuppressGC suppress(cx);

    jsid propid = this->propid();

    JS_ASSERT(!JSID_IS_VOID(propid));

    if (JSID_IS_INT(propid)) {
        fprintf(fp, "[%ld]", (long) JSID_TO_INT(propid));
    } else if (JSID_IS_ATOM(propid)) {
        if (JSLinearString *str = JSID_TO_ATOM(propid))
            FileEscapedString(fp, str, '"');
        else
            fputs("<error>", fp);
    } else {
        JS_ASSERT(JSID_IS_SYMBOL(propid));
        JSID_TO_SYMBOL(propid)->dump(fp);
    }

    fprintf(fp, " g/s %p/%p slot %d attrs %x ",
            JS_FUNC_TO_DATA_PTR(void *, base()->rawGetter),
            JS_FUNC_TO_DATA_PTR(void *, base()->rawSetter),
            hasSlot() ? slot() : -1, attrs);

    if (attrs) {
        int first = 1;
        fputs("(", fp);
#define DUMP_ATTR(name, display) if (attrs & JSPROP_##name) fputs(&(" " #display)[first], fp), first = 0
        DUMP_ATTR(ENUMERATE, enumerate);
        DUMP_ATTR(READONLY, readonly);
        DUMP_ATTR(PERMANENT, permanent);
        DUMP_ATTR(GETTER, getter);
        DUMP_ATTR(SETTER, setter);
        DUMP_ATTR(SHARED, shared);
#undef DUMP_ATTR
        fputs(") ", fp);
    }

    fprintf(fp, "flags %x ", flags);
    if (flags) {
        int first = 1;
        fputs("(", fp);
#define DUMP_FLAG(name, display) if (flags & name) fputs(&(" " #display)[first], fp), first = 0
        DUMP_FLAG(IN_DICTIONARY, in_dictionary);
#undef DUMP_FLAG
        fputs(") ", fp);
    }
}
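
/*
 * Recursively dump this shape and all of its kids, indenting one extra space
 * per level of the tree.
 */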
void
Shape::dumpSubtree(JSContext *cx, int level, FILE *fp) const
{
    if (!parent) {
        JS_ASSERT(level == 0);
        JS_ASSERT(JSID_IS_EMPTY(propid_));
        fprintf(fp, "class %s emptyShape\n", getObjectClass()->name);
    } else {
        fprintf(fp, "%*sid ", level, "");
        dump(cx, fp);
    }

    if (!kids.isNull()) {
        ++level;
        if (kids.isShape()) {
            Shape *kid = kids.toShape();
            JS_ASSERT(kid->parent == this);
            kid->dumpSubtree(cx, level, fp);
        } else {
            const KidsHash &hash = *kids.toHash();
            for (KidsHash::Range range = hash.all(); !range.empty(); range.popFront()) {
                Shape *kid = range.front();

                JS_ASSERT(kid->parent == this);
                kid->dumpSubtree(cx, level, fp);
            }
        }
    }
}

#endif