/* -*- Mode: C++; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 4 -*- */
/* vi: set ts=4 sw=4 expandtab: (add to ~/.vimrc: set modeline modelines=5) */
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is [Open Source Virtual Machine.].
 *
 * The Initial Developer of the Original Code is
 * Adobe System Incorporated.
 * Portions created by the Initial Developer are Copyright (C) 2004-2006
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *   Adobe AS3 Team
 *   leon.sha@sun.com
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

#ifndef __GC_inlines__
#define __GC_inlines__

// Inline functions for GCRoot, GC, GC::AllocaAutoPtr, GCWorkItem, and Cleaner.
// Inline functions for the write barrier are in WriteBarrier.h for now.

namespace MMgc
{
    // GCRoot

    REALLY_INLINE void *GCRoot::operator new(size_t size)
    {
        return FixedMalloc::GetFixedMalloc()->OutOfLineAlloc(size, MMgc::kZero);
    }

    REALLY_INLINE void GCRoot::operator delete(void *object)
    {
        FixedMalloc::GetFixedMalloc()->OutOfLineFree(object);
    }
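
    // Usage sketch (hypothetical, not part of this header): a host object that
    // holds raw pointers into the GC heap can derive from GCRoot so that its
    // memory comes from FixedMalloc (via the operators above) and is scanned
    // conservatively as a root:
    //
    //     class MyRoots : public GCRoot
    //     {
    //     public:
    //         explicit MyRoots(GC* gc) : GCRoot(gc) {}
    //         GCObject* pinned;   // kept alive because the root's memory is scanned
    //     };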

    REALLY_INLINE void GCRoot::ClearMarkStackSentinelPointer()
    {
        markStackSentinel = NULL;
    }

    REALLY_INLINE GCWorkItem *GCRoot::GetMarkStackSentinelPointer()
    {
        return markStackSentinel;
    }

    REALLY_INLINE GCWorkItem GCRoot::GetWorkItem() const
    {
        return GCWorkItem(object, (uint32_t)size, GCWorkItem::kNonGCObject);
    }

    // GC

    REALLY_INLINE void *GC::GetGCContextVariable(int var) const
    {
        return m_contextVars[var];
    }

    REALLY_INLINE void GC::SetGCContextVariable(int var, void *val)
    {
        m_contextVars[var] = val;
    }

    REALLY_INLINE avmplus::AvmCore *GC::core() const
    {
        return (avmplus::AvmCore*)GetGCContextVariable(GCV_AVMCORE);
    }

    REALLY_INLINE GC* GC::GetActiveGC()
    {
        return GCHeap::GetGCHeap()->GetEnterFrame()->GetActiveGC();
    }

    REALLY_INLINE void GC::QueueCollection()
    {
        policy.queueFullCollection();
    }

    REALLY_INLINE void GC::SignalAllocWork(size_t size)
    {
        if (policy.signalAllocWork(size))
            CollectionWork();
    }

    REALLY_INLINE void GC::SignalFreeWork(size_t size)
    {
        policy.signalFreeWork(size);
    }

    REALLY_INLINE void *GC::PleaseAlloc(size_t size, int flags)
    {
        return Alloc(size, flags | kCanFail);
    }

    // Normally 'extra' will not be zero (overloaded 'new' operators take care
    // of that), so the overflow check is not actually redundant.

    REALLY_INLINE void *GC::AllocExtra(size_t size, size_t extra, int flags)
    {
        return Alloc(GCHeap::CheckForAllocSizeOverflow(size, extra), flags);
    }
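
    // Illustration (hypothetical names): AllocExtra is the idiom for objects
    // with inline trailing storage, e.g. a header followed by n elements:
    //
    //     Buffer* b = (Buffer*) gc->AllocExtra(sizeof(Buffer),
    //                                          n * sizeof(Element),
    //                                          GC::kZero);
    //
    // CheckForAllocSizeOverflow is what keeps a huge n from wrapping
    // size+extra around to a small allocation.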

    REALLY_INLINE void *GC::Calloc(size_t count, size_t elsize, int flags)
    {
        return Alloc(GCHeap::CheckForCallocSizeOverflow(count, elsize), flags);
    }

#if defined _DEBUG || defined MMGC_MEMORY_PROFILER
#define SIZEARG size ,
#else
#define SIZEARG
#endif
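
    // SIZEARG threads the caller's requested size through to the allocator in
    // debug and memory-profiler builds, where per-allocation bookkeeping wants
    // the exact user size; release builds compile the argument away entirely.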

    // See the comments around GC::Alloc that explain why the guard and table
    // lookup for the small-allocator cases below are correct.

    REALLY_INLINE void *GC::AllocPtrZero(size_t size)
    {
#if !defined _DEBUG && !defined AVMPLUS_SAMPLER
        if (size <= kLargestAlloc)
            return GetUserPointer(containsPointersAllocs[sizeClassIndex[(size-1)>>3]]->Alloc(SIZEARG GC::kContainsPointers|GC::kZero));
#endif
        return Alloc(size, GC::kContainsPointers|GC::kZero);
    }

    REALLY_INLINE void *GC::AllocPtrZeroFinalized(size_t size)
    {
#if !defined _DEBUG && !defined AVMPLUS_SAMPLER
        if (size <= kLargestAlloc)
            return GetUserPointer(containsPointersAllocs[sizeClassIndex[(size-1)>>3]]->Alloc(SIZEARG GC::kContainsPointers|GC::kZero|GC::kFinalize));
#endif
        return Alloc(size, GC::kContainsPointers|GC::kZero|GC::kFinalize);
    }

    REALLY_INLINE void *GC::AllocRCObject(size_t size)
    {
#if !defined _DEBUG && !defined AVMPLUS_SAMPLER
        if (size <= kLargestAlloc)
            return GetUserPointer(containsPointersRCAllocs[sizeClassIndex[(size-1)>>3]]->Alloc(SIZEARG GC::kContainsPointers|GC::kZero|GC::kRCObject|GC::kFinalize));
#endif
        return Alloc(size, GC::kContainsPointers|GC::kZero|GC::kRCObject|GC::kFinalize);
    }

    REALLY_INLINE void* GC::AllocDouble()
    {
#if !defined _DEBUG && !defined AVMPLUS_SAMPLER && !defined MMGC_MEMORY_PROFILER
        return GetUserPointer(noPointersAllocs[0]->Alloc(0));
#else
        return Alloc(8, 0);
#endif
    }
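
    // The fast path above indexes noPointersAllocs[0] directly: a double is
    // 8 bytes, which maps to the first (smallest) size class, so the
    // sizeClassIndex lookup can be skipped altogether.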

    // For AllocExtra the trick is that we can compute (size|extra) quickly
    // without risk of overflow and compare it to half the maximum small-alloc
    // size (rounded down to 8 bytes); if that guard passes then we can
    // definitely take the quick path. Most allocations are small.
    //
    // As 'extra' won't usually be known at compile time, the fallback case
    // won't usually compile away, so we risk bloating the code slightly here.
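    //
    // Why the guard is sufficient (a worked sketch, not normative): let
    // K = (kLargestAlloc/2) & ~7. If (size|extra) <= K then size <= K and
    // extra <= K individually, because OR-ing can only set bits, never clear
    // them. Hence size + extra <= 2*K <= kLargestAlloc, and since both
    // operands are far below half of size_t's range, the sum cannot wrap.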

    REALLY_INLINE void *GC::AllocExtraPtrZero(size_t size, size_t extra)
    {
#if !defined _DEBUG && !defined AVMPLUS_SAMPLER
        if ((size|extra) <= (kLargestAlloc/2 & ~7)) {
            size += extra;
            return GetUserPointer(containsPointersAllocs[sizeClassIndex[(size-1)>>3]]->Alloc(SIZEARG GC::kContainsPointers|GC::kZero));
        }
#endif
        return OutOfLineAllocExtra(size, extra, GC::kContainsPointers|GC::kZero);
    }

    REALLY_INLINE void *GC::AllocExtraPtrZeroFinalized(size_t size, size_t extra)
    {
#if !defined _DEBUG && !defined AVMPLUS_SAMPLER
        if ((size|extra) <= (kLargestAlloc/2 & ~7)) {
            size += extra;
            return GetUserPointer(containsPointersAllocs[sizeClassIndex[(size-1)>>3]]->Alloc(SIZEARG GC::kContainsPointers|GC::kZero|GC::kFinalize));
        }
#endif
        return OutOfLineAllocExtra(size, extra, GC::kContainsPointers|GC::kZero|GC::kFinalize);
    }

    REALLY_INLINE void *GC::AllocExtraRCObject(size_t size, size_t extra)
    {
#if !defined _DEBUG && !defined AVMPLUS_SAMPLER
        if ((size|extra) <= kLargestAlloc/2) {
            size += extra;
            return GetUserPointer(containsPointersRCAllocs[sizeClassIndex[(size-1)>>3]]->Alloc(SIZEARG GC::kContainsPointers|GC::kZero|GC::kRCObject|GC::kFinalize));
        }
#endif
        return OutOfLineAllocExtra(size, extra, GC::kContainsPointers|GC::kZero|GC::kRCObject|GC::kFinalize);
    }

#undef SIZEARG

    // Implementations of operator delete call FreeNotNull directly.

    REALLY_INLINE void GC::Free(const void *item)
    {
        if (item == NULL)
            return;
        FreeNotNull(item);
    }

    REALLY_INLINE void GC::FreeNotNull(const void *item)
    {
        GCAssert(item != NULL);
        GCAssertMsg(onThread(), "GC called from a different thread or not associated with a thread; the MMGC_GCENTER macro may be missing.");
        GetBlockHeader(item)->alloc->Free(GetRealPointer(item));
    }
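
    // Free never needs a size argument: every GC-managed heap block begins
    // with a block header whose 'alloc' field points back at the owning
    // allocator, so GetBlockHeader recovers the allocator from the pointer's
    // address alone.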

    REALLY_INLINE void GC::AddRCRootSegment(RCRootSegment *segment)
    {
        segment->next = rcRootSegments;
        if (rcRootSegments)
            rcRootSegments->prev = segment;
        rcRootSegments = segment;
    }

    REALLY_INLINE void GC::RemoveRCRootSegment(RCRootSegment *segment)
    {
        if (segment->next != NULL)
            segment->next->prev = segment->prev;
        if (segment->prev != NULL)
            segment->prev->next = segment->next;
        else
            rcRootSegments = segment->next;
    }

    /*static*/
    REALLY_INLINE size_t GC::Size(const void *ptr)
    {
        return GetBlockHeader(ptr)->size - DebugSize();
    }

    /*static*/
    REALLY_INLINE GC* GC::GetGC(const void *item)
    {
        GC *gc = GetBlockHeader(item)->gc;
        // We don't want to rely on the GCHeap thread-local, but it makes a
        // good sanity check against misuse of this function.
        // GCAssert(gc == GCHeap::GetGCHeap()->GetActiveGC());
        return gc;
    }

#ifdef MMGC_FASTBITS
    /*static*/
    REALLY_INLINE gcbits_t& GC::GetGCBits(const void *realptr)
    {
        GCBlockHeader* block = GetBlockHeader(realptr);
        return block->bits[(uintptr_t(realptr) & 0xFFF) >> block->bitsShift];
    }
#else
    /*static*/
    REALLY_INLINE gcbits_t& GC::GetGCBits(const void *realptr)
    {
        if (GCLargeAlloc::IsLargeBlock(realptr))
            return GCLargeAlloc::GetGCBits(realptr);
        else
            return GCAlloc::GetGCBits(realptr);
    }
#endif

    /*static*/
    REALLY_INLINE bool GC::ContainsPointers(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(userptr)->IsPointerToGCObject(realptr));
        return GetBlockHeader(realptr)->containsPointers != 0;
    }

    /*static*/
    REALLY_INLINE bool GC::IsRCObject(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(userptr)->IsPointerToGCObject(realptr));
        return GetBlockHeader(realptr)->rcobject != 0;
    }

    /*static*/
    REALLY_INLINE int GC::GetMark(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(realptr)->IsPointerToGCObject(realptr));
        return GetGCBits(realptr) & kMark;
    }

    /*static*/
    REALLY_INLINE int GC::SetMark(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(realptr)->IsPointerToGCObject(realptr));
        gcbits_t& bits = GetGCBits(realptr);
        int set = bits & kMark;
        bits |= kMark;
        bits &= ~kQueued;
        return set;
    }

    REALLY_INLINE int GC::GetQueued(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(realptr)->IsPointerToGCObject(realptr));
        return GetGCBits(realptr) & kQueued;
    }

    REALLY_INLINE void GC::ClearQueued(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(IsPointerToGCObject(realptr));
        GetGCBits(realptr) &= ~kQueued;
    }

    /*static*/
    REALLY_INLINE void GC::ClearFinalized(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(realptr)->IsPointerToGCObject(realptr));
        GetGCBits(realptr) &= ~kFinalizable;
    }

    /*static*/
    REALLY_INLINE void GC::SetFinalize(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(realptr)->IsPointerToGCObject(realptr));
        GetGCBits(realptr) |= kFinalizable;
    }

    /*static*/
    REALLY_INLINE int GC::IsFinalized(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(realptr)->IsPointerToGCObject(realptr));
        return GetGCBits(realptr) & kFinalizable;
    }

    /*static*/
    REALLY_INLINE int GC::HasWeakRef(const void *userptr)
    {
        const void *realptr = GetRealPointer(userptr);
        GCAssert(GetGC(realptr)->IsPointerToGCObject(realptr));
        return GetGCBits(realptr) & kHasWeakRef;
    }

    REALLY_INLINE GCHeap *GC::GetGCHeap() const
    {
        return heap;
    }

    REALLY_INLINE void GC::ReapZCT(bool scanStack)
    {
        zct.Reap(scanStack);
    }

    REALLY_INLINE bool GC::Reaping()
    {
        return zct.IsReaping();
    }

    REALLY_INLINE bool GC::IncrementalMarking()
    {
        return marking;
    }

    REALLY_INLINE bool GC::Collecting()
    {
        return collecting;
    }

    REALLY_INLINE bool GC::Presweeping()
    {
        return presweeping;
    }

    REALLY_INLINE void *GC::FindBeginning(const void *gcItem)
    {
        return FindBeginningGuarded(gcItem);
    }

    REALLY_INLINE void *GC::FindBeginningFast(const void *gcItem)
    {
        PageMap::PageType bits = GetPageMapValue((uintptr_t)gcItem);
        if (bits == PageMap::kGCAllocPage)
            return GetUserPointer(GCAlloc::FindBeginning(gcItem));
        while (bits == PageMap::kGCLargeAllocPageRest)
        {
            gcItem = (void*) ((uintptr_t)gcItem - GCHeap::kBlockSize);
            bits = GetPageMapValue((uintptr_t)gcItem);
        }
        return GetUserPointer(GCLargeAlloc::FindBeginning(gcItem));
    }

    REALLY_INLINE bool GC::IsPointerToGCPage(const void *item)
    {
        return GetPageMapValueGuarded((uintptr_t)item) != 0;
    }

    REALLY_INLINE bool GC::IsPointerToGCObject(const void *realPtr)
    {
        return GetRealPointer(FindBeginningGuarded(realPtr, true)) == realPtr;
    }

    /*static*/
    REALLY_INLINE double GC::duration(uint64_t start)
    {
        return (double(VMPI_getPerformanceCounter() - start) * 1000) / VMPI_getPerformanceFrequency();
    }

    /*static*/
    REALLY_INLINE uint64_t GC::ticksToMicros(uint64_t ticks)
    {
        return (ticks*1000000)/VMPI_getPerformanceFrequency();
    }

    /*static*/
    REALLY_INLINE uint64_t GC::ticksToMillis(uint64_t ticks)
    {
        return (ticks*1000)/VMPI_getPerformanceFrequency();
    }

    REALLY_INLINE uint64_t GC::bytesMarked()
    {
        return policy.bytesMarked();
    }

    REALLY_INLINE uint64_t GC::markTicks()
    {
        return policy.timeStartIncrementalMark + policy.timeIncrementalMark;
    }

    REALLY_INLINE uint32_t GC::markIncrements()
    {
        return (uint32_t)policy.countIncrementalMark;
    }

    REALLY_INLINE bool GC::Destroying()
    {
        return destroying;
    }

    REALLY_INLINE uintptr_t GC::GetStackTop() const
    {
        // Temporary crutch until we're moved over to the MMGC_GCENTER system.
        if (stackEnter == NULL)
            return VMPI_getThreadStackBase();
        return GetStackEnter();
    }

    REALLY_INLINE uintptr_t GC::GetStackEnter() const
    {
        return (uintptr_t)stackEnter;
    }

    REALLY_INLINE GCAutoEnter *GC::GetAutoEnter()
    {
        return stackEnter;
    }

    REALLY_INLINE bool GC::onThread()
    {
        return VMPI_currentThread() == m_gcThread;
    }

    REALLY_INLINE void GC::FreeBits(uint32_t *bits, int sizeClass)
    {
#ifdef _DEBUG
        for (int i = 0, n = noPointersAllocs[sizeClass]->m_numBitmapBytes; i < n; i++)
            GCAssert(((uint8_t*)bits)[i] == 0);
#endif
        *(uint32_t**)bits = m_bitsFreelists[sizeClass];
        m_bitsFreelists[sizeClass] = bits;
    }
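
    // The freed bitmap is threaded onto a per-size-class freelist by storing
    // the old list head in the bitmap's own first word; no separate link
    // nodes are needed because the memory being freed serves as the link.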

    REALLY_INLINE bool GC::IsMarkedThenMakeQueued(const void* userptr)
    {
        const void* realptr = GetRealPointer(userptr);
        gcbits_t& bits = GetGCBits(realptr);
        if (bits & kMark) {
            bits ^= (kMark|kQueued);
            return true;
        }
        return false;
    }
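
    // The XOR above flips kMark off and kQueued on in a single operation.
    // That is only valid because kMark is known to be set on entry, and it
    // relies on the invariant that kMark and kQueued are never set on an
    // object at the same time (see SetMark, which clears kQueued).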

    REALLY_INLINE bool GC::IsQueued(const void* userptr)
    {
        const void* realptr = GetRealPointer(userptr);
        return (GetGCBits(realptr) & kQueued) != 0;
    }

    REALLY_INLINE PageMap::PageType GC::GetPageMapValue(uintptr_t addr) const
    {
        GCAssert(pageMap.AddrIsMappable(addr));
        return pageMap.AddrToVal(addr);
    }

    REALLY_INLINE PageMap::PageType GC::GetPageMapValueGuarded(uintptr_t addr)
    {
        if (pageMap.AddrIsMappable(addr))
            return GetPageMapValue(addr);
        MMGC_STATIC_ASSERT(PageMap::kNonGC == 0);
        return PageMap::kNonGC;
    }

    REALLY_INLINE void GC::AddToSmallEmptyBlockList(GCAlloc::GCBlock *b)
    {
        b->next = smallEmptyPageList;
        smallEmptyPageList = b;
    }

    REALLY_INLINE void GC::AddToLargeEmptyBlockList(GCLargeAlloc::LargeBlock *lb)
    {
        lb->next = largeEmptyPageList;
        largeEmptyPageList = lb;
    }

#ifdef MMGC_REFCOUNT_PROFILING
    REALLY_INLINE void GC::AddToZCT(RCObject *obj, bool initial=false)
    {
        zct.Add(obj, initial);
    }

    REALLY_INLINE void GC::RemoveFromZCT(RCObject *obj, bool final=false)
    {
        zct.Remove(obj, final);
    }
#else
    REALLY_INLINE void GC::AddToZCT(RCObject *obj)
    {
        zct.Add(obj);
    }

    REALLY_INLINE void GC::RemoveFromZCT(RCObject *obj)
    {
        zct.Remove(obj);
    }
#endif

    REALLY_INLINE void GC::PreventImmediateReaping(RCObject* obj)
    {
        if (obj->InZCT())
            zct.Remove(obj);
    }

    /*static*/
    REALLY_INLINE const void *GC::Pointer(const void *p)
    {
        return (const void*)(((uintptr_t)p)&~7);
    }
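
    // Pointer() masks off the low three bits, rounding a tagged or slightly
    // offset pointer down to the nearest 8-byte boundary; MMgc allocations
    // are 8-byte aligned, so this yields the object's allocation boundary.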

    REALLY_INLINE size_t GC::GetNumBlocks()
    {
        return policy.blocksOwnedByGC();
    }

    REALLY_INLINE void* GC::allocaTop()
    {
        return stacktop;
    }

    REALLY_INLINE void GC::allocaPopTo(void* top)
    {
        if (top >= top_segment->start && top <= top_segment->limit)
            stacktop = top;
        else
            allocaPopToSlow(top);
    }
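
    // The alloca stack is segmented: the common case pops within the current
    // top segment and costs a single store, while popping across a segment
    // boundary takes the out-of-line allocaPopToSlow path, which unwinds
    // whole segments.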

#ifdef DEBUGGER
    REALLY_INLINE void* GC::GetAttachedSampler()
    {
        return m_sampler;
    }

    REALLY_INLINE void GC::SetAttachedSampler(void *sampler)
    {
        m_sampler = sampler;
    }
#endif

    REALLY_INLINE GC::AllocaAutoPtr::AllocaAutoPtr()
        : gc(NULL)
        , unwindPtr(NULL)
    {
    }

    REALLY_INLINE GC::AllocaAutoPtr::~AllocaAutoPtr()
    {
        if (unwindPtr)
            gc->allocaPopTo(unwindPtr);
    }

    REALLY_INLINE Cleaner::Cleaner()
    {
    }

    REALLY_INLINE Cleaner::~Cleaner()
    {
        if (v)
            VMPI_memset(v, 0, size);
        v = 0;
        size = 0;
    }

    // Disable copying.
    REALLY_INLINE Cleaner& Cleaner::operator=(const Cleaner& /*rhs*/)
    {
        return *this;
    }

    REALLY_INLINE void Cleaner::set(const void * _v, size_t _size)
    {
        this->v = (int*)_v;
        this->size = _size;
    }

    REALLY_INLINE GCWorkItem::GCWorkItem(const void *p, uint32_t s, GCWorkItemType workItemType)
        : ptr(p)
#ifdef MMGC_INTERIOR_PTRS
        , _size(s | uint32_t(kHasInteriorPtrs) | uint32_t(workItemType))
#else
        , _size(s | uint32_t(workItemType))
#endif
    {
        GCAssert((s & 3) == 0);
#ifdef _DEBUG
        if (IsGCItem()) {
            GCAssert(GC::GetGC(p)->FindBeginningGuarded(p) == p);
        }
#endif
    }
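
    // Work-item sizes are multiples of 4 (asserted above), so the low bits of
    // _size are free to encode the item type and, when interior pointers are
    // enabled, the kHasInteriorPtrs flag.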

    REALLY_INLINE GCWorkItem::GCWorkItem(const void *p, GCSentinelItemType type)
        : iptr(uintptr_t(p) | type),
          _size(kSentinelSize)
    {
    }

    REALLY_INLINE void GCWorkItem::Clear()
    {
        iptr = kDeadItem;
        // Use a sentinel so we're skipped off the fast path in MarkItem.
        _size = kSentinelSize;
    }

    REALLY_INLINE bool GCAutoEnter::Entered()
    {
        return m_gc != NULL;
    }
}

#endif /* __GC_inlines__ */