1 /* ***** BEGIN LICENSE BLOCK *****
2 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
4 * The contents of this file are subject to the Mozilla Public License Version 1.1 (the
5 * "License"); you may not use this file except in compliance with the License. You may obtain
6 * a copy of the License at http://www.mozilla.org/MPL/
8 * Software distributed under the License is distributed on an "AS IS" basis, WITHOUT
9 * WARRANTY OF ANY KIND, either express or implied. See the License for the specific
10 * language governing rights and limitations under the License.
12 * The Original Code is [Open Source Virtual Machine.]
14 * The Initial Developer of the Original Code is Adobe System Incorporated. Portions created
15 * by the Initial Developer are Copyright (C)[ 2004-2006 ] Adobe Systems Incorporated. All Rights
18 * Contributor(s): Adobe AS3 Team
19 * Andreas Gal <gal@mozilla.com>
20 * Asko Tontti <atontti@cc.hut.fi>
22 * Alternatively, the contents of this file may be used under the terms of either the GNU
23 * General Public License Version 2 or later (the "GPL"), or the GNU Lesser General Public
24 * License Version 2.1 or later (the "LGPL"), in which case the provisions of the GPL or the
25 * LGPL are applicable instead of those above. If you wish to allow use of your version of this
26 * file only under the terms of either the GPL or the LGPL, and not to allow others to use your
27 * version of this file under the terms of the MPL, indicate your decision by deleting provisions
28 * above and replace them with the notice and other provisions required by the GPL or the
29 * LGPL. If you do not delete the provisions above, a recipient may use your version of this file
30 * under the terms of any one of the MPL, the GPL or the LGPL.
32 ***** END LICENSE BLOCK ***** */
/*
 * Calling-convention and assertion glue for the nanojit bridge.
 * NOTE(review): the matching #endif for AVMPLUS_UNIX is outside this
 * excerpt; several intermediate lines are missing.
 */
42 #if defined(AVMPLUS_UNIX)
49 #define FASTCALL JS_FASTCALL
/* When fastcall is unavailable, SIMULATE_FASTCALL places the would-be
 * register arguments explicitly (ecx/edx/esi per the "c"/"d"/"S"
 * constraints) around an indirect call. */
51 #if defined(JS_NO_FASTCALL)
52 #define NJ_NO_FASTCALL
53 #if defined(AVMPLUS_IA32)
54 #define SIMULATE_FASTCALL(lr, state_ptr, frag_ptr, func_addr) \
58 : "c" (state_ptr), "d" (frag_ptr), "S" (func_addr) \
61 #endif /* defined(AVMPLUS_IA32) */
62 #endif /* defined(JS_NO_FASTCALL) */
/* Map AVM assertion/logging macros onto the host environment:
 * AvmAssert -> plain assert, AvmAssertMsg compiled out, AvmDebugLog
 * expands its parenthesized argument list directly into printf. */
78 void NanoAssertFail();
81 #define AvmAssert(x) assert(x)
82 #define AvmAssertMsg(x, y)
83 #define AvmDebugLog(x) printf x
87 * Can we just take a moment to think about what it means that MSVC doesn't have stdint.h in 2008?
88 * Thanks for your time.
90 typedef JSUint8
uint8_t;
91 typedef JSInt8
int8_t;
92 typedef JSUint16
uint16_t;
93 typedef JSInt16
int16_t;
94 typedef JSUint32
uint32_t;
95 typedef JSInt32
int32_t;
96 typedef JSUint64
uint64_t;
97 typedef JSInt64
int64_t;
/*
 * rdtsc(): read the CPU time-stamp counter for profiling timers.
 * One implementation per compiler/architecture; only fragments of each
 * variant are visible in this excerpt (bodies have missing lines).
 */
102 #if defined(AVMPLUS_IA32)
103 #if defined(_MSC_VER)
/* MSVC x86: naked inline function; body not visible in this excerpt. */
104 __declspec(naked
) static inline __int64
rdtsc()
/* Solaris x86: emit rdtsc via its raw opcode bytes (0x0f 0x31). */
112 #elif defined(SOLARIS)
113 static inline unsigned long long rdtsc(void)
115 unsigned long long int x
;
116 asm volatile (".byte 0x0f, 0x31" : "=A" (x
));
/* GCC 32-bit x86: same raw-opcode encoding; "=A" packs EDX:EAX into x. */
119 #elif defined(__i386__)
120 static __inline__
unsigned long long rdtsc(void)
122 unsigned long long int x
;
123 __asm__
volatile (".byte 0x0f, 0x31" : "=A" (x
));
126 #endif /* compilers */
/* x86-64: rdtsc leaves the low word in EAX, high word in EDX; the two
 * halves are recombined into a 64-bit value below. */
128 #elif defined(__x86_64__)
130 static __inline__
uint64_t rdtsc(void)
133 __asm__
__volatile__ ("rdtsc" : "=a"(lo
), "=d"(hi
));
134 return ( (uint64_t)lo
)|( ((uint64_t)hi
)<<32 );
/* PowerPC: read the 64-bit timebase as upper/lower halves (the asm body
 * with the retry loop is missing from this excerpt). */
137 #elif defined(__powerpc__)
/* NOTE(review): the typedef below is garbled in this excerpt —
 * "typedef unsigned long long int unsigned long long;" is not valid
 * C/C++; the original presumably introduced a 64-bit alias here.
 * Confirm against the upstream source. */
139 typedef unsigned long long int unsigned long long;
141 static __inline__
unsigned long long rdtsc(void)
143 unsigned long long int result
=0;
144 unsigned long int upper
, lower
,tmp
;
152 : "=r"(upper
),"=r"(lower
),"=r"(tmp
)
156 result
= result
|lower
;
161 #endif /* architecture */
/*
 * Side-exit / guard-record bookkeeping fields. NOTE(review): the
 * enclosing struct declarations are not visible in this excerpt.
 */
186 uint32 numGlobalSlots
;
187 uint32 numStackSlots
;
188 uint32 numStackSlotsBelowCurrentFrame
;
191 #if defined NJ_VERBOSE
205 GuardRecord
*outgoing
;
209 #if defined NJ_VERBOSE
/* Records are fixed-size: no variable-length payload beyond sizeof. */
215 #define GuardRecordSize(g) sizeof(GuardRecord)
216 #define SideExitSize(e) sizeof(SideExit)
/* GCObject shims: "GC" allocation is zeroed malloc here (no real
 * collector in this shim); the gc parameter is accepted but unused by
 * the visible allocation path. Operator-delete bodies are not visible. */
225 operator new(size_t size
, GC
* gc
)
227 return calloc(1, size
);
230 static void operator delete (void *gcObject
)
236 #define MMGC_SUBCLASS_DECL : public GCObject
238 class GCFinalizedObject
: public GCObject
241 static void operator delete (void *gcObject
)
/* Native VM page size: taken from sysconf(_SC_PAGE_SIZE) when
 * available, otherwise assumed to be 4K (see original @todo). */
250 int32_t kNativePageSize
;
254 #if defined _SC_PAGE_SIZE
255 kNativePageSize
= sysconf(_SC_PAGE_SIZE
);
257 kNativePageSize
= 4096; // @todo: what is this?
/*
 * Page-granular allocation of executable code memory.
 * Windows: VirtualAlloc with PAGE_EXECUTE_READWRITE.
 * Unix: mmap with PROT_READ|PROT_WRITE|PROT_EXEC (not heap+mprotect,
 * which SELinux forbids — see original comment below).
 * Fallback: valloc (page-aligned; NOTE(review): not marked executable —
 * presumably acceptable on the platforms that reach this branch).
 * The surrounding #ifdef structure has missing lines in this excerpt.
 */
262 Alloc(uint32_t pages
)
265 return VirtualAlloc(NULL
,
266 pages
* kNativePageSize
,
267 MEM_COMMIT
| MEM_RESERVE
,
268 PAGE_EXECUTE_READWRITE
);
269 #elif defined AVMPLUS_UNIX
271 * Don't use normal heap with mprotect+PROT_EXEC for executable code.
272 * SELinux and friends don't allow this.
275 pages
* kNativePageSize
,
276 PROT_READ
| PROT_WRITE
| PROT_EXEC
,
277 MAP_PRIVATE
| MAP_ANON
,
281 return valloc(pages
* kNativePageSize
);
/* Release pages acquired by Alloc: VirtualFree on Windows (size 0 with
 * MEM_RELEASE frees the whole reservation), munmap on Unix. */
286 Free(void* p
, uint32_t pages
)
289 VirtualFree(p
, 0, MEM_RELEASE
);
290 #elif defined AVMPLUS_UNIX
292 munmap((char*)p
, pages
* kNativePageSize
);
294 munmap(p
, pages
* kNativePageSize
);
/* Byte-granular, zero-initialized allocation. */
309 Alloc(uint32_t bytes
)
311 return calloc(1, bytes
);
320 static inline GCHeap
*
330 #define MMGC_MEM_TYPE(x)
332 typedef int FunctionID
;
/*
 * Per-activation interpreter state shared with JIT-compiled traces.
 * NOTE(review): the enclosing struct declaration is outside this
 * excerpt; the trailing per-field comments are original.
 */
338 void* sp
; /* native stack pointer, stack[0] is spbase[0] */
339 void* rp
; /* call stack pointer */
340 void* gp
; /* global frame pointer */
341 JSContext
*cx
; /* current VM context handle */
342 void* eos
; /* first unusable word after the native stack */
343 void* eor
; /* first unusable word after the call stack */
344 nanojit::GuardRecord
* lastTreeExitGuard
; /* guard we exited on during a tree call */
345 nanojit::GuardRecord
* lastTreeCallGuard
; /* guard we want to grow from if the tree
346 call exit guard mismatched */
/* AvmString is an alias for the engine's String class. */
353 typedef class String AvmString
;
/*
 * Minimal Avm compatibility layer: "String" values are plain strdup'd
 * C buffers, configuration flags come from the TRACEMONKEY environment
 * variable, and console output goes straight to stdio.
 */
355 class StringNullTerminatedUTF8
/* Takes a private strdup'd copy of the source string; the gc parameter
 * is unused by the visible code. The destructor presumably frees cstr
 * — its body is not visible in this excerpt. */
360 StringNullTerminatedUTF8(GC
* gc
, String
* s
)
362 cstr
= strdup((const char*)s
);
365 ~StringNullTerminatedUTF8()
377 typedef String
* Stringp
;
379 class AvmConfiguration
/* Zero every flag, then enable what $TRACEMONKEY requests (substring
 * match, e.g. TRACEMONKEY=verbose). */
383 memset(this, 0, sizeof(AvmConfiguration
));
385 verbose
= getenv("TRACEMONKEY") && strstr(getenv("TRACEMONKEY"), "verbose");
395 uint32_t quiet_opt
:1;
397 uint32_t verbose_addrs
:1;
398 uint32_t verbose_live
:1;
399 uint32_t verbose_exits
:1;
400 uint32_t show_stats
:1;
403 static const int kstrconst_emptyString
= 0;
/* Format a code address as "%p". NOTE(review): buf's declaration is
 * not visible here — presumably a member/static buffer whose lifetime
 * outlives the returned pointer; confirm upstream. */
409 const char* format(const void* ip
)
412 sprintf(buf
, "%p", ip
);
/* Console shim: operator<< forwards the string to stdout unchanged. */
431 AvmConsole
& operator<<(const char* s
)
433 fprintf(stdout
, "%s", s
);
441 AvmInterpreter interp
;
444 static AvmConfiguration config
;
446 static String
* k_str
[];
447 static bool sse2_available
;
452 return sse2_available
;
458 return config
.quiet_opt
;
464 return config
.verbose
;
/* Strings here are just strdup'd char buffers; pair newString with
 * freeString (ownership transfers to the caller). */
473 static inline String
* newString(const char* cstr
) {
474 return (String
*)strdup(cstr
);
477 static inline void freeString(String
* str
) {
478 return free((char*)str
);
492 * The List<T> template implements a simple List, which can
493 * be templated to support different types.
495 * Elements can be added to the end, modified in the middle,
496 * but no holes are allowed. That is for set(n, v) to work
499 * Note that [] operators are provided and you can violate the
500 * set properties using these operators, if you want a real
501 * list don't use the [] operators, if you want a general purpose
502 * array use the [] operators.
/* Element kinds: whether stored values participate in GC write barriers. */
505 enum ListElementType
{ LIST_NonGCObjects
, LIST_GCObjects
};
/*
 * List<T>: growable array with no holes (see class comment above).
 * NOTE(review): many method bodies below have lines missing from this
 * excerpt; comments describe only what the visible code shows.
 */
507 template <typename T
, ListElementType kElementType
>
511 enum { kInitialCapacity
= 128 };
/* Construct with an initial capacity (default 128 elements). */
513 List(GC
*_gc
, uint32_t _capacity
=kInitialCapacity
) : data(NULL
), len(0), capacity(0)
515 ensureCapacity(_capacity
);
522 // zero out in case we are part of an RCObject
526 inline void destroy()
532 // 'this' steals the guts of 'that' and 'that' gets reset.
533 void FASTCALL
become(List
& that
)
537 this->data
= that
.data
;
538 this->len
= that
.len
;
539 this->capacity
= that
.capacity
;
/* Append a value, growing when len reaches capacity. */
545 uint32_t FASTCALL
add(T value
)
547 if (len
>= capacity
) {
554 inline bool isEmpty() const
559 inline uint32_t size() const
/* Read element at index; asserts index < len. */
564 inline T
get(uint32_t index
) const
566 AvmAssert(index
< len
);
567 return *(T
*)(data
+ index
);
/* Overwrite element at index (bounds-asserted against capacity). */
570 void FASTCALL
set(uint32_t index
, T value
)
572 AvmAssert(index
< capacity
);
577 AvmAssert(len
<= capacity
);
/* Linear scan from the front; body's comparison lines are missing. */
587 int FASTCALL
indexOf(T value
) const
589 for(uint32_t i
=0; i
<len
; i
++)
/* Linear scan from the back. */
595 int FASTCALL
lastIndexOf(T value
) const
597 for(int32_t i
=len
-1; i
>=0; i
--)
/* Pop the last element, clearing its slot to the type's undef value. */
608 T FASTCALL
removeLast()
611 return undef_list_val();
613 set(len
-1, undef_list_val());
/* Unchecked-by-len indexing (asserts only capacity; see class comment). */
618 inline T
operator[](uint32_t index
) const
620 AvmAssert(index
< capacity
);
/* Grow storage to at least cap elements: calloc on first allocation,
 * realloc + zero_range of the newly exposed tail thereafter. */
624 void FASTCALL
ensureCapacity(uint32_t cap
)
626 if (cap
> capacity
) {
628 data
= (T
*)calloc(1, factor(cap
));
630 data
= (T
*)realloc(data
, factor(cap
));
631 zero_range(capacity
, cap
- capacity
);
/* Insert count copies of value at index: shift the tail up with
 * memmove, then barrier-write the new slots via wbzm. */
637 void FASTCALL
insert(uint32_t index
, T value
, uint32_t count
= 1)
639 AvmAssert(index
<= len
);
640 AvmAssert(count
> 0);
641 ensureCapacity(len
+count
);
642 memmove(data
+ index
+ count
, data
+ index
, factor(len
- index
));
643 wbzm(index
, index
+count
, value
);
/* Remove element at index: clear its slot, then shift the tail down. */
647 T FASTCALL
removeAt(uint32_t index
)
650 // dec the refcount on the one we're removing
651 wb(index
, undef_list_val());
652 memmove(data
+ index
, data
+ index
+ 1, factor(len
- index
- 1));
660 // growth is fast at first, then slows at larger list sizes.
662 const uint32_t curMax
= capacity
;
664 newMax
= kInitialCapacity
;
666 newMax
= curMax
* 3/2;
670 ensureCapacity(newMax
);
/* Write-barrier helpers; the non-GC variant's body is not visible. */
673 inline void do_wb_nongc(T
* slot
, T value
)
678 inline void do_wb_gc(GCObject
** slot
, const GCObject
** value
)
680 *slot
= (GCObject
*)*value
;
/* Barriered single-slot store. */
683 void FASTCALL
wb(uint32_t index
, T value
)
685 AvmAssert(index
< capacity
);
686 AvmAssert(data
!= NULL
);
687 T
* slot
= &data
[index
];
688 do_wb_nongc(slot
, value
);
691 // multiple wb call with the same value, and assumption that existing value is all zero bits,
693 // for (uint32_t u = index; u < index_end; ++u)
/* Barriered fill of [index, index_end) with the same value. */
695 void FASTCALL
wbzm(uint32_t index
, uint32_t index_end
, T value
)
697 AvmAssert(index
< capacity
);
698 AvmAssert(index_end
<= capacity
);
699 AvmAssert(index
< index_end
);
700 AvmAssert(data
!= NULL
);
701 T
* slot
= data
+ index
;
702 for ( ; index
< index_end
; ++index
, ++slot
)
703 do_wb_nongc(slot
, value
);
/* Element count -> byte count. */
706 inline uint32_t factor(uint32_t index
) const
708 return index
* sizeof(T
);
/* Zero _count elements starting at _first. */
711 void FASTCALL
zero_range(uint32_t _first
, uint32_t _count
)
713 memset(data
+ _first
, 0, factor(_count
));
716 // stuff that needs specialization based on the type
717 static inline T
undef_list_val();
/* Copying is intentionally disallowed (declared, never defined). */
720 List(const List
& toCopy
); // unimplemented
721 void operator=(const List
& that
); // unimplemented
723 // ------------------------ DATA SECTION BEGIN
728 // ------------------------ DATA SECTION END
732 // stuff that needs specialization based on the type
/* Default "undefined" element is a zero-initialized T. */
733 template<typename T
, ListElementType kElementType
>
734 /* static */ inline T List
<T
, kElementType
>::undef_list_val() { return T(0); }
737 * The SortedMap<K,T> template implements an object that
738 * maps keys to values. The keys are sorted
739 * from smallest to largest in the map. Time of operations
741 * put() is O(1) if the key is higher than any existing
742 * key; O(logN) if the key already exists,
743 * and O(N) otherwise.
744 * get() is an O(logN) binary search.
746 * no duplicates are allowed.
/* Backed by two parallel List<>s (keys, values) kept index-aligned.
 * NOTE(review): several method bodies (put, get, removeAt, the binary
 * search) have lines missing from this excerpt. */
748 template <class K
, class T
, ListElementType valType
>
749 class SortedMap
: public GCObject
752 enum { kInitialCapacity
= 64 };
754 SortedMap(GC
* gc
, int _capacity
=kInitialCapacity
)
755 : keys(gc
, _capacity
), values(gc
, _capacity
)
761 return keys
.size() == 0;
/* Fast path: appending a key larger than all existing keys. */
783 if (keys
.size() == 0 || k
> keys
.last())
/* Negative search results encode the insertion point as -(pos)-1. */
801 i
= -i
- 1; // recover the insertion point
802 AvmAssert(keys
.size() != (uint32_t)i
);
813 return i
>= 0 ? values
[i
] : 0;
816 bool get(K k
, T
& v
) const
827 bool containsKey(K k
) const
830 return (i
>= 0) ? true : false;
841 T old
= values
.removeAt(i
);
/* Convenience accessors; all return (T)0 / 0 on an empty map. */
846 T
removeFirst() { return isEmpty() ? (T
)0 : removeAt(0); }
847 T
removeLast() { return isEmpty() ? (T
)0 : removeAt(keys
.size()-1); }
848 T
first() const { return isEmpty() ? (T
)0 : values
[0]; }
849 T
last() const { return isEmpty() ? (T
)0 : values
[keys
.size()-1]; }
851 K
firstKey() const { return isEmpty() ? 0 : keys
[0]; }
852 K
lastKey() const { return isEmpty() ? 0 : keys
[keys
.size()-1]; }
/* Positional access by index into the parallel lists. */
855 T
at(int i
) const { return values
[i
]; }
856 K
keyAt(int i
) const { return keys
[i
]; }
/* Index of k if present, else index of the nearest smaller key. */
858 int findNear(K k
) const {
860 return i
>= 0 ? i
: -i
-2;
863 List
<K
, LIST_NonGCObjects
> keys
;
864 List
<T
, valType
> values
;
/* Binary search over keys (body partially missing in this excerpt). */
869 int hi
= keys
.size()-1;
880 return i
; // key found
882 return -(lo
+ 1); // key not found, low is the insertion point
886 #define GCSortedMap SortedMap
889 * Bit vectors are an efficient method of keeping True/False information
890 * on a set of items or conditions. Class BitSet provides functions
891 * to manipulate individual bits in the vector.
893 * Since most vectors are rather small an array of longs is used by
894 * default to house the value of the bits. If more bits are needed
895 * then an array is allocated dynamically outside of this object.
897 * This object is not optimized for a fixed sized bit vector
898 * it instead allows for dynamically growing the bit vector.
/* kUnit: bits per storage word; small sets live in the inline ar[]
 * array, larger ones in a heap-allocated bits.ptr array.
 * NOTE(review): kUnit is 8*sizeof(long) (64 on LP64 targets) but the
 * masks below are built with (1<<bit), an int shift — for bit >= 32
 * that is undefined behavior. Confirm whether long is 32-bit on all
 * supported targets or whether this should be (1L<<bit). */
903 enum { kUnit
= 8*sizeof(long),
904 kDefaultCapacity
= 4 };
908 capacity
= kDefaultCapacity
;
/* capacity > kDefaultCapacity means storage was heap-allocated. */
914 if (capacity
> kDefaultCapacity
)
920 if (capacity
> kDefaultCapacity
)
921 for(int i
=0; i
<capacity
; i
++)
924 for(int i
=0; i
<capacity
; i
++)
/* Set bit bitNbr, growing the backing store if needed (gc unused by
 * the visible code). */
928 void set(GC
*gc
, int bitNbr
)
930 int index
= bitNbr
/ kUnit
;
931 int bit
= bitNbr
% kUnit
;
932 if (index
>= capacity
)
935 if (capacity
> kDefaultCapacity
)
936 bits
.ptr
[index
] |= (1<<bit
);
938 bits
.ar
[index
] |= (1<<bit
);
/* Clear bit bitNbr; out-of-range bits are silently ignored. */
941 void clear(int bitNbr
)
943 int index
= bitNbr
/ kUnit
;
944 int bit
= bitNbr
% kUnit
;
945 if (index
< capacity
)
947 if (capacity
> kDefaultCapacity
)
948 bits
.ptr
[index
] &= ~(1<<bit
);
950 bits
.ar
[index
] &= ~(1<<bit
);
/* Test bit bitNbr; bits beyond capacity read as false. */
954 bool get(int bitNbr
) const
956 int index
= bitNbr
/ kUnit
;
957 int bit
= bitNbr
% kUnit
;
959 if (index
< capacity
)
961 if (capacity
> kDefaultCapacity
)
962 value
= ( bits
.ptr
[index
] & (1<<bit
) ) ? true : false;
964 value
= ( bits
.ar
[index
] & (1<<bit
) ) ? true : false;
970 // Grow the array until at least newCapacity big
/* Allocate a zeroed replacement array, copy the old words across from
 * whichever representation is current, then switch to the new store. */
971 void grow(GC
*gc
, int newCapacity
)
973 // create vector that is 2x bigger than requested
975 //MEMTAG("BitVector::Grow - long[]");
976 long* newBits
= (long*)calloc(1, newCapacity
* sizeof(long));
977 //memset(newBits, 0, newCapacity * sizeof(long));
980 if (capacity
> kDefaultCapacity
)
981 for(int i
=0; i
<capacity
; i
++)
982 newBits
[i
] = bits
.ptr
[i
];
984 for(int i
=0; i
<capacity
; i
++)
985 newBits
[i
] = bits
.ar
[i
];
987 // in with the new out with the old
988 if (capacity
> kDefaultCapacity
)
992 capacity
= newCapacity
;
995 // by default we use the array, but if the vector
996 // size grows beyond kDefaultCapacity we allocate
997 // space dynamically.
1001 long ar
[kDefaultCapacity
];