fixed #1537: Upgraded spidermonkey to FF17.0beta3 for win32 and iOS ports.

This commit is contained in:
James Chen 2012-10-31 11:03:42 +08:00
parent 8edf9826b1
commit b1ec3c8ab5
101 changed files with 3597 additions and 1153 deletions

View File

@ -129,7 +129,7 @@ class EncapsulatedPtr
public:
EncapsulatedPtr() : value(NULL) {}
explicit EncapsulatedPtr(T *v) : value(v) {}
EncapsulatedPtr(T *v) : value(v) {}
explicit EncapsulatedPtr(const EncapsulatedPtr<T> &v) : value(v.value) {}
~EncapsulatedPtr() { pre(); }
@ -222,34 +222,51 @@ class RelocatablePtr : public EncapsulatedPtr<T>
{
public:
RelocatablePtr() : EncapsulatedPtr<T>(NULL) {}
explicit RelocatablePtr(T *v) : EncapsulatedPtr<T>(v) { post(); }
explicit RelocatablePtr(const RelocatablePtr<T> &v)
: EncapsulatedPtr<T>(v) { post(); }
explicit RelocatablePtr(T *v) : EncapsulatedPtr<T>(v) {
if (v)
post();
}
explicit RelocatablePtr(const RelocatablePtr<T> &v) : EncapsulatedPtr<T>(v) {
if (this->value)
post();
}
~RelocatablePtr() {
this->pre();
relocate();
if (this->value)
relocate(this->value->compartment());
}
RelocatablePtr<T> &operator=(T *v) {
this->pre();
JS_ASSERT(!IsPoisonedPtr<T>(v));
this->value = v;
post();
if (v) {
this->value = v;
post();
} else if (this->value) {
JSCompartment *comp = this->value->compartment();
this->value = v;
relocate(comp);
}
return *this;
}
RelocatablePtr<T> &operator=(const RelocatablePtr<T> &v) {
this->pre();
JS_ASSERT(!IsPoisonedPtr<T>(v.value));
this->value = v.value;
post();
if (v.value) {
this->value = v.value;
post();
} else if (this->value) {
JSCompartment *comp = this->value->compartment();
this->value = v;
relocate(comp);
}
return *this;
}
protected:
void post() { T::writeBarrierRelocPost(this->value, (void *)&this->value); }
void relocate() { T::writeBarrierRelocated(this->value, (void *)&this->value); }
inline void post();
inline void relocate(JSCompartment *comp);
};
/*
@ -276,6 +293,9 @@ struct Shape;
class BaseShape;
namespace types { struct TypeObject; }
typedef EncapsulatedPtr<JSObject> EncapsulatedPtrObject;
typedef EncapsulatedPtr<JSScript> EncapsulatedPtrScript;
typedef RelocatablePtr<JSObject> RelocatablePtrObject;
typedef RelocatablePtr<JSScript> RelocatablePtrScript;
@ -303,6 +323,19 @@ struct HeapPtrHasher
template <class T>
struct DefaultHasher< HeapPtr<T> > : HeapPtrHasher<T> { };
/*
 * Hash policy for HashMap/HashSet keys of type EncapsulatedPtr<T>.
 * Keys hash and compare by the raw pointer they wrap, so lookups can be done
 * with a plain T* without constructing a barriered pointer.
 */
template<class T>
struct EncapsulatedPtrHasher
{
typedef EncapsulatedPtr<T> Key;
typedef T *Lookup;
/* Delegate to the raw-pointer hasher: the barrier wrapper must not affect the hash. */
static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
static bool match(const Key &k, Lookup l) { return k.get() == l; }
};
/* Let EncapsulatedPtr<T> be used as a hash key without naming a policy explicitly. */
template <class T>
struct DefaultHasher< EncapsulatedPtr<T> > : EncapsulatedPtrHasher<T> { };
class EncapsulatedValue : public ValueOperations<EncapsulatedValue>
{
protected:
@ -379,7 +412,7 @@ class RelocatableValue : public EncapsulatedValue
public:
explicit inline RelocatableValue();
explicit inline RelocatableValue(const Value &v);
explicit inline RelocatableValue(const RelocatableValue &v);
inline RelocatableValue(const RelocatableValue &v);
inline ~RelocatableValue();
inline RelocatableValue &operator=(const Value &v);
@ -414,7 +447,7 @@ class HeapSlot : public EncapsulatedValue
inline void set(JSCompartment *comp, JSObject *owner, uint32_t slot, const Value &v);
static inline void writeBarrierPost(JSObject *obj, uint32_t slot);
static inline void writeBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t slotno);
static inline void writeBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t slot);
private:
inline void post(JSObject *owner, uint32_t slot);
@ -428,8 +461,19 @@ class HeapSlot : public EncapsulatedValue
* single step.
*/
inline void
SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count)
SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count);
/*
 * This is a post barrier for HashTables whose key can be moved during a GC.
 *
 * When |key| currently lives in the nursery, the (table, key) pair is recorded
 * in the compartment's generic store buffer so the GC can later locate the
 * entry whose key may have been relocated.
 */
template <class Map, class Key>
inline void
HashTableWriteBarrierPost(JSCompartment *comp, const Map *map, const Key &key)
{
#ifdef JSGC_GENERATIONAL
    /*
     * Bug fix: the guard previously read JS_GCGENERATIONAL, which never
     * matches the JSGC_GENERATIONAL macro used by gc/StoreBuffer.h and the
     * Nursery/StoreBuffer members referenced below, so the barrier silently
     * compiled to a no-op. HashKeyRef is a class template, so its arguments
     * must be spelled out explicitly here; instantiating with |const Map|
     * keeps the const-ness of |map| without a cast.
     */
    if (key && comp->gcNursery.isInside(key))
        comp->gcStoreBuffer.putGeneric(HashKeyRef<const Map, Key>(map, key));
#endif
}
static inline const Value *
@ -467,15 +511,16 @@ class EncapsulatedId
protected:
jsid value;
explicit EncapsulatedId() : value(JSID_VOID) {}
explicit inline EncapsulatedId(jsid id) : value(id) {}
~EncapsulatedId() {}
private:
EncapsulatedId(const EncapsulatedId &v) MOZ_DELETE;
EncapsulatedId &operator=(const EncapsulatedId &v) MOZ_DELETE;
public:
explicit EncapsulatedId() : value(JSID_VOID) {}
explicit EncapsulatedId(jsid id) : value(id) {}
~EncapsulatedId();
inline EncapsulatedId &operator=(const EncapsulatedId &v);
bool operator==(jsid id) const { return value == id; }
bool operator!=(jsid id) const { return value != id; }

View File

@ -423,38 +423,47 @@ struct ArenaHeader
* chunk. The latter allows to quickly check if the arena is allocated
* during the conservative GC scanning without searching the arena in the
* list.
*
* We use 8 bits for the allocKind so the compiler can use byte-level memory
* instructions to access it.
*/
size_t allocKind : 8;
/*
* When recursive marking uses too much stack the marking is delayed and
* the corresponding arenas are put into a stack using the following field
* as a linkage. To distinguish the bottom of the stack from the arenas
* not present in the stack we use an extra flag to tag arenas on the
* stack.
* When collecting we sometimes need to keep an auxiliary list of arenas,
* for which we use the following fields. This happens for several reasons:
*
* When recursive marking uses too much stack the marking is delayed and the
* corresponding arenas are put into a stack. To distinguish the bottom of
* the stack from the arenas not present in the stack we use the
* markOverflow flag to tag arenas on the stack.
*
* Delayed marking is also used for arenas that we allocate into during an
* incremental GC. In this case, we intend to mark all the objects in the
* arena, and it's faster to do this marking in bulk.
*
* To minimize the ArenaHeader size we record the next delayed marking
* linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
* field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
* ArenaShift - 1, so the compiler can use byte-level memory instructions
* to access it.
* When sweeping we keep track of which arenas have been allocated since the
* end of the mark phase. This allows us to tell whether a pointer to an
* unmarked object is yet to be finalized or has already been reallocated.
* We set the allocatedDuringIncremental flag for this and clear it at the
* end of the sweep phase.
*
* To minimize the ArenaHeader size we record the next linkage as
* arenaAddress() >> ArenaShift and pack it with the allocKind field and the
* flags.
*/
public:
size_t hasDelayedMarking : 1;
size_t allocatedDuringIncremental : 1;
size_t markOverflow : 1;
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
static void staticAsserts() {
/* We must be able to fit the allockind into uint8_t. */
JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
/*
* nextDelayedMarkingpacking assumes that ArenaShift has enough bits
* auxNextLink packing assumes that ArenaShift has enough bits
* to cover allocKind and hasDelayedMarking.
*/
JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
@ -487,7 +496,7 @@ struct ArenaHeader
markOverflow = 0;
allocatedDuringIncremental = 0;
hasDelayedMarking = 0;
nextDelayedMarking = 0;
auxNextLink = 0;
}
inline uintptr_t arenaAddress() const;
@ -519,6 +528,11 @@ struct ArenaHeader
inline ArenaHeader *getNextDelayedMarking() const;
inline void setNextDelayedMarking(ArenaHeader *aheader);
inline void unsetDelayedMarking();
inline ArenaHeader *getNextAllocDuringSweep() const;
inline void setNextAllocDuringSweep(ArenaHeader *aheader);
inline void unsetAllocDuringSweep();
};
struct Arena
@ -882,15 +896,48 @@ ArenaHeader::setFirstFreeSpan(const FreeSpan *span)
inline ArenaHeader *
ArenaHeader::getNextDelayedMarking() const
{
return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
JS_ASSERT(hasDelayedMarking);
return &reinterpret_cast<Arena *>(auxNextLink << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
{
JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
JS_ASSERT(!auxNextLink && !hasDelayedMarking);
hasDelayedMarking = 1;
nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
auxNextLink = aheader->arenaAddress() >> ArenaShift;
}
inline void
ArenaHeader::unsetDelayedMarking()
{
JS_ASSERT(hasDelayedMarking);
hasDelayedMarking = 0;
auxNextLink = 0;
}
/*
 * Return the next arena on the allocated-during-sweep list. auxNextLink
 * stores the next arena's address compressed as arenaAddress() >> ArenaShift,
 * so it is expanded here; valid only while allocatedDuringIncremental is set.
 */
inline ArenaHeader *
ArenaHeader::getNextAllocDuringSweep() const
{
JS_ASSERT(allocatedDuringIncremental);
return &reinterpret_cast<Arena *>(auxNextLink << ArenaShift)->aheader;
}
/*
 * Link this arena onto the allocated-during-sweep list. auxNextLink is shared
 * with the delayed-marking list, so the arena must not already be on either
 * list — hence the assert that both the link and the flag are clear.
 */
inline void
ArenaHeader::setNextAllocDuringSweep(ArenaHeader *aheader)
{
JS_ASSERT(!auxNextLink && !allocatedDuringIncremental);
allocatedDuringIncremental = 1;
auxNextLink = aheader->arenaAddress() >> ArenaShift;
}
/*
 * Remove this arena from the allocated-during-sweep list, clearing the shared
 * auxNextLink field so it can be reused (e.g. by delayed marking).
 */
inline void
ArenaHeader::unsetAllocDuringSweep()
{
JS_ASSERT(allocatedDuringIncremental);
allocatedDuringIncremental = 0;
auxNextLink = 0;
}
JS_ALWAYS_INLINE void

View File

@ -62,6 +62,7 @@ namespace JS {
* separate rooting analysis.
*/
template <typename T> class MutableHandle;
template <typename T> class Rooted;
template <typename T>
@ -79,6 +80,9 @@ struct NullPtr
static void * const constNullValue;
};
template <typename T>
class MutableHandle;
template <typename T>
class HandleBase {};
@ -108,6 +112,11 @@ class Handle : public HandleBase<T>
ptr = reinterpret_cast<const T *>(&NullPtr::constNullValue);
}
friend class MutableHandle<T>;
Handle(MutableHandle<T> handle) {
ptr = handle.address();
}
/*
* This may be called only if the location of the T is guaranteed
* to be marked (for some reason other than being a Rooted),
@ -130,6 +139,12 @@ class Handle : public HandleBase<T>
Handle(Rooted<S> &root,
typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0);
/* Construct a read only handle from a mutable handle. */
template <typename S>
inline
Handle(MutableHandle<S> &root,
typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0);
const T *address() const { return ptr; }
T get() const { return *ptr; }
@ -185,6 +200,19 @@ class MutableHandle : public MutableHandleBase<T>
*ptr = v;
}
/*
* This may be called only if the location of the T is guaranteed
* to be marked (for some reason other than being a Rooted),
* e.g., if it is guaranteed to be reachable from an implicit root.
*
* Create a MutableHandle from a raw location of a T.
*/
static MutableHandle fromMarkedLocation(T *p) {
MutableHandle h;
h.ptr = p;
return h;
}
T *address() const { return ptr; }
T get() const { return *ptr; }
@ -195,16 +223,33 @@ class MutableHandle : public MutableHandleBase<T>
MutableHandle() {}
T *ptr;
template <typename S>
void operator =(S v) MOZ_DELETE;
};
typedef MutableHandle<JSObject*> MutableHandleObject;
typedef MutableHandle<Value> MutableHandleValue;
/*
* Raw pointer used as documentation that a parameter does not need to be
* rooted.
*/
typedef JSObject * RawObject;
/*
* By default, pointers should use the inheritance hierarchy to find their
* ThingRootKind. Some pointer types are explicitly set in jspubtd.h so that
* Rooted<T> may be used without the class definition being available.
*/
template <typename T>
struct RootKind<T *> { static ThingRootKind rootKind() { return T::rootKind(); }; };
template <typename T>
struct RootMethods<T *>
{
static T *initial() { return NULL; }
static ThingRootKind kind() { return T::rootKind(); }
static ThingRootKind kind() { return RootKind<T *>::rootKind(); }
static bool poisoned(T *v) { return IsPoisonedPtr(v); }
};
@ -291,6 +336,14 @@ Handle<T>::Handle(Rooted<S> &root,
ptr = reinterpret_cast<const T *>(root.address());
}
template<typename T> template <typename S>
inline
Handle<T>::Handle(MutableHandle<S> &root,
typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy)
{
ptr = reinterpret_cast<const T *>(root.address());
}
template<typename T> template <typename S>
inline
MutableHandle<T>::MutableHandle(Rooted<S> *root,
@ -332,15 +385,7 @@ class SkipRoot
public:
template <typename T>
SkipRoot(JSContext *cx, const T *ptr
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
init(ContextFriendFields::get(cx), ptr, 1);
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
template <typename T>
SkipRoot(JSContext *cx, const T *ptr, size_t count
SkipRoot(JSContext *cx, const T *ptr, size_t count = 1
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
init(ContextFriendFields::get(cx), ptr, count);
@ -363,14 +408,7 @@ class SkipRoot
public:
template <typename T>
SkipRoot(JSContext *cx, const T *ptr
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
template <typename T>
SkipRoot(JSContext *cx, const T *ptr, size_t count
SkipRoot(JSContext *cx, const T *ptr, size_t count = 1
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
@ -381,6 +419,12 @@ class SkipRoot
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/*
* This typedef is to annotate parameters that we have manually verified do not
* need rooting, as opposed to parameters that have not yet been considered.
*/
typedef JSObject *RawObject;
#ifdef DEBUG
JS_FRIEND_API(bool) IsRootingUnnecessaryForContext(JSContext *cx);
JS_FRIEND_API(void) SetRootingUnnecessaryForContext(JSContext *cx, bool value);
@ -389,14 +433,16 @@ JS_FRIEND_API(bool) RelaxRootChecksForContext(JSContext *cx);
class AssertRootingUnnecessary {
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
#ifdef DEBUG
JSContext *cx;
bool prev;
#endif
public:
AssertRootingUnnecessary(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM)
: cx(cx)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
#ifdef DEBUG
this->cx = cx;
prev = IsRootingUnnecessaryForContext(cx);
SetRootingUnnecessaryForContext(cx, true);
#endif

View File

@ -80,6 +80,9 @@ struct Statistics {
counts[s]++;
}
int64_t beginSCC();
void endSCC(unsigned scc, int64_t start);
jschar *formatMessage();
jschar *formatJSON(uint64_t timestamp);
@ -134,10 +137,14 @@ struct Statistics {
/* Allocated space before the GC started. */
size_t preBytes;
/* Sweep times for SCCs of compartments. */
Vector<int64_t, 0, SystemAllocPolicy> sccTimes;
void beginGC();
void endGC();
int64_t gcDuration();
void gcDuration(int64_t *total, int64_t *maxPause);
void sccDurations(int64_t *total, int64_t *maxPause);
void printStats();
bool formatData(StatisticsSerializer &ss, uint64_t timestamp);
@ -168,6 +175,17 @@ struct AutoPhase {
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/*
 * RAII timer for one strongly connected component (SCC) of compartments
 * during sweeping: records the start time at construction and reports the
 * elapsed interval to Statistics::endSCC() at destruction.
 */
struct AutoSCC {
AutoSCC(Statistics &stats, unsigned scc JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats), scc(scc) { JS_GUARD_OBJECT_NOTIFIER_INIT; start = stats.beginSCC(); }
~AutoSCC() { stats.endSCC(scc, start); }
Statistics &stats; /* statistics sink that receives the timing */
unsigned scc; /* index of the SCC being timed */
int64_t start; /* timestamp returned by beginSCC() */
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
} /* namespace gcstats */
} /* namespace js */

View File

@ -0,0 +1,398 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef JSGC_GENERATIONAL
#ifndef jsgc_storebuffer_h___
#define jsgc_storebuffer_h___
#include "jsgc.h"
#include "jsalloc.h"
#include "gc/Marking.h"
namespace js {
namespace gc {
/*
* Note: this is a stub Nursery that does not actually contain a heap, just a
* set of pointers which are "inside" the nursery to implement verification.
*/
class Nursery
{
/* The set of pointers considered "inside" this stub nursery. */
HashSet<void*, PointerHasher<void*, 3>, SystemAllocPolicy> nursery;
public:
Nursery() : nursery() {}
/* Lazily initialize the underlying set; idempotent. Returns false on OOM. */
bool enable() {
if (!nursery.initialized())
return nursery.init();
return true;
}
/* Release the set's storage; safe to call when never enabled. */
void disable() {
if (!nursery.initialized())
return;
nursery.finish();
}
/*
 * Membership test. The assert rejects pointers with either low bit set,
 * since PointerHasher assumes the low bits of a cell pointer carry no data.
 */
bool isInside(void *cell) const {
JS_ASSERT((uintptr_t(cell) & 0x3) == 0);
return nursery.initialized() && nursery.has(cell);
}
/*
 * Record a pointer as nursery-resident. NOTE(review): the fallible result
 * of putNew() is ignored — presumably acceptable for this verification-only
 * stub; confirm the intended OOM behavior.
 */
void insertPointer(void *cell) {
nursery.putNew(cell);
}
};
/*
* BufferableRef represents an abstract reference for use in the generational
* GC's remembered set. Entries in the store buffer that cannot be represented
* with the simple pointer-to-a-pointer scheme must derive from this class and
* use the generic store buffer interface.
*/
class BufferableRef
{
public:
/* Return true iff this entry refers to the given edge location. */
virtual bool match(void *location) = 0;
/* Visit/trace the edge this entry describes; semantics are subclass-defined. */
virtual void mark(JSTracer *trc) = 0;
/*
 * NOTE(review): no virtual destructor. Entries appear to be copy-constructed
 * into the generic store buffer and never deleted through a base pointer;
 * confirm before ever destroying derived instances polymorphically.
 */
};
/*
* HashKeyRef represents a reference to a HashTable key. Manual HashTable
* barriers should instantiate this template with their own table/key
* type to insert into the generic buffer with putGeneric.
*/
template <typename Map, typename Key>
class HashKeyRef : public BufferableRef
{
Map *map; /* table containing the entry */
Key key; /* copy of the key at the time the barrier fired */
typedef typename Map::Ptr Ptr;
public:
HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}
/*
 * True iff |location| is the address of this key's entry slot in the table;
 * false when the key is no longer present.
 */
bool match(void *location) {
Ptr p = map->lookup(key);
if (!p)
return false;
return &p->key == location;
}
/* No-op in this stub; presumably a real generational GC would rekey here — confirm. */
void mark(JSTracer *trc) {}
};
/*
* The StoreBuffer observes all writes that occur in the system and performs
* efficient filtering of them to derive a remembered set for nursery GC.
*/
class StoreBuffer
{
/* TODO: profile to find the ideal size for these. */
static const size_t ValueBufferSize = 1 * 1024 * sizeof(Value *);
static const size_t CellBufferSize = 2 * 1024 * sizeof(Cell **);
static const size_t SlotBufferSize = 2 * 1024 * (sizeof(JSObject *) + sizeof(uint32_t));
static const size_t RelocValueBufferSize = 1 * 1024 * sizeof(Value *);
static const size_t RelocCellBufferSize = 1 * 1024 * sizeof(Cell **);
static const size_t GenericBufferSize = 1 * 1024 * sizeof(int);
/* All sub-buffers are carved out of one shared allocation of this size. */
static const size_t TotalSize = ValueBufferSize + CellBufferSize +
SlotBufferSize + RelocValueBufferSize + RelocCellBufferSize +
GenericBufferSize;
/* Deduplicated set of edge locations, used only for verification. */
typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;
/*
* This buffer holds only a single type of edge. Using this buffer is more
* efficient than the generic buffer when many writes will be to the same
* type of edge: e.g. Value or Cell*.
*/
template<typename T>
class MonoTypeBuffer
{
friend class StoreBuffer;
StoreBuffer *owner; /* buffer set this belongs to; notified on overflow */
Nursery *nursery; /* presumably consulted via T::inRememberedSet when compacting — confirm in the .cpp */
T *base; /* Pointer to the start of the buffer. */
T *pos; /* Pointer to the current insertion position. */
T *top; /* Pointer to one element after the end. */
MonoTypeBuffer(StoreBuffer *owner, Nursery *nursery)
: owner(owner), nursery(nursery), base(NULL), pos(NULL), top(NULL)
{}
MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
/* Point this buffer at a [region, region+len) slice of the shared allocation. */
bool enable(uint8_t *region, size_t len);
void disable();
bool isEmpty() const { return pos == base; }
bool isFull() const { JS_ASSERT(pos <= top); return pos == top; }
/* Compaction algorithms. */
void compactNotInSet();
/*
* Attempts to reduce the usage of the buffer by removing unnecessary
* entries.
*/
virtual void compact();
/* Add one item to the buffer. */
void put(const T &v);
/* For verification. */
bool accumulateEdges(EdgeSet &edges);
};
/*
* Overrides the MonoTypeBuffer to support pointers that may be moved in
* memory outside of the GC's control.
*/
template <typename T>
class RelocatableMonoTypeBuffer : public MonoTypeBuffer<T>
{
friend class StoreBuffer;
RelocatableMonoTypeBuffer(StoreBuffer *owner, Nursery *nursery)
: MonoTypeBuffer<T>(owner, nursery)
{}
/* Override compaction to filter out removed items. */
void compactMoved();
virtual void compact();
/* Record a removal from the buffer. */
void unput(const T &v);
};
/*
* Type-erased buffer: each entry is an unsigned byte-size header followed by
* an in-place copy-constructed BufferableRef-derived object of that size.
*/
class GenericBuffer
{
friend class StoreBuffer;
StoreBuffer *owner;
Nursery *nursery;
uint8_t *base; /* Pointer to start of buffer. */
uint8_t *pos; /* Pointer to current buffer position. */
uint8_t *top; /* Pointer to one past the last entry. */
GenericBuffer(StoreBuffer *owner, Nursery *nursery)
: owner(owner), nursery(nursery)
{}
GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;
bool enable(uint8_t *region, size_t len);
void disable();
/* Check if a pointer is present in the buffer. */
bool containsEdge(void *location) const;
/* Append |t|: write sizeof(T), then copy-construct t in place. No-op until enabled. */
template <typename T>
void put(const T &t) {
/* Check if we have been enabled. */
if (!pos)
return;
/* Check for overflow. */
if (top - pos < (unsigned)(sizeof(unsigned) + sizeof(T))) {
owner->setOverflowed();
return;
}
*((unsigned *)pos) = sizeof(T);
pos += sizeof(unsigned);
T *p = (T *)pos;
new (p) T(t);
pos += sizeof(T);
}
};
/*
* Edge descriptors: each names one heap location that may point into the
* nursery and supplies the predicates the buffers use for filtering.
*/
class CellPtrEdge
{
friend class StoreBuffer;
friend class StoreBuffer::MonoTypeBuffer<CellPtrEdge>;
friend class StoreBuffer::RelocatableMonoTypeBuffer<CellPtrEdge>;
Cell **edge;
CellPtrEdge(Cell **v) : edge(v) {}
bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }
void *location() const { return (void *)edge; }
/* In the remembered set iff the slot is outside the nursery but points into it. */
bool inRememberedSet(Nursery *n) {
return !n->isInside(edge) && n->isInside(*edge);
}
bool isNullEdge() const {
return !*edge;
}
/* Low pointer bit tags entries; presumably marks removals recorded by unput() — confirm. */
CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
bool isTagged() const { return bool(uintptr_t(edge) & 1); }
};
class ValueEdge
{
friend class StoreBuffer;
friend class StoreBuffer::MonoTypeBuffer<ValueEdge>;
friend class StoreBuffer::RelocatableMonoTypeBuffer<ValueEdge>;
Value *edge;
ValueEdge(Value *v) : edge(v) {}
bool operator==(const ValueEdge &other) const { return edge == other.edge; }
bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
/* The GC thing this Value holds, or NULL for non-GC-thing Values. */
void *deref() const { return edge->isGCThing() ? edge->toGCThing() : NULL; }
void *location() const { return (void *)edge; }
bool inRememberedSet(Nursery *n) {
return !n->isInside(edge) && n->isInside(deref());
}
bool isNullEdge() const {
return !deref();
}
ValueEdge tagged() const { return ValueEdge((Value *)(uintptr_t(edge) | 1)); }
ValueEdge untagged() const { return ValueEdge((Value *)(uintptr_t(edge) & ~1)); }
bool isTagged() const { return bool(uintptr_t(edge) & 1); }
};
/* An object slot or dense-array element, identified by owner and index. */
struct SlotEdge
{
friend class StoreBuffer;
friend class StoreBuffer::MonoTypeBuffer<SlotEdge>;
JSObject *object;
uint32_t offset;
SlotEdge(JSObject *object, uint32_t offset) : object(object), offset(offset) {}
bool operator==(const SlotEdge &other) const {
return object == other.object && offset == other.offset;
}
bool operator!=(const SlotEdge &other) const {
return object != other.object || offset != other.offset;
}
/* Resolve to the slot's current address; NULL when the index is out of range. */
HeapSlot *slotLocation() const {
if (object->isDenseArray()) {
if (offset >= object->getDenseArrayInitializedLength())
return NULL;
return (HeapSlot *)&object->getDenseArrayElement(offset);
}
if (offset >= object->slotSpan())
return NULL;
return &object->getSlotRef(offset);
}
void *deref() const {
HeapSlot *loc = slotLocation();
return (loc && loc->isGCThing()) ? loc->toGCThing() : NULL;
}
void *location() const {
return (void *)slotLocation();
}
bool inRememberedSet(Nursery *n) {
return !n->isInside(object) && n->isInside(deref());
}
bool isNullEdge() const {
return !deref();
}
};
/* One buffer per edge flavor, all sharing the single |buffer| allocation. */
MonoTypeBuffer<ValueEdge> bufferVal;
MonoTypeBuffer<CellPtrEdge> bufferCell;
MonoTypeBuffer<SlotEdge> bufferSlot;
RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
GenericBuffer bufferGeneric;
Nursery *nursery;
void *buffer; /* backing storage shared by all sub-buffers */
bool overflowed; /* set when any sub-buffer runs out of space */
bool enabled;
/* For the verifier. */
EdgeSet edgeSet;
/* For use by our owned buffers. */
void setOverflowed() { overflowed = true; }
public:
StoreBuffer(Nursery *n)
: bufferVal(this, n), bufferCell(this, n), bufferSlot(this, n),
bufferRelocVal(this, n), bufferRelocCell(this, n), bufferGeneric(this, n),
nursery(n), buffer(NULL), overflowed(false), enabled(false)
{}
bool enable();
void disable();
bool isEnabled() { return enabled; }
/* Get the overflowed status. */
bool hasOverflowed() const { return overflowed; }
/* Insert a single edge into the buffer/remembered set. */
void putValue(Value *v) {
bufferVal.put(v);
}
void putCell(Cell **o) {
bufferCell.put(o);
}
void putSlot(JSObject *obj, uint32_t slot) {
bufferSlot.put(SlotEdge(obj, slot));
}
/* Insert or update a single edge in the Relocatable buffer. */
void putRelocatableValue(Value *v) {
bufferRelocVal.put(v);
}
void putRelocatableCell(Cell **c) {
bufferRelocCell.put(c);
}
void removeRelocatableValue(Value *v) {
bufferRelocVal.unput(v);
}
void removeRelocatableCell(Cell **c) {
bufferRelocCell.unput(c);
}
/* Insert an entry into the generic buffer. */
template <typename T>
void putGeneric(const T &t) {
bufferGeneric.put(t);
}
/* For the verifier. */
bool coalesceForVerification();
void releaseVerificationData();
bool containsEdgeAt(void *loc) const;
};
} /* namespace gc */
} /* namespace js */
#endif /* jsgc_storebuffer_h___ */
#endif /* JSGC_GENERATIONAL */

View File

@ -1,4 +1,3 @@
/* js-config.h. Generated automatically by configure. */
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*

View File

@ -116,8 +116,8 @@ MSG_DEF(JSMSG_UNMATCHED_RIGHT_PAREN, 62, 0, JSEXN_SYNTAXERR, "unmatched ) in r
MSG_DEF(JSMSG_TOO_BIG_TO_ENCODE, 63, 0, JSEXN_INTERNALERR, "data are to big to encode")
MSG_DEF(JSMSG_ARG_INDEX_OUT_OF_RANGE, 64, 1, JSEXN_RANGEERR, "argument {0} accesses an index that is out of range")
MSG_DEF(JSMSG_SPREAD_TOO_LARGE, 65, 0, JSEXN_RANGEERR, "array too large due to spread operand(s)")
MSG_DEF(JSMSG_UNUSED66, 66, 0, JSEXN_NONE, "")
MSG_DEF(JSMSG_UNUSED67, 67, 0, JSEXN_NONE, "")
MSG_DEF(JSMSG_SOURCE_TOO_LONG, 66, 0, JSEXN_RANGEERR, "source is too long")
MSG_DEF(JSMSG_BAD_WEAKMAP_KEY, 67, 0, JSEXN_TYPEERR, "cannot use the given object as a weak map key")
MSG_DEF(JSMSG_BAD_SCRIPT_MAGIC, 68, 0, JSEXN_INTERNALERR, "bad script XDR magic number")
MSG_DEF(JSMSG_PAREN_BEFORE_FORMAL, 69, 0, JSEXN_SYNTAXERR, "missing ( before formal parameters")
MSG_DEF(JSMSG_MISSING_FORMAL, 70, 0, JSEXN_SYNTAXERR, "missing formal parameter")
@ -126,7 +126,7 @@ MSG_DEF(JSMSG_CURLY_BEFORE_BODY, 72, 0, JSEXN_SYNTAXERR, "missing { before
MSG_DEF(JSMSG_CURLY_AFTER_BODY, 73, 0, JSEXN_SYNTAXERR, "missing } after function body")
MSG_DEF(JSMSG_PAREN_BEFORE_COND, 74, 0, JSEXN_SYNTAXERR, "missing ( before condition")
MSG_DEF(JSMSG_PAREN_AFTER_COND, 75, 0, JSEXN_SYNTAXERR, "missing ) after condition")
MSG_DEF(JSMSG_DESTRUCT_DUP_ARG, 76, 0, JSEXN_SYNTAXERR, "duplicate argument is mixed with destructuring pattern")
MSG_DEF(JSMSG_BAD_DUP_ARGS, 76, 0, JSEXN_SYNTAXERR, "duplicate argument names not allowed in this context")
MSG_DEF(JSMSG_NAME_AFTER_DOT, 77, 0, JSEXN_SYNTAXERR, "missing name after . operator")
MSG_DEF(JSMSG_BRACKET_IN_INDEX, 78, 0, JSEXN_SYNTAXERR, "missing ] in index expression")
MSG_DEF(JSMSG_XML_WHOLE_PROGRAM, 79, 0, JSEXN_SYNTAXERR, "XML can't be the whole program")
@ -196,7 +196,7 @@ MSG_DEF(JSMSG_BAD_CLONE_FUNOBJ_SCOPE, 142, 0, JSEXN_TYPEERR, "bad cloned functio
MSG_DEF(JSMSG_SHARPVAR_TOO_BIG, 143, 0, JSEXN_SYNTAXERR, "overlarge sharp variable number")
MSG_DEF(JSMSG_ILLEGAL_CHARACTER, 144, 0, JSEXN_SYNTAXERR, "illegal character")
MSG_DEF(JSMSG_BAD_OCTAL, 145, 1, JSEXN_SYNTAXERR, "{0} is not a legal ECMA-262 octal constant")
MSG_DEF(JSMSG_BAD_INDIRECT_CALL, 146, 1, JSEXN_EVALERR, "function {0} must be called directly, and not by way of a function of another name")
MSG_DEF(JSMSG_UNUSED146, 146, 0, JSEXN_NONE, "")
MSG_DEF(JSMSG_UNCAUGHT_EXCEPTION, 147, 1, JSEXN_INTERNALERR, "uncaught exception: {0}")
MSG_DEF(JSMSG_INVALID_BACKREF, 148, 0, JSEXN_SYNTAXERR, "non-octal digit in an escape sequence that doesn't match a back-reference")
MSG_DEF(JSMSG_BAD_BACKREF, 149, 0, JSEXN_SYNTAXERR, "back-reference exceeds number of capturing parentheses")
@ -352,3 +352,11 @@ MSG_DEF(JSMSG_FUNCTION_ARGUMENTS_AND_REST, 298, 0, JSEXN_ERR, "the 'arguments' p
MSG_DEF(JSMSG_REST_WITH_DEFAULT, 299, 0, JSEXN_SYNTAXERR, "rest parameter may not have a default")
MSG_DEF(JSMSG_NONDEFAULT_FORMAL_AFTER_DEFAULT, 300, 0, JSEXN_SYNTAXERR, "parameter(s) with default followed by parameter without default")
MSG_DEF(JSMSG_YIELD_IN_DEFAULT, 301, 0, JSEXN_SYNTAXERR, "yield in default expression")
MSG_DEF(JSMSG_INTRINSIC_NOT_DEFINED, 302, 1, JSEXN_REFERENCEERR, "no intrinsic function {0}")
MSG_DEF(JSMSG_ALREADY_HAS_SOURCEMAP, 303, 1, JSEXN_ERR, "{0} is being assigned a source map, yet already has one")
MSG_DEF(JSMSG_PAR_ARRAY_BAD_ARG, 304, 1, JSEXN_TYPEERR, "invalid ParallelArray{0} argument")
MSG_DEF(JSMSG_PAR_ARRAY_BAD_PARTITION, 305, 0, JSEXN_ERR, "argument must be divisible by outermost dimension")
MSG_DEF(JSMSG_PAR_ARRAY_REDUCE_EMPTY, 306, 0, JSEXN_ERR, "cannot reduce empty ParallelArray object")
MSG_DEF(JSMSG_PAR_ARRAY_ALREADY_FLAT, 307, 0, JSEXN_ERR, "cannot flatten 1-dimensional ParallelArray object")
MSG_DEF(JSMSG_PAR_ARRAY_SCATTER_CONFLICT, 308, 0, JSEXN_ERR, "no conflict resolution function provided")
MSG_DEF(JSMSG_PAR_ARRAY_SCATTER_BOUNDS, 309, 0, JSEXN_ERR, "index in scatter vector out of bounds")

View File

@ -15,9 +15,6 @@ namespace js {
class TempAllocPolicy;
/* Integral types for all hash functions. */
typedef uint32_t HashNumber;
/*****************************************************************************/
namespace detail {
@ -217,9 +214,6 @@ class HashTable : private AllocPolicy
* this operation until the next call to |popFront()|.
*/
void rekeyFront(const Lookup &l, const Key &k) {
JS_ASSERT(&k != &HashPolicy::getKey(this->cur->t));
if (match(*this->cur, l))
return;
typename HashTableEntry<T>::NonConstT t = this->cur->t;
HashPolicy::setKey(t, const_cast<Key &>(k));
table.remove(*this->cur);
@ -288,7 +282,6 @@ class HashTable : private AllocPolicy
static const uint8_t sMinAlphaFrac = 64; /* (0x100 * .25) taken from jsdhash.h */
static const uint8_t sMaxAlphaFrac = 192; /* (0x100 * .75) taken from jsdhash.h */
static const uint8_t sInvMaxAlpha = 171; /* (ceil(0x100 / .75) >> 1) */
static const HashNumber sGoldenRatio = 0x9E3779B9U; /* taken from jsdhash.h */
static const HashNumber sFreeKey = Entry::sFreeKey;
static const HashNumber sRemovedKey = Entry::sRemovedKey;
static const HashNumber sCollisionBit = Entry::sCollisionBit;
@ -308,10 +301,7 @@ class HashTable : private AllocPolicy
static HashNumber prepareHash(const Lookup& l)
{
HashNumber keyHash = HashPolicy::hash(l);
/* Improve keyHash distribution. */
keyHash *= sGoldenRatio;
HashNumber keyHash = ScrambleHashCode(HashPolicy::hash(l));
/* Avoid reserved hash codes. */
if (!isLiveHash(keyHash))
@ -1003,6 +993,9 @@ template <class Key,
class AllocPolicy = TempAllocPolicy>
class HashMap
{
typedef typename tl::StaticAssert<tl::IsRelocatableHeapType<Key>::result>::result keyAssert;
typedef typename tl::StaticAssert<tl::IsRelocatableHeapType<Value>::result>::result valAssert;
public:
typedef typename HashPolicy::Lookup Lookup;

View File

@ -56,6 +56,7 @@ struct RuntimeSizes
, gcMarker(0)
, mathCache(0)
, scriptFilenames(0)
, scriptSources(0)
, compartmentObjects(0)
{}
@ -71,6 +72,7 @@ struct RuntimeSizes
size_t gcMarker;
size_t mathCache;
size_t scriptFilenames;
size_t scriptSources;
// This is the exception to the "RuntimeSizes doesn't measure things within
// compartments" rule. We combine the sizes of all the JSCompartment

View File

@ -355,6 +355,26 @@ JS_FLOOR_LOG2W(size_t n)
return js_FloorLog2wImpl(n);
}
/*
* JS_ROTATE_LEFT32
*
* There is no rotate operation in the C Language so the construct (a << 4) |
* (a >> 28) is used instead. Most compilers convert this to a rotate
* instruction but some versions of MSVC don't without a little help. To get
* MSVC to generate a rotate instruction, we have to use the _rotl intrinsic
* and use a pragma to make _rotl inline.
*
* MSVC in VS2005 will do an inline rotate instruction on the above construct.
*/
#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_AMD64) || \
defined(_M_X64))
#include <stdlib.h>
#pragma intrinsic(_rotl)
#define JS_ROTATE_LEFT32(a, bits) _rotl(a, bits)
#else
#define JS_ROTATE_LEFT32(a, bits) (((a) << (bits)) | ((a) >> (32 - (bits))))
#endif
JS_END_EXTERN_C
#ifdef __cplusplus
@ -599,11 +619,18 @@ public:
class UnwantedForeground : public Foreground {
};
template <typename T>
struct ScopedDeletePtrTraits
template<typename T>
struct ScopedFreePtrTraits
{
typedef T* type;
static T* empty() { return NULL; }
static void release(T* ptr) { Foreground::free_(ptr); }
};
SCOPED_TEMPLATE(ScopedFreePtr, ScopedFreePtrTraits)
template <typename T>
struct ScopedDeletePtrTraits : public ScopedFreePtrTraits<T>
{
typedef T *type;
static T *empty() { return NULL; }
static void release(T *ptr) { Foreground::delete_(ptr); }
};
SCOPED_TEMPLATE(ScopedDeletePtr, ScopedDeletePtrTraits)
@ -829,20 +856,7 @@ class MoveRef {
explicit MoveRef(T &t) : pointer(&t) { }
T &operator*() const { return *pointer; }
T *operator->() const { return pointer; }
#if defined(__GXX_EXPERIMENTAL_CXX0X__) && defined(__clang__)
/*
* If MoveRef is used in a rvalue position (which is expected), we can
* end up in a situation where, without this ifdef, we would try to pass
* a T& to a move constructor, which fails. It is not clear if the compiler
* should instead use the copy constructor, but for now this lets us build
* with clang. See bug 689066 and llvm.org/pr11003 for the details.
* Note: We can probably remove MoveRef completely once we are comfortable
* using c++11.
*/
operator T&& () const { return static_cast<T&&>(*pointer); }
#else
operator T& () const { return *pointer; }
#endif
private:
T *pointer;
};
@ -895,6 +909,51 @@ RoundUpPow2(size_t x)
return size_t(1) << JS_CEILING_LOG2W(x);
}
/* Integral types for all hash functions. */
typedef uint32_t HashNumber;
const unsigned HashNumberSizeBits = 32;
namespace detail {
/*
* Given a raw hash code, h, return a number that can be used to select a hash
* bucket.
*
* This function aims to produce as uniform an output distribution as possible,
* especially in the most significant (leftmost) bits, even though the input
* distribution may be highly nonrandom, given the constraints that this must
* be deterministic and quick to compute.
*
* Since the leftmost bits of the result are best, the hash bucket index is
* computed by doing ScrambleHashCode(h) / (2^32/N) or the equivalent
* right-shift, not ScrambleHashCode(h) % N or the equivalent bit-mask.
*
* FIXME: OrderedHashTable uses a bit-mask; see bug 775896.
*/
inline HashNumber
ScrambleHashCode(HashNumber h)
{
/*
* Simply returning h would not cause any hash tables to produce wrong
* answers. But it can produce pathologically bad performance: The caller
* right-shifts the result, keeping only the highest bits. The high bits of
* hash codes are very often completely entropy-free. (So are the lowest
* bits.)
*
* So we use Fibonacci hashing, as described in Knuth, The Art of Computer
* Programming, 6.4. This mixes all the bits of the input hash code h.
*
* The value of goldenRatio is taken from the hex
* expansion of the golden ratio, which starts 1.9E3779B9....
* This value is especially good if values with consecutive hash codes
* are stored in a hash table; see Knuth for details.
*/
static const HashNumber goldenRatio = 0x9E3779B9U;
return h * goldenRatio;
}
} /* namespace detail */
} /* namespace js */
namespace JS {
@ -910,7 +969,7 @@ namespace JS {
* a live integer value.
*/
inline void PoisonPtr(uintptr_t *v)
inline void PoisonPtr(void *v)
{
#if defined(JSGC_ROOT_ANALYSIS) && defined(DEBUG)
uint8_t *ptr = (uint8_t *) v + 3;

View File

@ -1 +1 @@
37b6af08d1e6059f152ae515d8d7422a346cf7ed
8a03481ec145a3a0e532637dd52bf80605b7a713

View File

@ -12,12 +12,12 @@
#include "jsalloc.h"
#include "jsapi.h"
#include "jsprvtd.h"
#include "jshash.h"
#include "jspubtd.h"
#include "jslock.h"
#include "gc/Barrier.h"
#include "js/HashTable.h"
#include "mozilla/HashFunctions.h"
struct JSIdArray {
int length;
@ -83,23 +83,15 @@ JSID_TO_ATOM(jsid id)
return (JSAtom *)JSID_TO_STRING(id);
}
JS_STATIC_ASSERT(sizeof(JSHashNumber) == 4);
JS_STATIC_ASSERT(sizeof(js::HashNumber) == 4);
JS_STATIC_ASSERT(sizeof(jsid) == JS_BYTES_PER_WORD);
namespace js {
static JS_ALWAYS_INLINE JSHashNumber
static JS_ALWAYS_INLINE js::HashNumber
HashId(jsid id)
{
JSHashNumber n =
#if JS_BYTES_PER_WORD == 4
JSHashNumber(JSID_BITS(id));
#elif JS_BYTES_PER_WORD == 8
JSHashNumber(JSID_BITS(id)) ^ JSHashNumber(JSID_BITS(id) >> 32);
#else
# error "Unsupported configuration"
#endif
return n * JS_GOLDEN_RATIO;
return HashGeneric(JSID_BITS(id));
}
static JS_ALWAYS_INLINE Value
@ -135,15 +127,6 @@ struct DefaultHasher<jsid>
}
#if JS_BYTES_PER_WORD == 4
# define ATOM_HASH(atom) ((JSHashNumber)(atom) >> 2)
#elif JS_BYTES_PER_WORD == 8
# define ATOM_HASH(atom) (((JSHashNumber)(uintptr_t)(atom) >> 3) ^ \
(JSHashNumber)((uintptr_t)(atom) >> 32))
#else
# error "Unsupported configuration"
#endif
/*
* Return a printable, lossless char[] representation of a string-type atom.
* The lifetime of the result matches the lifetime of bytes.
@ -342,29 +325,28 @@ extern const char js_send_str[];
extern const char js_getter_str[];
extern const char js_setter_str[];
namespace js {
/*
* Initialize atom state. Return true on success, false on failure to allocate
* memory. The caller must zero rt->atomState before calling this function and
* only call it after js_InitGC successfully returns.
*/
extern JSBool
js_InitAtomState(JSRuntime *rt);
InitAtomState(JSRuntime *rt);
/*
* Free and clear atom state including any interned string atoms. This
* function must be called before js_FinishGC.
*/
extern void
js_FinishAtomState(JSRuntime *rt);
FinishAtomState(JSRuntime *rt);
/*
* Atom tracing and garbage collection hooks.
*/
namespace js {
extern void
MarkAtomState(JSTracer *trc, bool markAll);
MarkAtomState(JSTracer *trc);
extern void
SweepAtomState(JSRuntime *rt);
@ -382,58 +364,32 @@ enum InternBehavior
InternAtom = true
};
} /* namespace js */
extern JSAtom *
Atomize(JSContext *cx, const char *bytes, size_t length,
js::InternBehavior ib = js::DoNotInternAtom,
js::FlationCoding fc = js::NormalEncoding);
extern JSAtom *
js_Atomize(JSContext *cx, const char *bytes, size_t length,
js::InternBehavior ib = js::DoNotInternAtom,
js::FlationCoding fc = js::NormalEncoding);
AtomizeChars(JSContext *cx, const jschar *chars, size_t length,
js::InternBehavior ib = js::DoNotInternAtom);
extern JSAtom *
js_AtomizeChars(JSContext *cx, const jschar *chars, size_t length,
js::InternBehavior ib = js::DoNotInternAtom);
extern JSAtom *
js_AtomizeString(JSContext *cx, JSString *str, js::InternBehavior ib = js::DoNotInternAtom);
/*
* Return an existing atom for the given char array or null if the char
* sequence is currently not atomized.
*/
extern JSAtom *
js_GetExistingStringAtom(JSContext *cx, const jschar *chars, size_t length);
#ifdef DEBUG
extern JS_FRIEND_API(void)
js_DumpAtoms(JSContext *cx, FILE *fp);
#endif
namespace js {
AtomizeString(JSContext *cx, JSString *str, js::InternBehavior ib = js::DoNotInternAtom);
inline JSAtom *
ToAtom(JSContext *cx, const js::Value &v);
bool
InternNonIntElementId(JSContext *cx, JSObject *obj, const Value &idval,
jsid *idp, Value *vp);
jsid *idp, MutableHandleValue vp);
inline bool
InternNonIntElementId(JSContext *cx, JSObject *obj, const Value &idval, jsid *idp)
{
Value dummy;
RootedValue dummy(cx);
return InternNonIntElementId(cx, obj, idval, idp, &dummy);
}
/*
* For all unmapped atoms recorded in al, add a mapping from the atom's index
* to its address. map->length must already be set to the number of atoms in
* the list and map->vector must point to pre-allocated memory.
*/
extern void
InitAtomMap(JSContext *cx, AtomIndexMap *indices, HeapPtr<JSAtom> *atoms);
template<XDRMode mode>
bool
XDRAtom(XDRState<mode> *xdr, JSAtom **atomp);

View File

@ -43,6 +43,7 @@ DEFINE_ATOM(call, "call")
DEFINE_ATOM(callee, "callee")
DEFINE_ATOM(caller, "caller")
DEFINE_ATOM(classPrototype, "prototype")
DEFINE_ATOM(columnNumber, "columnNumber")
DEFINE_ATOM(constructor, "constructor")
DEFINE_ATOM(each, "each")
DEFINE_ATOM(eval, "eval")
@ -53,7 +54,8 @@ DEFINE_ATOM(ignoreCase, "ignoreCase")
DEFINE_ATOM(index, "index")
DEFINE_ATOM(input, "input")
DEFINE_ATOM(toISOString, "toISOString")
DEFINE_ATOM(iterator, "__iterator__")
DEFINE_ATOM(iterator, "iterator")
DEFINE_ATOM(iteratorIntrinsic, "__iterator__")
DEFINE_ATOM(join, "join")
DEFINE_ATOM(lastIndex, "lastIndex")
DEFINE_ATOM(length, "length")
@ -121,6 +123,7 @@ DEFINE_PROTOTYPE_ATOM(WeakMap)
DEFINE_ATOM(buffer, "buffer")
DEFINE_ATOM(byteLength, "byteLength")
DEFINE_ATOM(byteOffset, "byteOffset")
DEFINE_ATOM(shape, "shape")
DEFINE_KEYWORD_ATOM(return)
DEFINE_KEYWORD_ATOM(throw)
DEFINE_ATOM(url, "url")
@ -148,3 +151,4 @@ DEFINE_ATOM(unescape, "unescape")
DEFINE_ATOM(uneval, "uneval")
DEFINE_ATOM(unwatch, "unwatch")
DEFINE_ATOM(watch, "watch")
DEFINE_ATOM(_CallFunction, "_CallFunction")

View File

@ -161,35 +161,35 @@ typedef JSBool
(* LookupSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid,
MutableHandleObject objp, MutableHandleShape propp);
typedef JSBool
(* DefineGenericOp)(JSContext *cx, HandleObject obj, HandleId id, const Value *value,
(* DefineGenericOp)(JSContext *cx, HandleObject obj, HandleId id, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* DefinePropOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, const Value *value,
(* DefinePropOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* DefineElementOp)(JSContext *cx, HandleObject obj, uint32_t index, const Value *value,
(* DefineElementOp)(JSContext *cx, HandleObject obj, uint32_t index, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* DefineSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, const Value *value,
(* DefineSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* GenericIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleId id, Value *vp);
(* GenericIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleId id, MutableHandleValue vp);
typedef JSBool
(* PropertyIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandlePropertyName name, Value *vp);
(* PropertyIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandlePropertyName name, MutableHandleValue vp);
typedef JSBool
(* ElementIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, Value *vp);
(* ElementIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, MutableHandleValue vp);
typedef JSBool
(* ElementIfPresentOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, Value *vp, bool* present);
(* ElementIfPresentOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, MutableHandleValue vp, bool* present);
typedef JSBool
(* SpecialIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleSpecialId sid, Value *vp);
(* SpecialIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleSpecialId sid, MutableHandleValue vp);
typedef JSBool
(* StrictGenericIdOp)(JSContext *cx, HandleObject obj, HandleId id, Value *vp, JSBool strict);
(* StrictGenericIdOp)(JSContext *cx, HandleObject obj, HandleId id, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* StrictPropertyIdOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, Value *vp, JSBool strict);
(* StrictPropertyIdOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* StrictElementIdOp)(JSContext *cx, HandleObject obj, uint32_t index, Value *vp, JSBool strict);
(* StrictElementIdOp)(JSContext *cx, HandleObject obj, uint32_t index, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* StrictSpecialIdOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, Value *vp, JSBool strict);
(* StrictSpecialIdOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* GenericAttributesOp)(JSContext *cx, HandleObject obj, HandleId id, unsigned *attrsp);
typedef JSBool
@ -199,11 +199,11 @@ typedef JSBool
typedef JSBool
(* SpecialAttributesOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, unsigned *attrsp);
typedef JSBool
(* DeletePropertyOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, Value *vp, JSBool strict);
(* DeletePropertyOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* DeleteElementOp)(JSContext *cx, HandleObject obj, uint32_t index, Value *vp, JSBool strict);
(* DeleteElementOp)(JSContext *cx, HandleObject obj, uint32_t index, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* DeleteSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, Value *vp, JSBool strict);
(* DeleteSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, MutableHandleValue vp, JSBool strict);
typedef JSType
(* TypeOfOp)(JSContext *cx, HandleObject obj);
@ -257,9 +257,22 @@ struct ClassExtension
* WeakMaps use this to override the wrapper disposal optimization.
*/
bool isWrappedNative;
/*
* If an object is used as a key in a weakmap, it may be desirable for the
* garbage collector to keep that object around longer than it otherwise
* would. A common case is when the key is a wrapper around an object in
* another compartment, and we want to avoid collecting the wrapper (and
* removing the weakmap entry) as long as the wrapped object is alive. In
* that case, the wrapped object is returned by the wrapper's
* weakmapKeyDelegateOp hook. As long as the wrapper is used as a weakmap
* key, it will not be collected (and remain in the weakmap) until the
* wrapped object is collected.
*/
JSWeakmapKeyDelegateOp weakmapKeyDelegateOp;
};
#define JS_NULL_CLASS_EXT {NULL,NULL,NULL,NULL,NULL,false}
#define JS_NULL_CLASS_EXT {NULL,NULL,NULL,NULL,NULL,false,NULL}
struct ObjectOps
{

View File

@ -221,12 +221,6 @@ JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fp);
extern JS_PUBLIC_API(void)
JS_SetFrameAnnotation(JSContext *cx, JSStackFrame *fp, void *annotation);
extern JS_PUBLIC_API(JSPrincipals*)
JS_GetPrincipalIfDummyFrame(JSContext *cx, JSStackFrame *fpArg);
extern JS_PUBLIC_API(JSBool)
JS_IsScriptFrame(JSContext *cx, JSStackFrame *fp);
extern JS_PUBLIC_API(JSObject *)
JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp);

View File

@ -22,10 +22,10 @@ extern JS_FRIEND_API(JSString *)
JS_GetAnonymousString(JSRuntime *rt);
extern JS_FRIEND_API(JSObject *)
JS_FindCompilationScope(JSContext *cx, JSObject *obj);
JS_FindCompilationScope(JSContext *cx, JSRawObject obj);
extern JS_FRIEND_API(JSFunction *)
JS_GetObjectFunction(JSObject *obj);
JS_GetObjectFunction(JSRawObject obj);
extern JS_FRIEND_API(JSObject *)
JS_GetGlobalForFrame(JSStackFrame *fp);
@ -37,7 +37,7 @@ extern JS_FRIEND_API(JSObject *)
JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObject *parent);
extern JS_FRIEND_API(uint32_t)
JS_ObjectCountDynamicSlots(JSObject *obj);
JS_ObjectCountDynamicSlots(JSHandleObject obj);
extern JS_FRIEND_API(void)
JS_ShrinkGCBuffers(JSRuntime *rt);
@ -75,13 +75,18 @@ enum {
JS_TELEMETRY_GC_REASON,
JS_TELEMETRY_GC_IS_COMPARTMENTAL,
JS_TELEMETRY_GC_MS,
JS_TELEMETRY_GC_MAX_PAUSE_MS,
JS_TELEMETRY_GC_MARK_MS,
JS_TELEMETRY_GC_SWEEP_MS,
JS_TELEMETRY_GC_MARK_ROOTS_MS,
JS_TELEMETRY_GC_MARK_GRAY_MS,
JS_TELEMETRY_GC_SLICE_MS,
JS_TELEMETRY_GC_MMU_50,
JS_TELEMETRY_GC_RESET,
JS_TELEMETRY_GC_INCREMENTAL_DISABLED,
JS_TELEMETRY_GC_NON_INCREMENTAL
JS_TELEMETRY_GC_NON_INCREMENTAL,
JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS,
JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS
};
typedef void
@ -108,7 +113,7 @@ extern JS_FRIEND_API(JSObject *)
JS_CloneObject(JSContext *cx, JSObject *obj, JSObject *proto, JSObject *parent);
extern JS_FRIEND_API(JSBool)
js_GetterOnlyPropertyStub(JSContext *cx, JSHandleObject obj, JSHandleId id, JSBool strict, jsval *vp);
js_GetterOnlyPropertyStub(JSContext *cx, JSHandleObject obj, JSHandleId id, JSBool strict, JSMutableHandleValue vp);
JS_FRIEND_API(void)
js_ReportOverRecursed(JSContext *maybecx);
@ -159,6 +164,8 @@ struct JSFunctionSpecWithHelp {
#define JS_FN_HELP(name,call,nargs,flags,usage,help) \
{name, call, nargs, (flags) | JSPROP_ENUMERATE | JSFUN_STUB_GSOPS, usage, help}
#define JS_FS_HELP_END \
{NULL, NULL, 0, 0, NULL, NULL}
extern JS_FRIEND_API(bool)
JS_DefineFunctionsWithHelp(JSContext *cx, JSObject *obj, const JSFunctionSpecWithHelp *fs);
@ -169,6 +176,11 @@ JS_END_EXTERN_C
#ifdef __cplusplus
typedef bool (* JS_SourceHook)(JSContext *cx, JSScript *script, jschar **src, uint32_t *length);
extern JS_FRIEND_API(void)
JS_SetSourceHook(JSRuntime *rt, JS_SourceHook hook);
namespace js {
struct RuntimeFriendFields {
@ -213,7 +225,7 @@ class JS_FRIEND_API(AutoSwitchCompartment) {
public:
AutoSwitchCompartment(JSContext *cx, JSCompartment *newCompartment
JS_GUARD_OBJECT_NOTIFIER_PARAM);
AutoSwitchCompartment(JSContext *cx, JSObject *target JS_GUARD_OBJECT_NOTIFIER_PARAM);
AutoSwitchCompartment(JSContext *cx, JSHandleObject target JS_GUARD_OBJECT_NOTIFIER_PARAM);
~AutoSwitchCompartment();
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
@ -274,6 +286,9 @@ typedef void
extern JS_FRIEND_API(void)
VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback *callback, void *closure);
extern JS_FRIEND_API(JSObject *)
GetWeakmapKeyDelegate(JSObject *key);
/*
* Shadow declarations of JS internal structures, for access by inline access
* functions below. Do not use these structures in any other way. When adding
@ -318,6 +333,16 @@ struct Object {
}
};
struct Function {
Object base;
uint16_t nargs;
uint16_t flags;
/* Used only for natives */
Native native;
const JSJitInfo *jitinfo;
void *_1;
};
struct Atom {
size_t _;
const jschar *chars;
@ -339,35 +364,35 @@ extern JS_FRIEND_DATA(js::Class) XMLClass;
extern JS_FRIEND_DATA(js::Class) ObjectClass;
inline js::Class *
GetObjectClass(const JSObject *obj)
GetObjectClass(RawObject obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->shape->base->clasp;
}
inline JSClass *
GetObjectJSClass(const JSObject *obj)
GetObjectJSClass(RawObject obj)
{
return js::Jsvalify(GetObjectClass(obj));
}
JS_FRIEND_API(bool)
IsScopeObject(JSObject *obj);
IsScopeObject(RawObject obj);
inline JSObject *
GetObjectParent(JSObject *obj)
GetObjectParent(RawObject obj)
{
JS_ASSERT(!IsScopeObject(obj));
return reinterpret_cast<shadow::Object*>(obj)->shape->base->parent;
}
JS_FRIEND_API(JSObject *)
GetObjectParentMaybeScope(JSObject *obj);
GetObjectParentMaybeScope(RawObject obj);
JS_FRIEND_API(JSObject *)
GetGlobalForObjectCrossCompartment(JSObject *obj);
GetGlobalForObjectCrossCompartment(RawObject obj);
JS_FRIEND_API(void)
NotifyAnimationActivity(JSObject *obj);
NotifyAnimationActivity(RawObject obj);
JS_FRIEND_API(bool)
IsOriginalScriptFunction(JSFunction *fun);
@ -391,19 +416,19 @@ InitClassWithReserved(JSContext *cx, JSObject *obj, JSObject *parent_proto,
JSPropertySpec *static_ps, JSFunctionSpec *static_fs);
JS_FRIEND_API(const Value &)
GetFunctionNativeReserved(JSObject *fun, size_t which);
GetFunctionNativeReserved(RawObject fun, size_t which);
JS_FRIEND_API(void)
SetFunctionNativeReserved(JSObject *fun, size_t which, const Value &val);
SetFunctionNativeReserved(RawObject fun, size_t which, const Value &val);
inline JSObject *
GetObjectProto(JSObject *obj)
GetObjectProto(RawObject obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->type->proto;
}
inline void *
GetObjectPrivate(JSObject *obj)
GetObjectPrivate(RawObject obj)
{
const shadow::Object *nobj = reinterpret_cast<const shadow::Object*>(obj);
void **addr = reinterpret_cast<void**>(&nobj->fixedSlots()[nobj->numFixedSlots()]);
@ -415,17 +440,17 @@ GetObjectPrivate(JSObject *obj)
* within the maximum capacity for the object's fixed slots).
*/
inline const Value &
GetReservedSlot(const JSObject *obj, size_t slot)
GetReservedSlot(RawObject obj, size_t slot)
{
JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj)));
return reinterpret_cast<const shadow::Object *>(obj)->slotRef(slot);
}
JS_FRIEND_API(void)
SetReservedSlotWithBarrier(JSObject *obj, size_t slot, const Value &value);
SetReservedSlotWithBarrier(RawObject obj, size_t slot, const Value &value);
inline void
SetReservedSlot(JSObject *obj, size_t slot, const Value &value)
SetReservedSlot(RawObject obj, size_t slot, const Value &value)
{
JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj)));
shadow::Object *sobj = reinterpret_cast<shadow::Object *>(obj);
@ -436,22 +461,15 @@ SetReservedSlot(JSObject *obj, size_t slot, const Value &value)
}
JS_FRIEND_API(uint32_t)
GetObjectSlotSpan(JSObject *obj);
GetObjectSlotSpan(RawObject obj);
inline const Value &
GetObjectSlot(JSObject *obj, size_t slot)
GetObjectSlot(RawObject obj, size_t slot)
{
JS_ASSERT(slot < GetObjectSlotSpan(obj));
return reinterpret_cast<const shadow::Object *>(obj)->slotRef(slot);
}
inline Shape *
GetObjectShape(JSObject *obj)
{
shadow::Shape *shape = reinterpret_cast<const shadow::Object*>(obj)->shape;
return reinterpret_cast<Shape *>(shape);
}
inline const jschar *
GetAtomChars(JSAtom *atom)
{
@ -465,19 +483,19 @@ AtomToLinearString(JSAtom *atom)
}
static inline js::PropertyOp
CastAsJSPropertyOp(JSObject *object)
CastAsJSPropertyOp(RawObject object)
{
return JS_DATA_TO_FUNC_PTR(js::PropertyOp, object);
}
static inline js::StrictPropertyOp
CastAsJSStrictPropertyOp(JSObject *object)
CastAsJSStrictPropertyOp(RawObject object)
{
return JS_DATA_TO_FUNC_PTR(js::StrictPropertyOp, object);
}
JS_FRIEND_API(bool)
GetPropertyNames(JSContext *cx, JSObject *obj, unsigned flags, js::AutoIdVector *props);
GetPropertyNames(JSContext *cx, RawObject obj, unsigned flags, js::AutoIdVector *props);
JS_FRIEND_API(bool)
GetGeneric(JSContext *cx, JSObject *obj, JSObject *receiver, jsid id, Value *vp);
@ -489,7 +507,7 @@ JS_FRIEND_API(void)
SetPreserveWrapperCallback(JSRuntime *rt, PreserveWrapperCallback callback);
JS_FRIEND_API(bool)
IsObjectInContextCompartment(const JSObject *obj, const JSContext *cx);
IsObjectInContextCompartment(RawObject obj, const JSContext *cx);
/*
* NB: these flag bits are encoded into the bytecode stream in the immediate
@ -542,19 +560,66 @@ GetPCCountScriptContents(JSContext *cx, size_t script);
*
* For more detailed information, see vm/SPSProfiler.h
*/
struct ProfileEntry {
class ProfileEntry
{
/*
* These two fields are marked as 'volatile' so that the compiler doesn't
* re-order instructions which modify them. The operation in question is:
* All fields are marked volatile to prevent the compiler from re-ordering
* instructions. Namely this sequence:
*
* stack[i].string = str;
* (*size)++;
* entry[size] = ...;
* size++;
*
* If the size increment were re-ordered before the store of the string,
* then if sampling occurred there would be a bogus entry on the stack.
* If the size modification were somehow reordered before the stores, then
* if a sample were taken it would be examining bogus information.
*
* A ProfileEntry represents both a C++ profile entry and a JS one. Both use
* the string as a description, but JS uses the sp as NULL to indicate that
* it is a JS entry. The script_ is then only ever examined for a JS entry,
* and the idx is used by both, but with different meanings.
*/
const char * volatile string;
void * volatile sp;
const char * volatile string; // Descriptive string of this entry
void * volatile sp; // Relevant stack pointer for the entry
JSScript * volatile script_; // if js(), non-null script which is running
int32_t volatile idx; // if js(), idx of pc, otherwise line number
public:
/*
* All of these methods are marked with the 'volatile' keyword because SPS's
* representation of the stack is stored such that all ProfileEntry
* instances are volatile. These methods would not be available unless they
* were marked as volatile as well
*/
bool js() volatile {
JS_ASSERT_IF(sp == NULL, script_ != NULL);
return sp == NULL;
}
uint32_t line() volatile { JS_ASSERT(!js()); return idx; }
JSScript *script() volatile { JS_ASSERT(js()); return script_; }
void *stackAddress() volatile { return sp; }
const char *label() volatile { return string; }
void setLine(uint32_t line) volatile { JS_ASSERT(!js()); idx = line; }
void setLabel(const char *string) volatile { this->string = string; }
void setStackAddress(void *sp) volatile { this->sp = sp; }
void setScript(JSScript *script) volatile { script_ = script; }
/* we can't know the layout of JSScript, so look in vm/SPSProfiler.cpp */
JS_FRIEND_API(jsbytecode *) pc() volatile;
JS_FRIEND_API(void) setPC(jsbytecode *pc) volatile;
static size_t offsetOfString() { return offsetof(ProfileEntry, string); }
static size_t offsetOfStackAddress() { return offsetof(ProfileEntry, sp); }
static size_t offsetOfPCIdx() { return offsetof(ProfileEntry, idx); }
static size_t offsetOfScript() { return offsetof(ProfileEntry, script_); }
/*
* The index used in the entry can either be a line number or the offset of
* a pc into a script's code. To signify a NULL pc, use a -1 index. This is
* checked against in pc() and setPC() to set/get the right pc.
*/
static const int32_t NullPCIndex = -1;
};
JS_FRIEND_API(void)
@ -564,6 +629,9 @@ SetRuntimeProfilingStack(JSRuntime *rt, ProfileEntry *stack, uint32_t *size,
JS_FRIEND_API(void)
EnableRuntimeProfilingStack(JSRuntime *rt, bool enabled);
JS_FRIEND_API(jsbytecode*)
ProfilingGetPC(JSRuntime *rt, JSScript *script, void *ip);
#ifdef JS_THREADSAFE
JS_FRIEND_API(void *)
GetOwnerThread(const JSContext *cx);
@ -624,6 +692,7 @@ SizeOfJSContext();
D(DEBUG_GC) \
D(DEBUG_MODE_GC) \
D(TRANSPLANT) \
D(RESET) \
\
/* Reasons from Firefox */ \
D(DOM_WINDOW_UTILS) \
@ -695,7 +764,7 @@ extern JS_FRIEND_API(void)
ShrinkingGC(JSRuntime *rt, gcreason::Reason reason);
extern JS_FRIEND_API(void)
IncrementalGC(JSRuntime *rt, gcreason::Reason reason);
IncrementalGC(JSRuntime *rt, gcreason::Reason reason, int64_t millis = 0);
extern JS_FRIEND_API(void)
FinishIncrementalGC(JSRuntime *rt, gcreason::Reason reason);
@ -733,6 +802,30 @@ typedef void
extern JS_FRIEND_API(GCSliceCallback)
SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback);
typedef void
(* AnalysisPurgeCallback)(JSRuntime *rt, JSFlatString *desc);
extern JS_FRIEND_API(AnalysisPurgeCallback)
SetAnalysisPurgeCallback(JSRuntime *rt, AnalysisPurgeCallback callback);
/* Was the most recent GC run incrementally? */
extern JS_FRIEND_API(bool)
WasIncrementalGC(JSRuntime *rt);
typedef JSBool
(* DOMInstanceClassMatchesProto)(JSHandleObject protoObject, uint32_t protoID,
uint32_t depth);
struct JSDOMCallbacks {
DOMInstanceClassMatchesProto instanceClassMatchesProto;
};
typedef struct JSDOMCallbacks DOMCallbacks;
extern JS_FRIEND_API(void)
SetDOMCallbacks(JSRuntime *rt, const DOMCallbacks *callbacks);
extern JS_FRIEND_API(const DOMCallbacks *)
GetDOMCallbacks(JSRuntime *rt);
/*
* Signals a good place to do an incremental slice, because the browser is
* drawing a frame.
@ -753,7 +846,7 @@ extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(JSObject *obj);
IsIncrementalBarrierNeededOnObject(RawObject obj);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnScript(JSScript *obj);
@ -821,21 +914,68 @@ CastToJSFreeOp(FreeOp *fop)
/* Implemented in jsexn.cpp. */
/*
* Get an error type name from a number.
* If no exception is associated, return NULL.
* Get an error type name from a JSExnType constant.
* Returns NULL for invalid arguments and JSEXN_INTERNALERR
*/
extern JS_FRIEND_API(const jschar*)
GetErrorTypeNameFromNumber(JSContext* cx, const unsigned errorNumber);
GetErrorTypeName(JSContext* cx, int16_t exnType);
/* Implemented in jswrapper.cpp. */
typedef enum NukedGlobalHandling {
NukeForGlobalObject,
DontNukeForGlobalObject
} NukedGlobalHandling;
typedef enum NukeReferencesToWindow {
NukeWindowReferences,
DontNukeWindowReferences
} NukeReferencesToWindow;
/*
* These filters are designed to be ephemeral stack classes, and thus don't
* do any rooting or holding of their members.
*/
struct CompartmentFilter {
virtual bool match(JSCompartment *c) const = 0;
};
struct AllCompartments : public CompartmentFilter {
virtual bool match(JSCompartment *c) const { return true; }
};
struct ContentCompartmentsOnly : public CompartmentFilter {
virtual bool match(JSCompartment *c) const {
return !IsSystemCompartment(c);
}
};
struct ChromeCompartmentsOnly : public CompartmentFilter {
virtual bool match(JSCompartment *c) const {
return IsSystemCompartment(c);
}
};
struct SingleCompartment : public CompartmentFilter {
JSCompartment *ours;
SingleCompartment(JSCompartment *c) : ours(c) {}
virtual bool match(JSCompartment *c) const { return c == ours; }
};
struct CompartmentsWithPrincipals : public CompartmentFilter {
JSPrincipals *principals;
CompartmentsWithPrincipals(JSPrincipals *p) : principals(p) {}
virtual bool match(JSCompartment *c) const {
return JS_GetCompartmentPrincipals(c) == principals;
}
};
extern JS_FRIEND_API(JSBool)
NukeChromeCrossCompartmentWrappersForGlobal(JSContext *cx, JSObject *obj,
NukedGlobalHandling nukeGlobal);
NukeCrossCompartmentWrappers(JSContext* cx,
const CompartmentFilter& sourceFilter,
const CompartmentFilter& targetFilter,
NukeReferencesToWindow nukeReferencesToWindow);
/* Specify information about ListBase proxies in the DOM, for use by ICs. */
JS_FRIEND_API(void)
SetListBaseInformation(void *listBaseHandlerFamily, uint32_t listBaseExpandoSlot);
void *GetListBaseHandlerFamily();
uint32_t GetListBaseExpandoSlot();
} /* namespace js */
@ -851,7 +991,7 @@ extern JS_FRIEND_API(JSBool)
js_DateIsValid(JSContext *cx, JSObject* obj);
extern JS_FRIEND_API(double)
js_DateGetMsecSinceEpoch(JSContext *cx, JSObject *obj);
js_DateGetMsecSinceEpoch(JSContext *cx, JSRawObject obj);
/* Implemented in jscntxt.cpp. */
@ -1043,6 +1183,35 @@ JS_IsFloat32Array(JSObject *obj, JSContext *cx);
extern JS_FRIEND_API(JSBool)
JS_IsFloat64Array(JSObject *obj, JSContext *cx);
/*
* Unwrap Typed arrays all at once. Return NULL without throwing if the object
* cannot be viewed as the correct typed array, or the typed array object on
* success, filling both outparameters.
*/
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsInt8Array(JSContext *cx, JSObject *obj, uint32_t *length, int8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint8Array(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint8ClampedArray(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsInt16Array(JSContext *cx, JSObject *obj, uint32_t *length, int16_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint16Array(JSContext *cx, JSObject *obj, uint32_t *length, uint16_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsInt32Array(JSContext *cx, JSObject *obj, uint32_t *length, int32_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint32Array(JSContext *cx, JSObject *obj, uint32_t *length, uint32_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsFloat32Array(JSContext *cx, JSObject *obj, uint32_t *length, float **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsFloat64Array(JSContext *cx, JSObject *obj, uint32_t *length, double **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsArrayBufferView(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsArrayBuffer(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
/*
* Get the type of elements in a typed array.
*
@ -1213,4 +1382,42 @@ JS_GetDataViewByteLength(JSObject *obj, JSContext *cx);
JS_FRIEND_API(void *)
JS_GetDataViewData(JSObject *obj, JSContext *cx);
#ifdef __cplusplus
/*
* This struct contains metadata passed from the DOM to the JS Engine for JIT
* optimizations on DOM property accessors. Eventually, this should be made
* available to general JSAPI users, but we are not currently ready to do so.
*/
/* JIT-callable property accessor (getter/setter) entry point. */
typedef bool
(* JSJitPropertyOp)(JSContext *cx, JSHandleObject thisObj,
                    void *specializedThis, JS::Value *vp);
/* JIT-callable method entry point; takes an explicit argc/vp pair. */
typedef bool
(* JSJitMethodOp)(JSContext *cx, JSHandleObject thisObj,
                  void *specializedThis, unsigned argc, JS::Value *vp);
/* Per-accessor JIT metadata; see the comment above this struct. */
struct JSJitInfo {
    JSJitPropertyOp op;   /* the JIT-callable entry point */
    uint32_t protoID;     /* prototype identifier -- TODO confirm meaning against DOM bindings */
    uint32_t depth;       /* depth value paired with protoID -- TODO confirm semantics */
    bool isInfallible;    /* True if op is guaranteed not to fail. Getters only */
    bool isConstant;      /* Getting a construction-time constant? */
};
/*
 * Extract the JIT metadata attached to a function value. The value must hold
 * an object of js::FunctionClass (asserted in debug builds).
 */
static JS_ALWAYS_INLINE const JSJitInfo *
FUNCTION_VALUE_TO_JITINFO(const JS::Value& v)
{
    JSObject *funobj = &v.toObject();
    JS_ASSERT(js::GetObjectClass(funobj) == &js::FunctionClass);
    return reinterpret_cast<js::shadow::Function *>(funobj)->jitinfo;
}
/* Attach JIT metadata to a function. The caller must pass a native function. */
static JS_ALWAYS_INLINE void
SET_JITINFO(JSFunction * func, const JSJitInfo *info)
{
    js::shadow::Function *shadowFun = reinterpret_cast<js::shadow::Function *>(func);
    /*
     * Interpreted functions must not carry jitinfo; 0x4000 is JSFUN_INTERPRETED
     * (stand-in for JS_ASSERT(func->isNative()), not visible from here).
     */
    JS_ASSERT(!(shadowFun->flags & 0x4000));
    shadowFun->jitinfo = info;
}
#endif /* __cplusplus */
#endif /* jsfriendapi_h___ */

View File

@ -41,6 +41,7 @@ namespace js {
class GCHelperThread;
struct Shape;
struct SliceBudget;
namespace gc {
@ -48,6 +49,8 @@ enum State {
NO_INCREMENTAL,
MARK_ROOTS,
MARK,
SWEEP,
SWEEP_END,
INVALID
};
@ -112,36 +115,100 @@ MapAllocToTraceKind(AllocKind thingKind)
return map[thingKind];
}
/*
 * Returns whether GC things of the given allocation kind may be allocated in
 * the nursery. Per the table: the *_BACKGROUND object kinds and (short)
 * strings qualify; scripts, shapes, type objects, XML and external strings
 * do not.
 */
static inline bool
IsNurseryAllocable(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    /* One entry per AllocKind; order must match the AllocKind enumeration. */
    static const bool map[FINALIZE_LIMIT] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_SHAPE */
        false,     /* FINALIZE_BASE_SHAPE */
        false,     /* FINALIZE_TYPE_OBJECT */
#if JS_HAS_XML_SUPPORT
        false,     /* FINALIZE_XML */
#endif
        true,      /* FINALIZE_SHORT_STRING */
        true,      /* FINALIZE_STRING */
        false      /* FINALIZE_EXTERNAL_STRING */
    };
    return map[kind];
}
/*
 * Returns whether things of the given allocation kind are finalized on the
 * background (GC helper) thread rather than on the main thread. The table
 * mirrors IsNurseryAllocable above: *_BACKGROUND object kinds and (short)
 * strings are background-finalized.
 */
static inline bool
IsBackgroundFinalized(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    /* One entry per AllocKind; order must match the AllocKind enumeration. */
    static const bool map[FINALIZE_LIMIT] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_SHAPE */
        false,     /* FINALIZE_BASE_SHAPE */
        false,     /* FINALIZE_TYPE_OBJECT */
#if JS_HAS_XML_SUPPORT
        false,     /* FINALIZE_XML */
#endif
        true,      /* FINALIZE_SHORT_STRING */
        true,      /* FINALIZE_STRING */
        false      /* FINALIZE_EXTERNAL_STRING */
    };
    return map[kind];
}
inline JSGCTraceKind
GetGCThingTraceKind(const void *thing);
/*
* ArenaList::head points to the start of the list. Normally cursor points
* to the first arena in the list with some free things and all arenas
* before cursor are fully allocated. However, as the arena currently being
* allocated from is considered full while its list of free spans is moved
* into the freeList, during the GC or cell enumeration, when an
* unallocated freeList is moved back to the arena, we can see an arena
* with some free cells before the cursor. The cursor is an indirect
* pointer to allow for efficient list insertion at the cursor point and
* other list manipulations.
*/
/* Singly linked list of arenas; see the invariants in the comment above. */
struct ArenaList {
    ArenaHeader *head;     /* start of the list */
    ArenaHeader **cursor;  /* indirect pointer: first arena that may have free things */

    ArenaList() {
        clear();
    }

    void clear() {
        head = NULL;
        cursor = &head;    /* empty list: cursor points back at head */
    }

    /* Insert an arena at the cursor position. */
    void insert(ArenaHeader *arena);
};
struct ArenaLists {
/*
* ArenaList::head points to the start of the list. Normally cursor points
* to the first arena in the list with some free things and all arenas
* before cursor are fully allocated. However, as the arena currently being
* allocated from is considered full while its list of free spans is moved
* into the freeList, during the GC or cell enumeration, when an
* unallocated freeList is moved back to the arena, we can see an arena
* with some free cells before the cursor. The cursor is an indirect
* pointer to allow for efficient list insertion at the cursor point and
* other list manipulations.
*/
struct ArenaList {
ArenaHeader *head;
ArenaHeader **cursor;
ArenaList() {
clear();
}
void clear() {
head = NULL;
cursor = &head;
}
};
private:
/*
* For each arena kind its free list is represented as the first span with
@ -180,12 +247,18 @@ struct ArenaLists {
volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
public:
/* For each arena kind, a list of arenas remaining to be swept. */
ArenaHeader *arenaListsToSweep[FINALIZE_LIMIT];
public:
ArenaLists() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
freeLists[i].initAsEmpty();
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
backgroundFinalizeState[i] = BFS_DONE;
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
arenaListsToSweep[i] = NULL;
}
~ArenaLists() {
@ -211,6 +284,10 @@ struct ArenaLists {
return arenaLists[thingKind].head;
}
ArenaHeader *getFirstArenaToSweep(AllocKind thingKind) const {
return arenaListsToSweep[thingKind];
}
bool arenaListsAreEmpty() const {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
/*
@ -225,6 +302,10 @@ struct ArenaLists {
return true;
}
bool arenasAreFull(AllocKind thingKind) const {
return !*arenaLists[thingKind].cursor;
}
void unmarkAll() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
/* The background finalization must have stopped at this point. */
@ -238,7 +319,8 @@ struct ArenaLists {
}
bool doneBackgroundFinalize(AllocKind kind) const {
return backgroundFinalizeState[kind] == BFS_DONE;
return backgroundFinalizeState[kind] == BFS_DONE ||
backgroundFinalizeState[kind] == BFS_JUST_FINISHED;
}
/*
@ -333,16 +415,18 @@ struct ArenaLists {
JS_ASSERT(freeLists[kind].isEmpty());
}
void finalizeObjects(FreeOp *fop);
void finalizeStrings(FreeOp *fop);
void finalizeShapes(FreeOp *fop);
void finalizeScripts(FreeOp *fop);
void queueObjectsForSweep(FreeOp *fop);
void queueStringsForSweep(FreeOp *fop);
void queueShapesForSweep(FreeOp *fop);
void queueScriptsForSweep(FreeOp *fop);
static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead);
bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget);
static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgroundThread);
private:
inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
inline void finalizeLater(FreeOp *fop, AllocKind thingKind);
inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);
inline void *allocateFromArena(JSCompartment *comp, AllocKind thingKind);
};
@ -478,7 +562,7 @@ extern void
GC(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason, int64_t millis = 0);
extern void
GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
@ -539,8 +623,6 @@ class GCHelperThread {
void **freeCursor;
void **freeCursorEnd;
Vector<js::gc::ArenaHeader *, 64, js::SystemAllocPolicy> finalizeVector;
bool backgroundAllocation;
friend struct js::gc::ArenaLists;
@ -584,10 +666,10 @@ class GCHelperThread {
/* Must be called with the GC lock taken. */
void startBackgroundShrink();
/* Must be called with the GC lock taken. */
/* Must be called without the GC lock taken. */
void waitBackgroundSweepEnd();
/* Must be called with the GC lock taken. */
/* Must be called without the GC lock taken. */
void waitBackgroundSweepOrAllocEnd();
/* Must be called with the GC lock taken. */
@ -625,9 +707,6 @@ class GCHelperThread {
else
replenishAndFreeLater(ptr);
}
/* Must be called with the GC lock taken. */
bool prepareForBackgroundSweep();
};
@ -1071,22 +1150,33 @@ RunDebugGC(JSContext *cx);
void
SetDeterministicGC(JSContext *cx, bool enabled);
void
SetValidateGC(JSContext *cx, bool enabled);
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierValue = 4;
const int ZealFrameVerifierValue = 5;
const int ZealVerifierPreValue = 4;
const int ZealFrameVerifierPreValue = 5;
const int ZealStackRootingSafeValue = 6;
const int ZealStackRootingValue = 7;
const int ZealIncrementalRootsThenFinish = 8;
const int ZealIncrementalMarkAllThenFinish = 9;
const int ZealIncrementalMultipleSlices = 10;
const int ZealVerifierPostValue = 11;
const int ZealFrameVerifierPostValue = 12;
const int ZealPurgeAnalysisValue = 13;
enum VerifierType {
PreBarrierVerifier,
PostBarrierVerifier
};
#ifdef JS_GC_ZEAL
/* Check that write barriers have been used correctly. See jsgc.cpp. */
void
VerifyBarriers(JSRuntime *rt);
VerifyBarriers(JSRuntime *rt, VerifierType type);
void
MaybeVerifyBarriers(JSContext *cx, bool always = false);
@ -1094,7 +1184,7 @@ MaybeVerifyBarriers(JSContext *cx, bool always = false);
#else
static inline void
VerifyBarriers(JSRuntime *rt)
VerifyBarriers(JSRuntime *rt, VerifierType type)
{
}

View File

@ -1,120 +0,0 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef jshash_h___
#define jshash_h___
/*
* API to portable hash table code.
*/
#include <stddef.h>
#include <stdio.h>
#include "jstypes.h"
JS_BEGIN_EXTERN_C
typedef uint32_t JSHashNumber;
typedef struct JSHashEntry JSHashEntry;
typedef struct JSHashTable JSHashTable;
#define JS_HASH_BITS 32
#define JS_GOLDEN_RATIO 0x9E3779B9U
typedef JSHashNumber (* JSHashFunction)(const void *key);
typedef int (* JSHashComparator)(const void *v1, const void *v2);
typedef int (* JSHashEnumerator)(JSHashEntry *he, int i, void *arg);
/* Flag bits in JSHashEnumerator's return value */
#define HT_ENUMERATE_NEXT 0 /* continue enumerating entries */
#define HT_ENUMERATE_STOP 1 /* stop enumerating entries */
#define HT_ENUMERATE_REMOVE 2 /* remove and free the current entry */
/*
 * Pluggable allocation hooks for a JSHashTable; |pool| is the table's
 * allocPriv. A NULL ops pointer selects the malloc-based defaults
 * (see JS_NewHashTable below).
 */
typedef struct JSHashAllocOps {
    void *              (*allocTable)(void *pool, size_t size);             /* allocate table storage */
    void                (*freeTable)(void *pool, void *item, size_t size);  /* release table storage */
    JSHashEntry *       (*allocEntry)(void *pool, const void *key);         /* allocate one entry */
    void                (*freeEntry)(void *pool, JSHashEntry *he, unsigned flag); /* flag is HT_FREE_* */
} JSHashAllocOps;
#define HT_FREE_VALUE 0 /* just free the entry's value */
#define HT_FREE_ENTRY 1 /* free value and entire entry */
/* One hash-table entry; same-bucket collisions are chained through |next|. */
struct JSHashEntry {
    JSHashEntry         *next;          /* hash chain linkage */
    JSHashNumber        keyHash;        /* key hash function result */
    const void          *key;           /* ptr to opaque key */
    void                *value;         /* ptr to opaque value */
};
/* Chained hash table with multiplicative hashing and pluggable allocation. */
struct JSHashTable {
    JSHashEntry         **buckets;      /* vector of hash buckets */
    uint32_t            nentries;       /* number of entries in table */
    uint32_t            shift;          /* multiplicative hash shift */
    JSHashFunction      keyHash;        /* key hash function */
    JSHashComparator    keyCompare;     /* key comparison function */
    JSHashComparator    valueCompare;   /* value comparison function */
    JSHashAllocOps      *allocOps;      /* allocation operations */
    void                *allocPriv;     /* allocation private data */
#ifdef JS_HASHMETER
    /* Instrumentation counters, compiled in only with JS_HASHMETER. */
    uint32_t            nlookups;       /* total number of lookups */
    uint32_t            nsteps;         /* number of hash chains traversed */
    uint32_t            ngrows;         /* number of table expansions */
    uint32_t            nshrinks;       /* number of table contractions */
#endif
};
/*
 * Create a new hash table.
 * If allocOps is null, use default allocator ops built on top of malloc().
 */
extern JS_PUBLIC_API(JSHashTable *)
JS_NewHashTable(uint32_t n, JSHashFunction keyHash,
                JSHashComparator keyCompare, JSHashComparator valueCompare,
                JSHashAllocOps *allocOps, void *allocPriv);

/* Destroy the table and free its entries and bucket storage. */
extern JS_PUBLIC_API(void)
JS_HashTableDestroy(JSHashTable *ht);

/* Low level access methods */

/* Returns the bucket-chain slot where |key| lives (or would live). */
extern JS_PUBLIC_API(JSHashEntry **)
JS_HashTableRawLookup(JSHashTable *ht, JSHashNumber keyHash, const void *key);

#ifdef __cplusplus
/* C++ only: |hep| is a reference so it can be updated in place. */
extern JS_PUBLIC_API(JSHashEntry *)
JS_HashTableRawAdd(JSHashTable *ht, JSHashEntry **&hep, JSHashNumber keyHash,
                   const void *key, void *value);
#endif

extern JS_PUBLIC_API(void)
JS_HashTableRawRemove(JSHashTable *ht, JSHashEntry **hep, JSHashEntry *he);

/* Higher level access methods */
extern JS_PUBLIC_API(JSHashEntry *)
JS_HashTableAdd(JSHashTable *ht, const void *key, void *value);

extern JS_PUBLIC_API(JSBool)
JS_HashTableRemove(JSHashTable *ht, const void *key);

/* Invoke |f| on every entry; see the HT_ENUMERATE_* return flags above. */
extern JS_PUBLIC_API(int)
JS_HashTableEnumerateEntries(JSHashTable *ht, JSHashEnumerator f, void *arg);

extern JS_PUBLIC_API(void *)
JS_HashTableLookup(JSHashTable *ht, const void *key);

extern JS_PUBLIC_API(int)
JS_HashTableDump(JSHashTable *ht, JSHashEnumerator dump, FILE *fp);

/* General-purpose C string hash function. */
extern JS_PUBLIC_API(JSHashNumber)
JS_HashString(const void *key);

/* Stub function just returns v1 == v2 */
extern JS_PUBLIC_API(int)
JS_CompareValues(const void *v1, const void *v2);
JS_END_EXTERN_C
#endif /* jshash_h___ */

View File

@ -16,10 +16,10 @@
# include "prthread.h"
# include "prinit.h"
# define JS_ATOMIC_INCREMENT(p) PR_ATOMIC_INCREMENT((PRInt32 *)(p))
# define JS_ATOMIC_DECREMENT(p) PR_ATOMIC_DECREMENT((PRInt32 *)(p))
# define JS_ATOMIC_ADD(p,v) PR_ATOMIC_ADD((PRInt32 *)(p), (PRInt32)(v))
# define JS_ATOMIC_SET(p,v) PR_ATOMIC_SET((PRInt32 *)(p), (PRInt32)(v))
# define JS_ATOMIC_INCREMENT(p) PR_ATOMIC_INCREMENT((int32_t *)(p))
# define JS_ATOMIC_DECREMENT(p) PR_ATOMIC_DECREMENT((int32_t *)(p))
# define JS_ATOMIC_ADD(p,v) PR_ATOMIC_ADD((int32_t *)(p), (int32_t)(v))
# define JS_ATOMIC_SET(p,v) PR_ATOMIC_SET((int32_t *)(p), (int32_t)(v))
#else /* JS_THREADSAFE */

View File

@ -7,6 +7,7 @@
#include "jsprvtd.h"
#include "jspubtd.h"
#include "jsapi.h"
#include "js/Vector.h"
@ -18,7 +19,7 @@ js_InitJSONClass(JSContext *cx, JSObject *obj);
extern JSBool
js_Stringify(JSContext *cx, js::MutableHandleValue vp,
JSObject *replacer, js::Value space,
JSObject *replacer, js::Value space,
js::StringBuffer &sb);
// Avoid build errors on certain platforms that define these names as constants
@ -37,8 +38,8 @@ enum DecodingMode { STRICT, LEGACY };
namespace js {
extern JS_FRIEND_API(JSBool)
ParseJSONWithReviver(JSContext *cx, const jschar *chars, size_t length, const Value &filter,
Value *vp, DecodingMode decodingMode = STRICT);
ParseJSONWithReviver(JSContext *cx, const jschar *chars, size_t length, HandleValue filter,
MutableHandleValue vp, DecodingMode decodingMode = STRICT);
} /* namespace js */

View File

@ -115,7 +115,7 @@ class JS_FRIEND_API(PerfMeasurement)
* global object). The JS-visible API is identical to the C++ API.
*/
extern JS_FRIEND_API(JSObject*)
RegisterPerfMeasurement(JSContext *cx, JSObject *global);
RegisterPerfMeasurement(JSContext *cx, JSRawObject global);
/*
* Given a jsval which contains an instance of the aforementioned

View File

@ -64,6 +64,7 @@ JS_PROTO(WeakMap, 36, js_InitWeakMapClass)
JS_PROTO(Map, 37, js_InitMapClass)
JS_PROTO(Set, 38, js_InitSetClass)
JS_PROTO(DataView, 39, js_InitTypedArrayClasses)
JS_PROTO(ParallelArray, 40, js_InitParallelArrayClass)
#undef XML_INIT
#undef NAMESPACE_INIT

View File

@ -13,7 +13,7 @@
namespace js {
class Wrapper;
class JS_FRIEND_API(Wrapper);
/*
* A proxy is a JSObject that implements generic behavior by providing custom
@ -48,10 +48,19 @@ class Wrapper;
*/
class JS_FRIEND_API(BaseProxyHandler) {
void *mFamily;
bool mHasPrototype;
protected:
// Subclasses may set this in their constructor.
void setHasPrototype(bool hasPrototype) { mHasPrototype = hasPrototype; };
public:
explicit BaseProxyHandler(void *family);
virtual ~BaseProxyHandler();
bool hasPrototype() {
return mHasPrototype;
}
inline void *family() {
return mFamily;
}
@ -102,7 +111,7 @@ class JS_FRIEND_API(BaseProxyHandler) {
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *proxy, unsigned argc, Value *vp);
virtual bool construct(JSContext *cx, JSObject *proxy, unsigned argc, Value *argv, Value *rval);
virtual bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp, Native native, CallArgs args);
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl, CallArgs args);
virtual bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp, bool *bp);
virtual JSType typeOf(JSContext *cx, JSObject *proxy);
virtual bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx);
@ -114,6 +123,10 @@ class JS_FRIEND_API(BaseProxyHandler) {
virtual void finalize(JSFreeOp *fop, JSObject *proxy);
virtual bool getElementIfPresent(JSContext *cx, JSObject *obj, JSObject *receiver,
uint32_t index, Value *vp, bool *present);
virtual bool getPrototypeOf(JSContext *cx, JSObject *proxy, JSObject **proto);
/* See comment for weakmapKeyDelegateOp in jsclass.h. */
virtual JSObject *weakmapKeyDelegate(JSObject *proxy);
};
/*
@ -150,8 +163,8 @@ class JS_PUBLIC_API(IndirectProxyHandler) : public BaseProxyHandler {
Value *vp) MOZ_OVERRIDE;
virtual bool construct(JSContext *cx, JSObject *proxy, unsigned argc,
Value *argv, Value *rval) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp,
Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp,
bool *bp) MOZ_OVERRIDE;
virtual JSType typeOf(JSContext *cx, JSObject *proxy) MOZ_OVERRIDE;
@ -166,6 +179,7 @@ class JS_PUBLIC_API(IndirectProxyHandler) : public BaseProxyHandler {
Value *vp) MOZ_OVERRIDE;
virtual bool iteratorNext(JSContext *cx, JSObject *proxy,
Value *vp) MOZ_OVERRIDE;
virtual JSObject *weakmapKeyDelegate(JSObject *proxy);
};
/*
@ -215,18 +229,18 @@ class Proxy {
/* ES5 Harmony derived proxy traps. */
static bool has(JSContext *cx, JSObject *proxy, jsid id, bool *bp);
static bool hasOwn(JSContext *cx, JSObject *proxy, jsid id, bool *bp);
static bool get(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, Value *vp);
static bool getElementIfPresent(JSContext *cx, JSObject *proxy, JSObject *receiver,
uint32_t index, Value *vp, bool *present);
static bool set(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, bool strict,
Value *vp);
static bool get(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id, MutableHandleValue vp);
static bool getElementIfPresent(JSContext *cx, HandleObject proxy, HandleObject receiver,
uint32_t index, MutableHandleValue vp, bool *present);
static bool set(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id, bool strict,
MutableHandleValue vp);
static bool keys(JSContext *cx, JSObject *proxy, AutoIdVector &props);
static bool iterate(JSContext *cx, JSObject *proxy, unsigned flags, Value *vp);
static bool iterate(JSContext *cx, HandleObject proxy, unsigned flags, MutableHandleValue vp);
/* Spidermonkey extensions. */
static bool call(JSContext *cx, JSObject *proxy, unsigned argc, Value *vp);
static bool construct(JSContext *cx, JSObject *proxy, unsigned argc, Value *argv, Value *rval);
static bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp, Native native, CallArgs args);
static bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl, CallArgs args);
static bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp, bool *bp);
static JSType typeOf(JSContext *cx, JSObject *proxy);
static bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx);
@ -247,17 +261,17 @@ inline bool IsFunctionProxyClass(const Class *clasp)
return clasp == &js::FunctionProxyClass;
}
inline bool IsObjectProxy(const JSObject *obj)
inline bool IsObjectProxy(RawObject obj)
{
return IsObjectProxyClass(GetObjectClass(obj));
}
inline bool IsFunctionProxy(const JSObject *obj)
inline bool IsFunctionProxy(RawObject obj)
{
return IsFunctionProxyClass(GetObjectClass(obj));
}
inline bool IsProxy(const JSObject *obj)
inline bool IsProxy(RawObject obj)
{
Class *clasp = GetObjectClass(obj);
return IsObjectProxyClass(clasp) || IsFunctionProxyClass(clasp);
@ -272,56 +286,56 @@ const uint32_t JSSLOT_PROXY_CALL = 4;
const uint32_t JSSLOT_PROXY_CONSTRUCT = 5;
inline BaseProxyHandler *
GetProxyHandler(const JSObject *obj)
GetProxyHandler(RawObject obj)
{
JS_ASSERT(IsProxy(obj));
return (BaseProxyHandler *) GetReservedSlot(obj, JSSLOT_PROXY_HANDLER).toPrivate();
}
inline const Value &
GetProxyPrivate(const JSObject *obj)
GetProxyPrivate(RawObject obj)
{
JS_ASSERT(IsProxy(obj));
return GetReservedSlot(obj, JSSLOT_PROXY_PRIVATE);
}
inline JSObject *
GetProxyTargetObject(const JSObject *obj)
GetProxyTargetObject(RawObject obj)
{
JS_ASSERT(IsProxy(obj));
return GetProxyPrivate(obj).toObjectOrNull();
}
inline const Value &
GetProxyCall(const JSObject *obj)
GetProxyCall(RawObject obj)
{
JS_ASSERT(IsFunctionProxy(obj));
return GetReservedSlot(obj, JSSLOT_PROXY_CALL);
}
inline const Value &
GetProxyExtra(const JSObject *obj, size_t n)
GetProxyExtra(RawObject obj, size_t n)
{
JS_ASSERT(IsProxy(obj));
return GetReservedSlot(obj, JSSLOT_PROXY_EXTRA + n);
}
inline void
SetProxyHandler(JSObject *obj, BaseProxyHandler *handler)
SetProxyHandler(RawObject obj, BaseProxyHandler *handler)
{
JS_ASSERT(IsProxy(obj));
SetReservedSlot(obj, JSSLOT_PROXY_HANDLER, PrivateValue(handler));
}
inline void
SetProxyPrivate(JSObject *obj, const Value &value)
SetProxyPrivate(RawObject obj, const Value &value)
{
JS_ASSERT(IsProxy(obj));
SetReservedSlot(obj, JSSLOT_PROXY_PRIVATE, value);
}
inline void
SetProxyExtra(JSObject *obj, size_t n, const Value &extra)
SetProxyExtra(RawObject obj, size_t n, const Value &extra)
{
JS_ASSERT(IsProxy(obj));
JS_ASSERT(n <= 1);

View File

@ -82,7 +82,6 @@ class JSExtensibleString;
class JSExternalString;
class JSLinearString;
class JSFixedString;
class JSStaticAtom;
class JSRope;
class JSAtom;
class JSWrapper;
@ -131,24 +130,10 @@ class StackSpace;
class ContextStack;
class ScriptFrameIter;
struct BytecodeEmitter;
struct Definition;
struct FunctionBox;
struct ObjectBox;
struct ParseNode;
struct Parser;
struct SharedContext;
class TokenStream;
struct Token;
struct TokenPos;
struct TokenPtr;
struct TreeContext;
class UpvarCookie;
class Proxy;
class BaseProxyHandler;
class DirectWrapper;
class CrossCompartmentWrapper;
class JS_FRIEND_API(BaseProxyHandler);
class JS_FRIEND_API(DirectWrapper);
class JS_FRIEND_API(CrossCompartmentWrapper);
class TempAllocPolicy;
class RuntimeAllocPolicy;
@ -172,13 +157,6 @@ class Bindings;
struct StackBaseShape;
struct StackShape;
class MultiDeclRange;
class ParseMapPool;
class DefinitionList;
typedef InlineMap<JSAtom *, Definition *, 24> AtomDefnMap;
typedef InlineMap<JSAtom *, jsatomid, 24> AtomIndexMap;
typedef Vector<UpvarCookie, 8> UpvarCookies;
class Breakpoint;
class BreakpointSite;
class Debugger;
@ -197,6 +175,22 @@ typedef JSPropertyOp PropertyOp;
typedef JSStrictPropertyOp StrictPropertyOp;
typedef JSPropertyDescriptor PropertyDescriptor;
namespace frontend {
struct BytecodeEmitter;
struct Definition;
struct FunctionBox;
struct ObjectBox;
struct Token;
struct TokenPos;
struct TokenPtr;
class TokenStream;
struct Parser;
class ParseMapPool;
struct ParseNode;
} /* namespace frontend */
namespace analyze {
struct LifetimeVariable;

View File

@ -240,6 +240,21 @@ enum ThingRootKind
THING_ROOT_LIMIT
};
/*
 * Map a GC-thing type to its ThingRootKind at compile time. Only the
 * specializations below exist; using RootKind<T> for any other T is a
 * compile error by design.
 */
template <typename T>
struct RootKind;

/*
 * Specifically mark the ThingRootKind of externally visible types, so that
 * JSAPI users may use JSRooted... types without having the class definition
 * available.
 *
 * (The stray semicolons after the member-function bodies in the original
 * were removed; they were empty declarations that warn under -pedantic.)
 */
template <> struct RootKind<JSObject *>   { static ThingRootKind rootKind() { return THING_ROOT_OBJECT; } };
template <> struct RootKind<JSFunction *> { static ThingRootKind rootKind() { return THING_ROOT_OBJECT; } };
template <> struct RootKind<JSString *>   { static ThingRootKind rootKind() { return THING_ROOT_STRING; } };
template <> struct RootKind<JSScript *>   { static ThingRootKind rootKind() { return THING_ROOT_SCRIPT; } };
template <> struct RootKind<jsid>         { static ThingRootKind rootKind() { return THING_ROOT_ID; } };
template <> struct RootKind<Value>        { static ThingRootKind rootKind() { return THING_ROOT_VALUE; } };
struct ContextFriendFields {
JSRuntime *const runtime;

View File

@ -147,8 +147,6 @@
***********************************************************************/
#define JS_HOWMANY(x,y) (((x)+(y)-1)/(y))
#define JS_ROUNDUP(x,y) (JS_HOWMANY(x,y)*(y))
#define JS_MIN(x,y) ((x)<(y)?(x):(y))
#define JS_MAX(x,y) ((x)>(y)?(x):(y))
#include "jscpucfg.h"

View File

@ -15,6 +15,10 @@
#include "js/Utility.h"
#ifdef USE_ZLIB
#include "zlib.h"
#endif
/* Forward declarations. */
struct JSContext;
@ -335,41 +339,43 @@ ClearAllBitArrayElements(size_t *array, size_t length)
array[i] = 0;
}
} /* namespace js */
#endif /* __cplusplus */
#ifdef USE_ZLIB
/* Incremental zlib deflate over a caller-supplied input/output buffer pair. */
class Compressor
{
    /* Number of bytes we should hand to zlib each compressMore() call. */
    static const size_t CHUNKSIZE = 2048;
    z_stream zs;                  /* zlib stream state; zalloc/zfree are not set
                                     here -- presumably init() configures them,
                                     TODO confirm */
    const unsigned char *inp;     /* source buffer */
    size_t inplen;                /* total length of the source buffer */
  public:
    /*
     * |out| must be at least |inplen| bytes: the constructor sets
     * avail_out = inplen, i.e. compression is only attempted when the
     * result fits in an input-sized buffer.
     */
    Compressor(const unsigned char *inp, size_t inplen, unsigned char *out)
        : inp(inp),
          inplen(inplen)
    {
        JS_ASSERT(inplen > 0);
        zs.opaque = NULL;
        zs.next_in = (Bytef *)inp;
        /* avail_in starts at 0; input is fed in CHUNKSIZE slices by compressMore(). */
        zs.avail_in = 0;
        zs.next_out = out;
        zs.avail_out = inplen;
    }
    /* Must be called before compressMore(); completes zlib setup. */
    bool init();
    /* Compress some of the input. Return true if it should be called again. */
    bool compressMore();
    /* Finalize compression. Return the length of the compressed input. */
    size_t finish();
};
/*
* JS_ROTATE_LEFT32
*
* There is no rotate operation in the C Language so the construct (a << 4) |
* (a >> 28) is used instead. Most compilers convert this to a rotate
* instruction but some versions of MSVC don't without a little help. To get
* MSVC to generate a rotate instruction, we have to use the _rotl intrinsic
* and use a pragma to make _rotl inline.
*
* MSVC in VS2005 will do an inline rotate instruction on the above construct.
* Decompress a string. The caller must know the length of the output and
* allocate |out| to a string of that length.
*/
#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_AMD64) || \
defined(_M_X64))
#include <stdlib.h>
#pragma intrinsic(_rotl)
#define JS_ROTATE_LEFT32(a, bits) _rotl(a, bits)
#else
#define JS_ROTATE_LEFT32(a, bits) (((a) << (bits)) | ((a) >> (32 - (bits))))
bool DecompressString(const unsigned char *inp, size_t inplen,
unsigned char *out, size_t outlen);
#endif
/* Static control-flow checks. */
#ifdef NS_STATIC_CHECKING
/* Trigger a control flow check to make sure that code flows through label */
inline __attribute__ ((unused)) void MUST_FLOW_THROUGH(const char *label) {}
/* Avoid unused goto-label warnings. */
# define MUST_FLOW_LABEL(label) goto label; label:
#else
# define MUST_FLOW_THROUGH(label) ((void) 0)
# define MUST_FLOW_LABEL(label)
#endif
} /* namespace js */
#endif /* __cplusplus */
/* Crash diagnostics */
#ifdef DEBUG

View File

@ -218,6 +218,7 @@ typedef enum JSWhyMagic
JS_OVERWRITTEN_CALLEE, /* arguments.callee has been overwritten */
JS_FORWARD_TO_CALL_OBJECT, /* args object element stored in call object */
JS_BLOCK_NEEDS_CLONE, /* value of static block object slot */
JS_HASH_KEY_EMPTY, /* see class js::HashableValue */
JS_GENERIC_MAGIC /* for local use */
} JSWhyMagic;

View File

@ -64,9 +64,9 @@ class JS_FRIEND_API(Wrapper)
static JSObject *New(JSContext *cx, JSObject *obj, JSObject *proto,
JSObject *parent, Wrapper *handler);
static Wrapper *wrapperHandler(const JSObject *wrapper);
static Wrapper *wrapperHandler(RawObject wrapper);
static JSObject *wrappedObject(const JSObject *wrapper);
static JSObject *wrappedObject(RawObject wrapper);
explicit Wrapper(unsigned flags);
@ -166,7 +166,7 @@ class JS_FRIEND_API(IndirectWrapper) : public Wrapper,
class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
{
public:
explicit DirectWrapper(unsigned flags);
explicit DirectWrapper(unsigned flags, bool hasPrototype = false);
virtual ~DirectWrapper();
@ -206,7 +206,8 @@ class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *wrapper, unsigned argc, Value *vp) MOZ_OVERRIDE;
virtual bool construct(JSContext *cx, JSObject *wrapper, unsigned argc, Value *argv, Value *rval) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, JSObject *wrapper, Class *clasp, Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *wrapper, const Value *vp, bool *bp) MOZ_OVERRIDE;
virtual JSString *obj_toString(JSContext *cx, JSObject *wrapper) MOZ_OVERRIDE;
virtual JSString *fun_toString(JSContext *cx, JSObject *wrapper, unsigned indent) MOZ_OVERRIDE;
@ -214,6 +215,7 @@ class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
Value *vp) MOZ_OVERRIDE;
static DirectWrapper singleton;
static DirectWrapper singletonWithPrototype;
static void *getWrapperFamily();
};
@ -222,7 +224,7 @@ class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
class JS_FRIEND_API(CrossCompartmentWrapper) : public DirectWrapper
{
public:
CrossCompartmentWrapper(unsigned flags);
CrossCompartmentWrapper(unsigned flags, bool hasPrototype = false);
virtual ~CrossCompartmentWrapper();
@ -249,7 +251,8 @@ class JS_FRIEND_API(CrossCompartmentWrapper) : public DirectWrapper
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *wrapper, unsigned argc, Value *vp) MOZ_OVERRIDE;
virtual bool construct(JSContext *cx, JSObject *wrapper, unsigned argc, Value *argv, Value *rval) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, JSObject *wrapper, Class *clasp, Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *wrapper, const Value *vp, bool *bp) MOZ_OVERRIDE;
virtual JSString *obj_toString(JSContext *cx, JSObject *wrapper) MOZ_OVERRIDE;
virtual JSString *fun_toString(JSContext *cx, JSObject *wrapper, unsigned indent) MOZ_OVERRIDE;
@ -258,6 +261,7 @@ class JS_FRIEND_API(CrossCompartmentWrapper) : public DirectWrapper
virtual bool iteratorNext(JSContext *cx, JSObject *wrapper, Value *vp);
static CrossCompartmentWrapper singleton;
static CrossCompartmentWrapper singletonWithPrototype;
};
/*
@ -275,7 +279,8 @@ class JS_FRIEND_API(SecurityWrapper) : public Base
public:
SecurityWrapper(unsigned flags);
virtual bool nativeCall(JSContext *cx, JSObject *wrapper, Class *clasp, Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx) MOZ_OVERRIDE;
virtual bool regexp_toShared(JSContext *cx, JSObject *proxy, RegExpGuard *g) MOZ_OVERRIDE;
};
@ -283,25 +288,6 @@ class JS_FRIEND_API(SecurityWrapper) : public Base
typedef SecurityWrapper<DirectWrapper> SameCompartmentSecurityWrapper;
typedef SecurityWrapper<CrossCompartmentWrapper> CrossCompartmentSecurityWrapper;
/*
* A hacky class that lets a friend force a fake frame. We must already be
* in the compartment of |target| when we enter the forced frame.
*/
class JS_FRIEND_API(ForceFrame)
{
public:
JSContext * const context;
JSObject * const target;
private:
DummyFrameGuard *frame;
public:
ForceFrame(JSContext *cx, JSObject *target);
~ForceFrame();
bool enter();
};
class JS_FRIEND_API(DeadObjectProxy) : public BaseProxyHandler
{
public:
@ -323,7 +309,8 @@ class JS_FRIEND_API(DeadObjectProxy) : public BaseProxyHandler
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *proxy, unsigned argc, Value *vp);
virtual bool construct(JSContext *cx, JSObject *proxy, unsigned argc, Value *argv, Value *rval);
virtual bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp, Native native, CallArgs args);
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp, bool *bp);
virtual bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx);
virtual JSString *obj_toString(JSContext *cx, JSObject *proxy);
@ -347,7 +334,7 @@ TransparentObjectWrapper(JSContext *cx, JSObject *obj, JSObject *wrappedProto, J
extern JS_FRIEND_DATA(int) sWrapperFamily;
inline bool
IsWrapper(const JSObject *obj)
IsWrapper(RawObject obj)
{
return IsProxy(obj) && GetProxyHandler(obj)->family() == &sWrapperFamily;
}
@ -366,8 +353,13 @@ UnwrapObject(JSObject *obj, bool stopAtOuter = true, unsigned *flagsp = NULL);
JS_FRIEND_API(JSObject *)
UnwrapObjectChecked(JSContext *cx, JSObject *obj);
// Unwrap only the outermost security wrapper, with the same semantics as
// above. This is the checked version of Wrapper::wrappedObject.
JS_FRIEND_API(JSObject *)
UnwrapOneChecked(JSContext *cx, JSObject *obj);
JS_FRIEND_API(bool)
IsCrossCompartmentWrapper(const JSObject *obj);
IsCrossCompartmentWrapper(RawObject obj);
JSObject *
NewDeadProxyObject(JSContext *cx, JSObject *parent);
@ -384,37 +376,6 @@ RemapAllWrappersForObject(JSContext *cx, JSObject *oldTarget,
// API to recompute all cross-compartment wrappers whose source and target
// match the given filters.
//
// These filters are designed to be ephemeral stack classes, and thus don't
// do any rooting or holding of their members.
struct CompartmentFilter {
virtual bool match(JSCompartment *c) const = 0;
};
struct AllCompartments : public CompartmentFilter {
virtual bool match(JSCompartment *c) const { return true; }
};
struct ContentCompartmentsOnly : public CompartmentFilter {
virtual bool match(JSCompartment *c) const {
return !IsSystemCompartment(c);
}
};
struct SingleCompartment : public CompartmentFilter {
JSCompartment *ours;
SingleCompartment(JSCompartment *c) : ours(c) {}
virtual bool match(JSCompartment *c) const { return c == ours; }
};
struct CompartmentsWithPrincipals : public CompartmentFilter {
JSPrincipals *principals;
CompartmentsWithPrincipals(JSPrincipals *p) : principals(p) {}
virtual bool match(JSCompartment *c) const {
return JS_GetCompartmentPrincipals(c) == principals;
}
};
JS_FRIEND_API(bool)
RecomputeWrappers(JSContext *cx, const CompartmentFilter &sourceFilter,
const CompartmentFilter &targetFilter);

View File

@ -70,6 +70,10 @@
# define MOZ_HAVE_CXX11_OVERRIDE
# define MOZ_HAVE_CXX11_FINAL final
# endif
# if __has_extension(cxx_strong_enums)
# define MOZ_HAVE_CXX11_ENUM_TYPE
# define MOZ_HAVE_CXX11_STRONG_ENUMS
# endif
# if __has_attribute(noinline)
# define MOZ_HAVE_NEVER_INLINE __attribute__((noinline))
# endif
@ -89,6 +93,8 @@
# endif
# if __GNUC_MINOR__ >= 4
# define MOZ_HAVE_CXX11_DELETE
# define MOZ_HAVE_CXX11_ENUM_TYPE
# define MOZ_HAVE_CXX11_STRONG_ENUMS
# endif
# endif
# else
@ -108,6 +114,10 @@
# define MOZ_HAVE_CXX11_OVERRIDE
/* MSVC currently spells "final" as "sealed". */
# define MOZ_HAVE_CXX11_FINAL sealed
# define MOZ_HAVE_CXX11_ENUM_TYPE
# endif
# if _MSC_VER >= 1700
# define MOZ_HAVE_CXX11_STRONG_ENUMS
# endif
# define MOZ_HAVE_NEVER_INLINE __declspec(noinline)
# define MOZ_HAVE_NORETURN __declspec(noreturn)
@ -298,6 +308,167 @@
# define MOZ_FINAL /* no support */
#endif
/**
* MOZ_ENUM_TYPE specifies the underlying numeric type for an enum. It's
* specified by placing MOZ_ENUM_TYPE(type) immediately after the enum name in
* its declaration, and before the opening curly brace, like
*
* enum MyEnum MOZ_ENUM_TYPE(uint16_t)
* {
* A,
* B = 7,
* C
* };
*
* In supporting compilers, the macro will expand to ": uint16_t". The
* compiler will allocate exactly two bytes for MyEnum, and will require all
* enumerators to have values between 0 and 65535. (Thus specifying "B =
* 100000" instead of "B = 7" would fail to compile.) In old compilers, the
* macro expands to the empty string, and the underlying type is generally
* undefined.
*/
#ifdef MOZ_HAVE_CXX11_ENUM_TYPE
# define MOZ_ENUM_TYPE(type) : type
#else
# define MOZ_ENUM_TYPE(type) /* no support */
#endif
/**
* MOZ_BEGIN_ENUM_CLASS and MOZ_END_ENUM_CLASS provide access to the
* strongly-typed enumeration feature of C++11 ("enum class"). If supported
* by the compiler, an enum defined using these macros will not be implicitly
* converted to any other type, and its enumerators will be scoped using the
* enumeration name. Place MOZ_BEGIN_ENUM_CLASS(EnumName, type) in place of
* "enum EnumName {", and MOZ_END_ENUM_CLASS(EnumName) in place of the closing
* "};". For example,
*
* MOZ_BEGIN_ENUM_CLASS(Enum, int32_t)
* A, B = 6
* MOZ_END_ENUM_CLASS(Enum)
*
* This will make "Enum::A" and "Enum::B" appear in the global scope, but "A"
* and "B" will not. In compilers that support C++11 strongly-typed
* enumerations, implicit conversions of Enum values to numeric types will
* fail. In other compilers, Enum itself will actually be defined as a class,
* and some implicit conversions will fail while others will succeed.
*
* The type argument specifies the underlying type for the enum where
* supported, as with MOZ_ENUM_TYPE(). For simplicity, it is currently
* mandatory. As with MOZ_ENUM_TYPE(), it will do nothing on compilers that do
* not support it.
*/
#if defined(MOZ_HAVE_CXX11_STRONG_ENUMS)
/* All compilers that support strong enums also support an explicit
* underlying type, so no extra check is needed */
# define MOZ_BEGIN_ENUM_CLASS(Name, type) enum class Name : type {
# define MOZ_END_ENUM_CLASS(Name) };
#else
/**
* We need Name to both name a type, and scope the provided enumerator
* names. Namespaces and classes both provide scoping, but namespaces
* aren't types, so we need to use a class that wraps the enum values. We
* have an implicit conversion from the inner enum type to the class, so
* statements like
*
* Enum x = Enum::A;
*
* will still work. We need to define an implicit conversion from the class
* to the inner enum as well, so that (for instance) switch statements will
* work. This means that the class can be implicitly converted to a numeric
* value as well via the enum type, since C++ allows an implicit
* user-defined conversion followed by a standard conversion to still be
* implicit.
*
* We have an explicit constructor from int defined, so that casts like
* (Enum)7 will still work. We also have a zero-argument constructor with
* no arguments, so declaration without initialization (like "Enum foo;")
* will work.
*
* Additionally, we'll delete as many operators as possible for the inner
* enum type, so statements like this will still fail:
*
* f(5 + Enum::B); // deleted operator+
*
* But we can't prevent things like this, because C++ doesn't allow
* overriding conversions or assignment operators for enums:
*
* int x = Enum::A;
* int f()
* {
* return Enum::A;
* }
*/
# define MOZ_BEGIN_ENUM_CLASS(Name, type) \
class Name \
{ \
public: \
enum Enum MOZ_ENUM_TYPE(type) \
{
# define MOZ_END_ENUM_CLASS(Name) \
}; \
Name() {} \
Name(Enum aEnum) : mEnum(aEnum) {} \
explicit Name(int num) : mEnum((Enum)num) {} \
operator Enum() const { return mEnum; } \
private: \
Enum mEnum; \
}; \
inline int operator+(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator+(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator-(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator-(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator*(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator*(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator/(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator/(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator%(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator%(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator+(const Name::Enum&) MOZ_DELETE; \
inline int operator-(const Name::Enum&) MOZ_DELETE; \
inline int& operator++(Name::Enum&) MOZ_DELETE; \
inline int operator++(Name::Enum&, int) MOZ_DELETE; \
inline int& operator--(Name::Enum&) MOZ_DELETE; \
inline int operator--(Name::Enum&, int) MOZ_DELETE; \
inline bool operator==(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator==(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator!=(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator!=(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator>(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator>(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator<(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator<(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator>=(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator>=(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator<=(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator<=(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator!(const Name::Enum&) MOZ_DELETE; \
inline bool operator&&(const bool&, const Name::Enum&) MOZ_DELETE; \
inline bool operator&&(const Name::Enum&, const bool&) MOZ_DELETE; \
inline bool operator||(const bool&, const Name::Enum&) MOZ_DELETE; \
inline bool operator||(const Name::Enum&, const bool&) MOZ_DELETE; \
inline int operator~(const Name::Enum&) MOZ_DELETE; \
inline int operator&(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator&(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator|(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator|(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator^(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator^(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator<<(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator<<(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator>>(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator>>(const Name::Enum&, const int&) MOZ_DELETE; \
inline int& operator+=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator-=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator*=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator/=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator%=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator&=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator|=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator^=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator<<=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator>>=(int&, const Name::Enum&) MOZ_DELETE;
#endif
/**
* MOZ_WARN_UNUSED_RESULT tells the compiler to emit a warning if a function's
* return value is not used by the caller.

View File

@ -0,0 +1,15 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* mfbt math constants. */
#ifndef mozilla_Constants_h_
#define mozilla_Constants_h_
#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif
#endif /* mozilla_Constants_h_ */

View File

@ -179,6 +179,14 @@ AddToHash(uint32_t hash, A* a)
return detail::AddUintptrToHash<sizeof(uintptr_t)>(hash, uintptr_t(a));
}
template<>
MOZ_WARN_UNUSED_RESULT
inline uint32_t
AddToHash(uint32_t hash, uintptr_t a)
{
return detail::AddUintptrToHash<sizeof(uintptr_t)>(hash, a);
}
template<typename A, typename B>
MOZ_WARN_UNUSED_RESULT
uint32_t

View File

@ -0,0 +1,47 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* mfbt maths algorithms. */
#ifndef mozilla_MathAlgorithms_h_
#define mozilla_MathAlgorithms_h_
#include "mozilla/Assertions.h"
namespace mozilla {
// Greatest Common Divisor
template<typename IntegerType>
MOZ_ALWAYS_INLINE IntegerType
EuclidGCD(IntegerType a, IntegerType b)
{
// Euclid's algorithm; O(N) in the worst case. (There are better
// ways, but we don't need them for the current use of this algo.)
MOZ_ASSERT(a > 0);
MOZ_ASSERT(b > 0);
while (a != b) {
if (a > b) {
a = a - b;
} else {
b = b - a;
}
}
return a;
}
// Least Common Multiple
template<typename IntegerType>
MOZ_ALWAYS_INLINE IntegerType
EuclidLCM(IntegerType a, IntegerType b)
{
// Divide first to reduce overflow risk.
return (a / EuclidGCD(a, b)) * b;
}
} /* namespace mozilla */
#endif /* mozilla_MathAlgorithms_h_ */

View File

@ -0,0 +1,46 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* Implements a workaround for compilers which do not support the C++11 nullptr
* constant.
*/
#ifndef mozilla_NullPtr_h_
#define mozilla_NullPtr_h_
#if defined(__clang__)
# ifndef __has_extension
# define __has_extension __has_feature
# endif
# if __has_extension(cxx_nullptr)
# define MOZ_HAVE_CXX11_NULLPTR
# endif
#elif defined(__GNUC__)
# if defined(_GXX_EXPERIMENTAL_CXX0X__) || __cplusplus >= 201103L
# if (__GNUC__ * 1000 + __GNU_MINOR__) >= 4006
# define MOZ_HAVE_CXX11_NULLPTR
# endif
# endif
#elif _MSC_VER >= 1600
# define MOZ_HAVE_CXX11_NULLPTR
#endif
/**
* Use C++11 nullptr if available; otherwise use __null for gcc, or a 0 literal
* with the correct size to match the size of a pointer on a given platform.
*/
#ifndef MOZ_HAVE_CXX11_NULLPTR
# if defined(__GNUC__)
# define nullptr __null
# elif defined(_WIN64)
# define nullptr 0LL
# else
# define nullptr 0L
# endif
#endif
#endif /* mozilla_NullPtr_h_ */

View File

@ -0,0 +1,46 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* Simple class for computing SHA1. */
/*
* To compute the SHA1 of a buffer using this class you should write something
* like:
* void SHA1(const uint8_t* buf, unsigned size, uint8_t hash[20])
* {
* SHA1Sum S;
* S.update(buf, size);
* S.finish(hash);
* }
* If there are multiple buffers or chunks, the update method can be called
* multiple times and the SHA1 is computed on the concatenation of all the
* buffers passed to it.
* The finish method may only be called once and cannot be followed by calls
* to update.
*/
#ifndef mozilla_SHA1_h_
#define mozilla_SHA1_h_
#include "mozilla/StandardInteger.h"
namespace mozilla {
class SHA1Sum {
union {
uint32_t w[16]; /* input buffer */
uint8_t b[64];
} u;
uint64_t size; /* count of hashed bytes. */
unsigned H[22]; /* 5 state variables, 16 tmp values, 1 extra */
bool mDone;
public:
static const unsigned int HashSize = 20;
SHA1Sum();
void update(const uint8_t *dataIn, uint32_t len);
void finish(uint8_t hashout[20]);
};
}
#endif /* mozilla_SHA1_h_ */

View File

@ -0,0 +1,139 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/* Weak pointer functionality, implemented as a mixin for use with any class. */
/**
* SupportsWeakPtr lets you have a pointer to an object 'Foo' without affecting
* its lifetime. It works by creating a single shared reference counted object
* (WeakReference) that each WeakPtr will access 'Foo' through. This lets 'Foo'
* clear the pointer in the WeakReference without having to know about all of
* the WeakPtrs to it and allows the WeakReference to live beyond the lifetime
* of 'Foo'.
*
* The overhead of WeakPtr is that accesses to 'Foo' becomes an additional
* dereference, and an additional heap allocated pointer sized object shared
* between all of the WeakPtrs.
*
* Example of usage:
*
* // To have a class C support weak pointers, inherit from SupportsWeakPtr<C>.
* class C : public SupportsWeakPtr<C>
* {
* public:
* int num;
* void act();
* };
*
* C* ptr = new C();
*
* // Get weak pointers to ptr. The first time asWeakPtr is called
* // a reference counted WeakReference object is created that
* // can live beyond the lifetime of 'ptr'. The WeakReference
* // object will be notified of 'ptr's destruction.
* WeakPtr<C> weak = ptr->asWeakPtr();
* WeakPtr<C> other = ptr->asWeakPtr();
*
* // Test a weak pointer for validity before using it.
* if (weak) {
* weak->num = 17;
* weak->act();
* }
*
* // Destroying the underlying object clears weak pointers to it.
* delete ptr;
*
* MOZ_ASSERT(!weak, "Deleting |ptr| clears weak pointers to it.");
* MOZ_ASSERT(!other, "Deleting |ptr| clears all weak pointers to it.");
*
* WeakPtr is typesafe and may be used with any class. It is not required that
* the class be reference-counted or allocated in any particular way.
*
* The API was loosely inspired by Chromium's weak_ptr.h:
* http://src.chromium.org/svn/trunk/src/base/memory/weak_ptr.h
*/
#ifndef mozilla_WeakPtr_h_
#define mozilla_WeakPtr_h_
#include "mozilla/Assertions.h"
#include "mozilla/NullPtr.h"
#include "mozilla/RefPtr.h"
#include "mozilla/TypeTraits.h"
namespace mozilla {
template <typename T> class WeakPtr;
template <typename T>
class SupportsWeakPtr
{
public:
WeakPtr<T> asWeakPtr() {
if (!weakRef)
weakRef = new WeakReference(static_cast<T*>(this));
return WeakPtr<T>(weakRef);
}
protected:
~SupportsWeakPtr() {
MOZ_STATIC_ASSERT((IsBaseOf<SupportsWeakPtr<T>, T>::value), "T must derive from SupportsWeakPtr<T>");
if (weakRef)
weakRef->detach();
}
private:
friend class WeakPtr<T>;
// This can live beyond the lifetime of the class derived from SupportsWeakPtr.
class WeakReference : public RefCounted<WeakReference>
{
public:
explicit WeakReference(T* ptr) : ptr(ptr) {}
T* get() const {
return ptr;
}
private:
friend class WeakPtr<T>;
friend class SupportsWeakPtr<T>;
void detach() {
ptr = nullptr;
}
T* ptr;
};
RefPtr<WeakReference> weakRef;
};
template <typename T>
class WeakPtr
{
public:
WeakPtr(const WeakPtr<T>& o) : ref(o.ref) {}
WeakPtr() : ref(nullptr) {}
operator T*() const {
return ref->get();
}
T& operator*() const {
return *ref->get();
}
T* operator->() const {
return ref->get();
}
private:
friend class SupportsWeakPtr<T>;
explicit WeakPtr(const RefPtr<typename SupportsWeakPtr<T>::WeakReference> &o) : ref(o) {}
RefPtr<typename SupportsWeakPtr<T>::WeakReference> ref;
};
} // namespace mozilla
#endif /* ifdef mozilla_WeakPtr_h_ */

View File

@ -1 +1 @@
013d16a49272220cc82f430c2cf72039b2abd4c3
2559ff4626d1357e05dd1f64c9875b2a51c26957

View File

View File

@ -129,7 +129,7 @@ class EncapsulatedPtr
public:
EncapsulatedPtr() : value(NULL) {}
explicit EncapsulatedPtr(T *v) : value(v) {}
EncapsulatedPtr(T *v) : value(v) {}
explicit EncapsulatedPtr(const EncapsulatedPtr<T> &v) : value(v.value) {}
~EncapsulatedPtr() { pre(); }
@ -222,34 +222,51 @@ class RelocatablePtr : public EncapsulatedPtr<T>
{
public:
RelocatablePtr() : EncapsulatedPtr<T>(NULL) {}
explicit RelocatablePtr(T *v) : EncapsulatedPtr<T>(v) { post(); }
explicit RelocatablePtr(const RelocatablePtr<T> &v)
: EncapsulatedPtr<T>(v) { post(); }
explicit RelocatablePtr(T *v) : EncapsulatedPtr<T>(v) {
if (v)
post();
}
explicit RelocatablePtr(const RelocatablePtr<T> &v) : EncapsulatedPtr<T>(v) {
if (this->value)
post();
}
~RelocatablePtr() {
this->pre();
relocate();
if (this->value)
relocate(this->value->compartment());
}
RelocatablePtr<T> &operator=(T *v) {
this->pre();
JS_ASSERT(!IsPoisonedPtr<T>(v));
this->value = v;
post();
if (v) {
this->value = v;
post();
} else if (this->value) {
JSCompartment *comp = this->value->compartment();
this->value = v;
relocate(comp);
}
return *this;
}
RelocatablePtr<T> &operator=(const RelocatablePtr<T> &v) {
this->pre();
JS_ASSERT(!IsPoisonedPtr<T>(v.value));
this->value = v.value;
post();
if (v.value) {
this->value = v.value;
post();
} else if (this->value) {
JSCompartment *comp = this->value->compartment();
this->value = v;
relocate(comp);
}
return *this;
}
protected:
void post() { T::writeBarrierRelocPost(this->value, (void *)&this->value); }
void relocate() { T::writeBarrierRelocated(this->value, (void *)&this->value); }
inline void post();
inline void relocate(JSCompartment *comp);
};
/*
@ -276,6 +293,9 @@ struct Shape;
class BaseShape;
namespace types { struct TypeObject; }
typedef EncapsulatedPtr<JSObject> EncapsulatedPtrObject;
typedef EncapsulatedPtr<JSScript> EncapsulatedPtrScript;
typedef RelocatablePtr<JSObject> RelocatablePtrObject;
typedef RelocatablePtr<JSScript> RelocatablePtrScript;
@ -303,6 +323,19 @@ struct HeapPtrHasher
template <class T>
struct DefaultHasher< HeapPtr<T> > : HeapPtrHasher<T> { };
template<class T>
struct EncapsulatedPtrHasher
{
typedef EncapsulatedPtr<T> Key;
typedef T *Lookup;
static HashNumber hash(Lookup obj) { return DefaultHasher<T *>::hash(obj); }
static bool match(const Key &k, Lookup l) { return k.get() == l; }
};
template <class T>
struct DefaultHasher< EncapsulatedPtr<T> > : EncapsulatedPtrHasher<T> { };
class EncapsulatedValue : public ValueOperations<EncapsulatedValue>
{
protected:
@ -379,7 +412,7 @@ class RelocatableValue : public EncapsulatedValue
public:
explicit inline RelocatableValue();
explicit inline RelocatableValue(const Value &v);
explicit inline RelocatableValue(const RelocatableValue &v);
inline RelocatableValue(const RelocatableValue &v);
inline ~RelocatableValue();
inline RelocatableValue &operator=(const Value &v);
@ -414,7 +447,7 @@ class HeapSlot : public EncapsulatedValue
inline void set(JSCompartment *comp, JSObject *owner, uint32_t slot, const Value &v);
static inline void writeBarrierPost(JSObject *obj, uint32_t slot);
static inline void writeBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t slotno);
static inline void writeBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t slot);
private:
inline void post(JSObject *owner, uint32_t slot);
@ -428,8 +461,19 @@ class HeapSlot : public EncapsulatedValue
* single step.
*/
inline void
SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count)
SlotRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count);
/*
* This is a post barrier for HashTables whose key can be moved during a GC.
*/
template <class Map, class Key>
inline void
HashTableWriteBarrierPost(JSCompartment *comp, const Map *map, const Key &key)
{
#ifdef JS_GCGENERATIONAL
if (key && comp->gcNursery.isInside(key))
comp->gcStoreBuffer.putGeneric(HashKeyRef(map, key));
#endif
}
static inline const Value *
@ -467,15 +511,16 @@ class EncapsulatedId
protected:
jsid value;
explicit EncapsulatedId() : value(JSID_VOID) {}
explicit inline EncapsulatedId(jsid id) : value(id) {}
~EncapsulatedId() {}
private:
EncapsulatedId(const EncapsulatedId &v) MOZ_DELETE;
EncapsulatedId &operator=(const EncapsulatedId &v) MOZ_DELETE;
public:
explicit EncapsulatedId() : value(JSID_VOID) {}
explicit EncapsulatedId(jsid id) : value(id) {}
~EncapsulatedId();
inline EncapsulatedId &operator=(const EncapsulatedId &v);
bool operator==(jsid id) const { return value == id; }
bool operator!=(jsid id) const { return value != id; }

View File

@ -423,38 +423,47 @@ struct ArenaHeader
* chunk. The latter allows to quickly check if the arena is allocated
* during the conservative GC scanning without searching the arena in the
* list.
*
* We use 8 bits for the allocKind so the compiler can use byte-level memory
* instructions to access it.
*/
size_t allocKind : 8;
/*
* When recursive marking uses too much stack the marking is delayed and
* the corresponding arenas are put into a stack using the following field
* as a linkage. To distinguish the bottom of the stack from the arenas
* not present in the stack we use an extra flag to tag arenas on the
* stack.
* When collecting we sometimes need to keep an auxillary list of arenas,
* for which we use the following fields. This happens for several reasons:
*
* When recursive marking uses too much stack the marking is delayed and the
* corresponding arenas are put into a stack. To distinguish the bottom of
* the stack from the arenas not present in the stack we use the
* markOverflow flag to tag arenas on the stack.
*
* Delayed marking is also used for arenas that we allocate into during an
* incremental GC. In this case, we intend to mark all the objects in the
* arena, and it's faster to do this marking in bulk.
*
* To minimize the ArenaHeader size we record the next delayed marking
* linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
* field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
* ArenaShift - 1, so the compiler can use byte-level memory instructions
* to access it.
* When sweeping we keep track of which arenas have been allocated since the
* end of the mark phase. This allows us to tell whether a pointer to an
* unmarked object is yet to be finalized or has already been reallocated.
* We set the allocatedDuringIncremental flag for this and clear it at the
* end of the sweep phase.
*
* To minimize the ArenaHeader size we record the next linkage as
* arenaAddress() >> ArenaShift and pack it with the allocKind field and the
* flags.
*/
public:
size_t hasDelayedMarking : 1;
size_t allocatedDuringIncremental : 1;
size_t markOverflow : 1;
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
size_t auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
static void staticAsserts() {
/* We must be able to fit the allockind into uint8_t. */
JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
/*
* nextDelayedMarkingpacking assumes that ArenaShift has enough bits
* auxNextLink packing assumes that ArenaShift has enough bits
* to cover allocKind and hasDelayedMarking.
*/
JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
@ -487,7 +496,7 @@ struct ArenaHeader
markOverflow = 0;
allocatedDuringIncremental = 0;
hasDelayedMarking = 0;
nextDelayedMarking = 0;
auxNextLink = 0;
}
inline uintptr_t arenaAddress() const;
@ -519,6 +528,11 @@ struct ArenaHeader
inline ArenaHeader *getNextDelayedMarking() const;
inline void setNextDelayedMarking(ArenaHeader *aheader);
inline void unsetDelayedMarking();
inline ArenaHeader *getNextAllocDuringSweep() const;
inline void setNextAllocDuringSweep(ArenaHeader *aheader);
inline void unsetAllocDuringSweep();
};
struct Arena
@ -882,15 +896,48 @@ ArenaHeader::setFirstFreeSpan(const FreeSpan *span)
inline ArenaHeader *
ArenaHeader::getNextDelayedMarking() const
{
return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
JS_ASSERT(hasDelayedMarking);
return &reinterpret_cast<Arena *>(auxNextLink << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
{
JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
JS_ASSERT(!auxNextLink && !hasDelayedMarking);
hasDelayedMarking = 1;
nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
auxNextLink = aheader->arenaAddress() >> ArenaShift;
}
inline void
ArenaHeader::unsetDelayedMarking()
{
JS_ASSERT(hasDelayedMarking);
hasDelayedMarking = 0;
auxNextLink = 0;
}
inline ArenaHeader *
ArenaHeader::getNextAllocDuringSweep() const
{
JS_ASSERT(allocatedDuringIncremental);
return &reinterpret_cast<Arena *>(auxNextLink << ArenaShift)->aheader;
}
inline void
ArenaHeader::setNextAllocDuringSweep(ArenaHeader *aheader)
{
JS_ASSERT(!auxNextLink && !allocatedDuringIncremental);
allocatedDuringIncremental = 1;
auxNextLink = aheader->arenaAddress() >> ArenaShift;
}
inline void
ArenaHeader::unsetAllocDuringSweep()
{
JS_ASSERT(allocatedDuringIncremental);
allocatedDuringIncremental = 0;
auxNextLink = 0;
}
JS_ALWAYS_INLINE void

View File

@ -62,6 +62,7 @@ namespace JS {
* separate rooting analysis.
*/
template <typename T> class MutableHandle;
template <typename T> class Rooted;
template <typename T>
@ -79,6 +80,9 @@ struct NullPtr
static void * const constNullValue;
};
template <typename T>
class MutableHandle;
template <typename T>
class HandleBase {};
@ -108,6 +112,11 @@ class Handle : public HandleBase<T>
ptr = reinterpret_cast<const T *>(&NullPtr::constNullValue);
}
friend class MutableHandle<T>;
Handle(MutableHandle<T> handle) {
ptr = handle.address();
}
/*
* This may be called only if the location of the T is guaranteed
* to be marked (for some reason other than being a Rooted),
@ -130,6 +139,12 @@ class Handle : public HandleBase<T>
Handle(Rooted<S> &root,
typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0);
/* Construct a read only handle from a mutable handle. */
template <typename S>
inline
Handle(MutableHandle<S> &root,
typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy = 0);
const T *address() const { return ptr; }
T get() const { return *ptr; }
@ -185,6 +200,19 @@ class MutableHandle : public MutableHandleBase<T>
*ptr = v;
}
/*
* This may be called only if the location of the T is guaranteed
* to be marked (for some reason other than being a Rooted),
* e.g., if it is guaranteed to be reachable from an implicit root.
*
* Create a MutableHandle from a raw location of a T.
*/
static MutableHandle fromMarkedLocation(T *p) {
MutableHandle h;
h.ptr = p;
return h;
}
T *address() const { return ptr; }
T get() const { return *ptr; }
@ -195,16 +223,33 @@ class MutableHandle : public MutableHandleBase<T>
MutableHandle() {}
T *ptr;
template <typename S>
void operator =(S v) MOZ_DELETE;
};
typedef MutableHandle<JSObject*> MutableHandleObject;
typedef MutableHandle<Value> MutableHandleValue;
/*
* Raw pointer used as documentation that a parameter does not need to be
* rooted.
*/
typedef JSObject * RawObject;
/*
* By default, pointers should use the inheritance hierarchy to find their
* ThingRootKind. Some pointer types are explicitly set in jspubtd.h so that
* Rooted<T> may be used without the class definition being available.
*/
template <typename T>
struct RootKind<T *> { static ThingRootKind rootKind() { return T::rootKind(); }; };
template <typename T>
struct RootMethods<T *>
{
static T *initial() { return NULL; }
static ThingRootKind kind() { return T::rootKind(); }
static ThingRootKind kind() { return RootKind<T *>::rootKind(); }
static bool poisoned(T *v) { return IsPoisonedPtr(v); }
};
@ -291,6 +336,14 @@ Handle<T>::Handle(Rooted<S> &root,
ptr = reinterpret_cast<const T *>(root.address());
}
template<typename T> template <typename S>
inline
Handle<T>::Handle(MutableHandle<S> &root,
typename mozilla::EnableIf<mozilla::IsConvertible<S, T>::value, int>::Type dummy)
{
ptr = reinterpret_cast<const T *>(root.address());
}
template<typename T> template <typename S>
inline
MutableHandle<T>::MutableHandle(Rooted<S> *root,
@ -332,15 +385,7 @@ class SkipRoot
public:
template <typename T>
SkipRoot(JSContext *cx, const T *ptr
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
init(ContextFriendFields::get(cx), ptr, 1);
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
template <typename T>
SkipRoot(JSContext *cx, const T *ptr, size_t count
SkipRoot(JSContext *cx, const T *ptr, size_t count = 1
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
init(ContextFriendFields::get(cx), ptr, count);
@ -363,14 +408,7 @@ class SkipRoot
public:
template <typename T>
SkipRoot(JSContext *cx, const T *ptr
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
template <typename T>
SkipRoot(JSContext *cx, const T *ptr, size_t count
SkipRoot(JSContext *cx, const T *ptr, size_t count = 1
JS_GUARD_OBJECT_NOTIFIER_PARAM)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
@ -381,6 +419,12 @@ class SkipRoot
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/*
* This typedef is to annotate parameters that we have manually verified do not
* need rooting, as opposed to parameters that have not yet been considered.
*/
typedef JSObject *RawObject;
#ifdef DEBUG
JS_FRIEND_API(bool) IsRootingUnnecessaryForContext(JSContext *cx);
JS_FRIEND_API(void) SetRootingUnnecessaryForContext(JSContext *cx, bool value);
@ -389,14 +433,16 @@ JS_FRIEND_API(bool) RelaxRootChecksForContext(JSContext *cx);
class AssertRootingUnnecessary {
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
#ifdef DEBUG
JSContext *cx;
bool prev;
#endif
public:
AssertRootingUnnecessary(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM)
: cx(cx)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
#ifdef DEBUG
this->cx = cx;
prev = IsRootingUnnecessaryForContext(cx);
SetRootingUnnecessaryForContext(cx, true);
#endif

View File

@ -80,6 +80,9 @@ struct Statistics {
counts[s]++;
}
int64_t beginSCC();
void endSCC(unsigned scc, int64_t start);
jschar *formatMessage();
jschar *formatJSON(uint64_t timestamp);
@ -134,10 +137,14 @@ struct Statistics {
/* Allocated space before the GC started. */
size_t preBytes;
/* Sweep times for SCCs of compartments. */
Vector<int64_t, 0, SystemAllocPolicy> sccTimes;
void beginGC();
void endGC();
int64_t gcDuration();
void gcDuration(int64_t *total, int64_t *maxPause);
void sccDurations(int64_t *total, int64_t *maxPause);
void printStats();
bool formatData(StatisticsSerializer &ss, uint64_t timestamp);
@ -168,6 +175,17 @@ struct AutoPhase {
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
struct AutoSCC {
AutoSCC(Statistics &stats, unsigned scc JS_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats), scc(scc) { JS_GUARD_OBJECT_NOTIFIER_INIT; start = stats.beginSCC(); }
~AutoSCC() { stats.endSCC(scc, start); }
Statistics &stats;
unsigned scc;
int64_t start;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
} /* namespace gcstats */
} /* namespace js */

View File

@ -0,0 +1,398 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef JSGC_GENERATIONAL
#ifndef jsgc_storebuffer_h___
#define jsgc_storebuffer_h___
#include "jsgc.h"
#include "jsalloc.h"
#include "gc/Marking.h"
namespace js {
namespace gc {
/*
* Note: this is a stub Nursery that does not actually contain a heap, just a
* set of pointers which are "inside" the nursery to implement verification.
*/
class Nursery
{
HashSet<void*, PointerHasher<void*, 3>, SystemAllocPolicy> nursery;
public:
Nursery() : nursery() {}
bool enable() {
if (!nursery.initialized())
return nursery.init();
return true;
}
void disable() {
if (!nursery.initialized())
return;
nursery.finish();
}
bool isInside(void *cell) const {
JS_ASSERT((uintptr_t(cell) & 0x3) == 0);
return nursery.initialized() && nursery.has(cell);
}
void insertPointer(void *cell) {
nursery.putNew(cell);
}
};
/*
* BufferableRef represents an abstract reference for use in the generational
* GC's remembered set. Entries in the store buffer that cannot be represented
* with the simple pointer-to-a-pointer scheme must derive from this class and
* use the generic store buffer interface.
*/
class BufferableRef
{
  public:
    /* Return true iff this entry refers to the edge stored at |location|. */
    virtual bool match(void *location) = 0;
    /* Trace the edge this entry refers to. */
    virtual void mark(JSTracer *trc) = 0;
};
/*
* HashKeyRef represents a reference to a HashTable key. Manual HashTable
 * barriers should instantiate this template with their own table/key
* type to insert into the generic buffer with putGeneric.
*/
template <typename Map, typename Key>
class HashKeyRef : public BufferableRef
{
    typedef typename Map::Ptr Ptr;

    Map *table;
    Key key;

  public:
    HashKeyRef(Map *m, const Key &k) : table(m), key(k) {}

    /* True iff |location| is the address of this key's slot in the table. */
    bool match(void *location) {
        Ptr entry = table->lookup(key);
        if (entry)
            return &entry->key == location;
        return false;
    }

    /* Nothing to trace for a bare key reference. */
    void mark(JSTracer *trc) {}
};
/*
* The StoreBuffer observes all writes that occur in the system and performs
* efficient filtering of them to derive a remembered set for nursery GC.
*/
class StoreBuffer
{
    /*
     * Capacity, in bytes, of each sub-buffer.
     * TODO: profile to find the ideal size for these.
     */
    static const size_t ValueBufferSize = 1 * 1024 * sizeof(Value *);
    static const size_t CellBufferSize = 2 * 1024 * sizeof(Cell **);
    static const size_t SlotBufferSize = 2 * 1024 * (sizeof(JSObject *) + sizeof(uint32_t));
    static const size_t RelocValueBufferSize = 1 * 1024 * sizeof(Value *);
    static const size_t RelocCellBufferSize = 1 * 1024 * sizeof(Cell **);
    static const size_t GenericBufferSize = 1 * 1024 * sizeof(int);

    /* All sub-buffers are carved out of one region of this many bytes. */
    static const size_t TotalSize = ValueBufferSize + CellBufferSize +
                                    SlotBufferSize + RelocValueBufferSize + RelocCellBufferSize +
                                    GenericBufferSize;

    /* Set of edge locations; used only when verifying the store buffer. */
    typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;

    /*
     * This buffer holds only a single type of edge. Using this buffer is more
     * efficient than the generic buffer when many writes will be to the same
     * type of edge: e.g. Value or Cell*.
     */
    template<typename T>
    class MonoTypeBuffer
    {
        friend class StoreBuffer;

        StoreBuffer *owner;   /* Back-pointer to the owning StoreBuffer. */
        Nursery *nursery;     /* Used by compaction to filter edges. */

        T *base;              /* Pointer to the start of the buffer. */
        T *pos;               /* Pointer to the current insertion position. */
        T *top;               /* Pointer to one element after the end. */

        MonoTypeBuffer(StoreBuffer *owner, Nursery *nursery)
          : owner(owner), nursery(nursery), base(NULL), pos(NULL), top(NULL)
        {}

        MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;

        /* Point this buffer at |len| bytes of storage inside |region|. */
        bool enable(uint8_t *region, size_t len);
        void disable();

        bool isEmpty() const { return pos == base; }
        bool isFull() const { JS_ASSERT(pos <= top); return pos == top; }

        /* Compaction algorithms. */
        void compactNotInSet();

        /*
         * Attempts to reduce the usage of the buffer by removing unnecessary
         * entries.
         */
        virtual void compact();

        /* Add one item to the buffer. */
        void put(const T &v);

        /* For verification. */
        bool accumulateEdges(EdgeSet &edges);
    };

    /*
     * Overrides the MonoTypeBuffer to support pointers that may be moved in
     * memory outside of the GC's control.
     */
    template <typename T>
    class RelocatableMonoTypeBuffer : public MonoTypeBuffer<T>
    {
        friend class StoreBuffer;

        RelocatableMonoTypeBuffer(StoreBuffer *owner, Nursery *nursery)
          : MonoTypeBuffer<T>(owner, nursery)
        {}

        /* Override compaction to filter out removed items. */
        void compactMoved();
        virtual void compact();

        /* Record a removal from the buffer. */
        void unput(const T &v);
    };

    /*
     * Buffer of variable-sized entries: each entry is an unsigned length
     * header followed by a copy-constructed payload (see put() below).
     */
    class GenericBuffer
    {
        friend class StoreBuffer;

        StoreBuffer *owner;
        Nursery *nursery;

        uint8_t *base; /* Pointer to start of buffer. */
        uint8_t *pos;  /* Pointer to current buffer position. */
        uint8_t *top;  /* Pointer to one past the last entry. */

        GenericBuffer(StoreBuffer *owner, Nursery *nursery)
          : owner(owner), nursery(nursery)
        {}

        GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;

        bool enable(uint8_t *region, size_t len);
        void disable();

        /* Check if a pointer is present in the buffer. */
        bool containsEdge(void *location) const;

        /* Copy-construct |t| into the buffer, preceded by its size. */
        template <typename T>
        void put(const T &t) {
            /* Check if we have been enabled. */
            if (!pos)
                return;

            /* Check for overflow. */
            if (top - pos < (unsigned)(sizeof(unsigned) + sizeof(T))) {
                owner->setOverflowed();
                return;
            }

            /* Length header first, then the payload constructed in place. */
            *((unsigned *)pos) = sizeof(T);
            pos += sizeof(unsigned);

            T *p = (T *)pos;
            new (p) T(t);
            pos += sizeof(T);
        }
    };

    /* Reference to a location that holds a Cell pointer. */
    class CellPtrEdge
    {
        friend class StoreBuffer;
        friend class StoreBuffer::MonoTypeBuffer<CellPtrEdge>;
        friend class StoreBuffer::RelocatableMonoTypeBuffer<CellPtrEdge>;

        Cell **edge;

        CellPtrEdge(Cell **v) : edge(v) {}

        bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
        bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }

        void *location() const { return (void *)edge; }

        /* Interesting iff the slot is outside the nursery but points into it. */
        bool inRememberedSet(Nursery *n) {
            return !n->isInside(edge) && n->isInside(*edge);
        }

        bool isNullEdge() const {
            return !*edge;
        }

        /*
         * Low-bit tagging of the edge pointer; presumably marks entries
         * removed via unput() for compactMoved() — confirm in StoreBuffer.cpp.
         */
        CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
        CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }
    };

    /* Reference to a location that holds a Value. */
    class ValueEdge
    {
        friend class StoreBuffer;
        friend class StoreBuffer::MonoTypeBuffer<ValueEdge>;
        friend class StoreBuffer::RelocatableMonoTypeBuffer<ValueEdge>;

        Value *edge;

        ValueEdge(Value *v) : edge(v) {}

        bool operator==(const ValueEdge &other) const { return edge == other.edge; }
        bool operator!=(const ValueEdge &other) const { return edge != other.edge; }

        /* The GC thing this Value holds, or NULL for non-GC-thing values. */
        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : NULL; }
        void *location() const { return (void *)edge; }

        bool inRememberedSet(Nursery *n) {
            return !n->isInside(edge) && n->isInside(deref());
        }

        bool isNullEdge() const {
            return !deref();
        }

        /* Low-bit tagging, as for CellPtrEdge above. */
        ValueEdge tagged() const { return ValueEdge((Value *)(uintptr_t(edge) | 1)); }
        ValueEdge untagged() const { return ValueEdge((Value *)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }
    };

    /* Reference to a slot or dense element of a JSObject, by index. */
    struct SlotEdge
    {
        friend class StoreBuffer;
        friend class StoreBuffer::MonoTypeBuffer<SlotEdge>;

        JSObject *object;  /* Owning object. */
        uint32_t offset;   /* Slot index or dense-element index. */

        SlotEdge(JSObject *object, uint32_t offset) : object(object), offset(offset) {}

        bool operator==(const SlotEdge &other) const {
            return object == other.object && offset == other.offset;
        }
        bool operator!=(const SlotEdge &other) const {
            return object != other.object || offset != other.offset;
        }

        /*
         * Resolve to the heap location, or NULL when |offset| is no longer
         * within the object's initialized length / slot span.
         */
        HeapSlot *slotLocation() const {
            if (object->isDenseArray()) {
                if (offset >= object->getDenseArrayInitializedLength())
                    return NULL;
                return (HeapSlot *)&object->getDenseArrayElement(offset);
            }
            if (offset >= object->slotSpan())
                return NULL;
            return &object->getSlotRef(offset);
        }

        /* The GC thing stored in the slot, or NULL. */
        void *deref() const {
            HeapSlot *loc = slotLocation();
            return (loc && loc->isGCThing()) ? loc->toGCThing() : NULL;
        }

        void *location() const {
            return (void *)slotLocation();
        }

        bool inRememberedSet(Nursery *n) {
            return !n->isInside(object) && n->isInside(deref());
        }

        bool isNullEdge() const {
            return !deref();
        }
    };

    /* One mono-typed sub-buffer per commonly-written edge kind. */
    MonoTypeBuffer<ValueEdge> bufferVal;
    MonoTypeBuffer<CellPtrEdge> bufferCell;
    MonoTypeBuffer<SlotEdge> bufferSlot;
    RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
    RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
    GenericBuffer bufferGeneric;

    Nursery *nursery;

    void *buffer;      /* Backing storage shared by all sub-buffers. */

    bool overflowed;   /* Set when any sub-buffer runs out of space. */
    bool enabled;

    /* For the verifier. */
    EdgeSet edgeSet;

    /* For use by our owned buffers. */
    void setOverflowed() { overflowed = true; }

  public:
    StoreBuffer(Nursery *n)
      : bufferVal(this, n), bufferCell(this, n), bufferSlot(this, n),
        bufferRelocVal(this, n), bufferRelocCell(this, n), bufferGeneric(this, n),
        nursery(n), buffer(NULL), overflowed(false), enabled(false)
    {}

    bool enable();
    void disable();
    bool isEnabled() { return enabled; }

    /* Get the overflowed status. */
    bool hasOverflowed() const { return overflowed; }

    /* Insert a single edge into the buffer/remembered set. */
    void putValue(Value *v) {
        bufferVal.put(v);
    }
    void putCell(Cell **o) {
        bufferCell.put(o);
    }
    void putSlot(JSObject *obj, uint32_t slot) {
        bufferSlot.put(SlotEdge(obj, slot));
    }

    /* Insert or update a single edge in the Relocatable buffer. */
    void putRelocatableValue(Value *v) {
        bufferRelocVal.put(v);
    }
    void putRelocatableCell(Cell **c) {
        bufferRelocCell.put(c);
    }
    void removeRelocatableValue(Value *v) {
        bufferRelocVal.unput(v);
    }
    void removeRelocatableCell(Cell **c) {
        bufferRelocCell.unput(c);
    }

    /* Insert an entry into the generic buffer. */
    template <typename T>
    void putGeneric(const T &t) {
        bufferGeneric.put(t);
    }

    /* For the verifier. */
    bool coalesceForVerification();
    void releaseVerificationData();
    bool containsEdgeAt(void *loc) const;
};
} /* namespace gc */
} /* namespace js */
#endif /* jsgc_storebuffer_h___ */
#endif /* JSGC_GENERATIONAL */

View File

@ -1,69 +1,68 @@
/* js-config.h. Generated automatically by configure. */
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef js_config_h___
#define js_config_h___
/* Definitions set at build time that affect SpiderMonkey's public API.
This header file is generated by the SpiderMonkey configure script,
and installed along with jsapi.h. */
/* Define to 1 if SpiderMonkey should support multi-threaded clients. */
/* #undef JS_THREADSAFE */
/* Define to 1 if SpiderMonkey should include ctypes support. */
/* #undef JS_HAS_CTYPES */
/* Define to 1 if SpiderMonkey should support the ability to perform
entirely too much GC. */
/* #undef JS_GC_ZEAL */
/* Define to 1 if the <endian.h> header is present and
useable. See jscpucfg.h. */
/* #undef JS_HAVE_ENDIAN_H */
/* Define to 1 if the <machine/endian.h> header is present and
useable. See jscpucfg.h. */
/* #undef JS_HAVE_MACHINE_ENDIAN_H */
/* Define to 1 if the <sys/isa_defs.h> header is present and
useable. See jscpucfg.h. */
/* #undef JS_HAVE_SYS_ISA_DEFS_H */
/* Define to 1 if the <sys/types.h> defines int8_t, etc. */
/* #undef JS_SYS_TYPES_H_DEFINES_EXACT_SIZE_TYPES */
/* Define to 1 if the N-byte __intN types are defined by the
compiler. */
#define JS_HAVE___INTN 1
/* Define to 1 if #including <stddef.h> provides definitions for
intptr_t and uintptr_t. */
#define JS_STDDEF_H_HAS_INTPTR_T 1
/* Define to 1 if #including <crtdefs.h> provides definitions for
intptr_t and uintptr_t. */
/* #undef JS_CRTDEFS_H_HAS_INTPTR_T */
/* The configure script defines these if it doesn't #define
JS_HAVE_STDINT_H. */
/* #undef JS_INT8_TYPE */
/* #undef JS_INT16_TYPE */
/* #undef JS_INT32_TYPE */
/* #undef JS_INT64_TYPE */
/* #undef JS_INTPTR_TYPE */
/* #undef JS_BYTES_PER_WORD */
/* Some mozilla code uses JS-friend APIs that depend on JS_METHODJIT being
correct. */
/* #undef JS_METHODJIT */
/* Define to 1 to enable support for E4X (ECMA-357), 0 to disable it. */
#define JS_HAS_XML_SUPPORT 1
#endif /* js_config_h___ */
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=78:
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef js_config_h___
#define js_config_h___
/* Definitions set at build time that affect SpiderMonkey's public API.
This header file is generated by the SpiderMonkey configure script,
and installed along with jsapi.h. */
/* Define to 1 if SpiderMonkey should support multi-threaded clients. */
/* #undef JS_THREADSAFE */
/* Define to 1 if SpiderMonkey should include ctypes support. */
/* #undef JS_HAS_CTYPES */
/* Define to 1 if SpiderMonkey should support the ability to perform
entirely too much GC. */
/* #undef JS_GC_ZEAL */
/* Define to 1 if the <endian.h> header is present and
useable. See jscpucfg.h. */
/* #undef JS_HAVE_ENDIAN_H */
/* Define to 1 if the <machine/endian.h> header is present and
useable. See jscpucfg.h. */
/* #undef JS_HAVE_MACHINE_ENDIAN_H */
/* Define to 1 if the <sys/isa_defs.h> header is present and
useable. See jscpucfg.h. */
/* #undef JS_HAVE_SYS_ISA_DEFS_H */
/* Define to 1 if the <sys/types.h> defines int8_t, etc. */
/* #undef JS_SYS_TYPES_H_DEFINES_EXACT_SIZE_TYPES */
/* Define to 1 if the N-byte __intN types are defined by the
compiler. */
#define JS_HAVE___INTN 1
/* Define to 1 if #including <stddef.h> provides definitions for
intptr_t and uintptr_t. */
#define JS_STDDEF_H_HAS_INTPTR_T 1
/* Define to 1 if #including <crtdefs.h> provides definitions for
intptr_t and uintptr_t. */
/* #undef JS_CRTDEFS_H_HAS_INTPTR_T */
/* The configure script defines these if it doesn't #define
JS_HAVE_STDINT_H. */
/* #undef JS_INT8_TYPE */
/* #undef JS_INT16_TYPE */
/* #undef JS_INT32_TYPE */
/* #undef JS_INT64_TYPE */
/* #undef JS_INTPTR_TYPE */
/* #undef JS_BYTES_PER_WORD */
/* Some mozilla code uses JS-friend APIs that depend on JS_METHODJIT being
correct. */
/* #undef JS_METHODJIT */
/* Define to 1 to enable support for E4X (ECMA-357), 0 to disable it. */
#define JS_HAS_XML_SUPPORT 1
#endif /* js_config_h___ */

16
scripting/javascript/spidermonkey-win32/include/js.msg Normal file → Executable file
View File

@ -116,8 +116,8 @@ MSG_DEF(JSMSG_UNMATCHED_RIGHT_PAREN, 62, 0, JSEXN_SYNTAXERR, "unmatched ) in r
MSG_DEF(JSMSG_TOO_BIG_TO_ENCODE, 63, 0, JSEXN_INTERNALERR, "data are to big to encode")
MSG_DEF(JSMSG_ARG_INDEX_OUT_OF_RANGE, 64, 1, JSEXN_RANGEERR, "argument {0} accesses an index that is out of range")
MSG_DEF(JSMSG_SPREAD_TOO_LARGE, 65, 0, JSEXN_RANGEERR, "array too large due to spread operand(s)")
MSG_DEF(JSMSG_UNUSED66, 66, 0, JSEXN_NONE, "")
MSG_DEF(JSMSG_UNUSED67, 67, 0, JSEXN_NONE, "")
MSG_DEF(JSMSG_SOURCE_TOO_LONG, 66, 0, JSEXN_RANGEERR, "source is too long")
MSG_DEF(JSMSG_BAD_WEAKMAP_KEY, 67, 0, JSEXN_TYPEERR, "cannot use the given object as a weak map key")
MSG_DEF(JSMSG_BAD_SCRIPT_MAGIC, 68, 0, JSEXN_INTERNALERR, "bad script XDR magic number")
MSG_DEF(JSMSG_PAREN_BEFORE_FORMAL, 69, 0, JSEXN_SYNTAXERR, "missing ( before formal parameters")
MSG_DEF(JSMSG_MISSING_FORMAL, 70, 0, JSEXN_SYNTAXERR, "missing formal parameter")
@ -126,7 +126,7 @@ MSG_DEF(JSMSG_CURLY_BEFORE_BODY, 72, 0, JSEXN_SYNTAXERR, "missing { before
MSG_DEF(JSMSG_CURLY_AFTER_BODY, 73, 0, JSEXN_SYNTAXERR, "missing } after function body")
MSG_DEF(JSMSG_PAREN_BEFORE_COND, 74, 0, JSEXN_SYNTAXERR, "missing ( before condition")
MSG_DEF(JSMSG_PAREN_AFTER_COND, 75, 0, JSEXN_SYNTAXERR, "missing ) after condition")
MSG_DEF(JSMSG_DESTRUCT_DUP_ARG, 76, 0, JSEXN_SYNTAXERR, "duplicate argument is mixed with destructuring pattern")
MSG_DEF(JSMSG_BAD_DUP_ARGS, 76, 0, JSEXN_SYNTAXERR, "duplicate argument names not allowed in this context")
MSG_DEF(JSMSG_NAME_AFTER_DOT, 77, 0, JSEXN_SYNTAXERR, "missing name after . operator")
MSG_DEF(JSMSG_BRACKET_IN_INDEX, 78, 0, JSEXN_SYNTAXERR, "missing ] in index expression")
MSG_DEF(JSMSG_XML_WHOLE_PROGRAM, 79, 0, JSEXN_SYNTAXERR, "XML can't be the whole program")
@ -196,7 +196,7 @@ MSG_DEF(JSMSG_BAD_CLONE_FUNOBJ_SCOPE, 142, 0, JSEXN_TYPEERR, "bad cloned functio
MSG_DEF(JSMSG_SHARPVAR_TOO_BIG, 143, 0, JSEXN_SYNTAXERR, "overlarge sharp variable number")
MSG_DEF(JSMSG_ILLEGAL_CHARACTER, 144, 0, JSEXN_SYNTAXERR, "illegal character")
MSG_DEF(JSMSG_BAD_OCTAL, 145, 1, JSEXN_SYNTAXERR, "{0} is not a legal ECMA-262 octal constant")
MSG_DEF(JSMSG_BAD_INDIRECT_CALL, 146, 1, JSEXN_EVALERR, "function {0} must be called directly, and not by way of a function of another name")
MSG_DEF(JSMSG_UNUSED146, 146, 0, JSEXN_NONE, "")
MSG_DEF(JSMSG_UNCAUGHT_EXCEPTION, 147, 1, JSEXN_INTERNALERR, "uncaught exception: {0}")
MSG_DEF(JSMSG_INVALID_BACKREF, 148, 0, JSEXN_SYNTAXERR, "non-octal digit in an escape sequence that doesn't match a back-reference")
MSG_DEF(JSMSG_BAD_BACKREF, 149, 0, JSEXN_SYNTAXERR, "back-reference exceeds number of capturing parentheses")
@ -352,3 +352,11 @@ MSG_DEF(JSMSG_FUNCTION_ARGUMENTS_AND_REST, 298, 0, JSEXN_ERR, "the 'arguments' p
MSG_DEF(JSMSG_REST_WITH_DEFAULT, 299, 0, JSEXN_SYNTAXERR, "rest parameter may not have a default")
MSG_DEF(JSMSG_NONDEFAULT_FORMAL_AFTER_DEFAULT, 300, 0, JSEXN_SYNTAXERR, "parameter(s) with default followed by parameter without default")
MSG_DEF(JSMSG_YIELD_IN_DEFAULT, 301, 0, JSEXN_SYNTAXERR, "yield in default expression")
MSG_DEF(JSMSG_INTRINSIC_NOT_DEFINED, 302, 1, JSEXN_REFERENCEERR, "no intrinsic function {0}")
MSG_DEF(JSMSG_ALREADY_HAS_SOURCEMAP, 303, 1, JSEXN_ERR, "{0} is being assigned a source map, yet already has one")
MSG_DEF(JSMSG_PAR_ARRAY_BAD_ARG, 304, 1, JSEXN_TYPEERR, "invalid ParallelArray{0} argument")
MSG_DEF(JSMSG_PAR_ARRAY_BAD_PARTITION, 305, 0, JSEXN_ERR, "argument must be divisible by outermost dimension")
MSG_DEF(JSMSG_PAR_ARRAY_REDUCE_EMPTY, 306, 0, JSEXN_ERR, "cannot reduce empty ParallelArray object")
MSG_DEF(JSMSG_PAR_ARRAY_ALREADY_FLAT, 307, 0, JSEXN_ERR, "cannot flatten 1-dimensional ParallelArray object")
MSG_DEF(JSMSG_PAR_ARRAY_SCATTER_CONFLICT, 308, 0, JSEXN_ERR, "no conflict resolution function provided")
MSG_DEF(JSMSG_PAR_ARRAY_SCATTER_BOUNDS, 309, 0, JSEXN_ERR, "index in scatter vector out of bounds")

View File

@ -15,9 +15,6 @@ namespace js {
class TempAllocPolicy;
/* Integral types for all hash functions. */
typedef uint32_t HashNumber;
/*****************************************************************************/
namespace detail {
@ -217,9 +214,6 @@ class HashTable : private AllocPolicy
* this operation until the next call to |popFront()|.
*/
void rekeyFront(const Lookup &l, const Key &k) {
JS_ASSERT(&k != &HashPolicy::getKey(this->cur->t));
if (match(*this->cur, l))
return;
typename HashTableEntry<T>::NonConstT t = this->cur->t;
HashPolicy::setKey(t, const_cast<Key &>(k));
table.remove(*this->cur);
@ -288,7 +282,6 @@ class HashTable : private AllocPolicy
static const uint8_t sMinAlphaFrac = 64; /* (0x100 * .25) taken from jsdhash.h */
static const uint8_t sMaxAlphaFrac = 192; /* (0x100 * .75) taken from jsdhash.h */
static const uint8_t sInvMaxAlpha = 171; /* (ceil(0x100 / .75) >> 1) */
static const HashNumber sGoldenRatio = 0x9E3779B9U; /* taken from jsdhash.h */
static const HashNumber sFreeKey = Entry::sFreeKey;
static const HashNumber sRemovedKey = Entry::sRemovedKey;
static const HashNumber sCollisionBit = Entry::sCollisionBit;
@ -308,10 +301,7 @@ class HashTable : private AllocPolicy
static HashNumber prepareHash(const Lookup& l)
{
HashNumber keyHash = HashPolicy::hash(l);
/* Improve keyHash distribution. */
keyHash *= sGoldenRatio;
HashNumber keyHash = ScrambleHashCode(HashPolicy::hash(l));
/* Avoid reserved hash codes. */
if (!isLiveHash(keyHash))
@ -1003,6 +993,9 @@ template <class Key,
class AllocPolicy = TempAllocPolicy>
class HashMap
{
typedef typename tl::StaticAssert<tl::IsRelocatableHeapType<Key>::result>::result keyAssert;
typedef typename tl::StaticAssert<tl::IsRelocatableHeapType<Value>::result>::result valAssert;
public:
typedef typename HashPolicy::Lookup Lookup;

View File

View File

@ -56,6 +56,7 @@ struct RuntimeSizes
, gcMarker(0)
, mathCache(0)
, scriptFilenames(0)
, scriptSources(0)
, compartmentObjects(0)
{}
@ -71,6 +72,7 @@ struct RuntimeSizes
size_t gcMarker;
size_t mathCache;
size_t scriptFilenames;
size_t scriptSources;
// This is the exception to the "RuntimeSizes doesn't measure things within
// compartments" rule. We combine the sizes of all the JSCompartment

View File

View File

@ -355,6 +355,26 @@ JS_FLOOR_LOG2W(size_t n)
return js_FloorLog2wImpl(n);
}
/*
* JS_ROTATE_LEFT32
*
* There is no rotate operation in the C Language so the construct (a << 4) |
* (a >> 28) is used instead. Most compilers convert this to a rotate
* instruction but some versions of MSVC don't without a little help. To get
* MSVC to generate a rotate instruction, we have to use the _rotl intrinsic
* and use a pragma to make _rotl inline.
*
* MSVC in VS2005 will do an inline rotate instruction on the above construct.
*/
#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_AMD64) || \
defined(_M_X64))
#include <stdlib.h>
#pragma intrinsic(_rotl)
#define JS_ROTATE_LEFT32(a, bits) _rotl(a, bits)
#else
#define JS_ROTATE_LEFT32(a, bits) (((a) << (bits)) | ((a) >> (32 - (bits))))
#endif
JS_END_EXTERN_C
#ifdef __cplusplus
@ -599,11 +619,18 @@ public:
class UnwantedForeground : public Foreground {
};
template <typename T>
struct ScopedDeletePtrTraits
template<typename T>
struct ScopedFreePtrTraits
{
typedef T* type;
static T* empty() { return NULL; }
static void release(T* ptr) { Foreground::free_(ptr); }
};
SCOPED_TEMPLATE(ScopedFreePtr, ScopedFreePtrTraits)
template <typename T>
struct ScopedDeletePtrTraits : public ScopedFreePtrTraits<T>
{
typedef T *type;
static T *empty() { return NULL; }
static void release(T *ptr) { Foreground::delete_(ptr); }
};
SCOPED_TEMPLATE(ScopedDeletePtr, ScopedDeletePtrTraits)
@ -829,20 +856,7 @@ class MoveRef {
explicit MoveRef(T &t) : pointer(&t) { }
T &operator*() const { return *pointer; }
T *operator->() const { return pointer; }
#if defined(__GXX_EXPERIMENTAL_CXX0X__) && defined(__clang__)
/*
* If MoveRef is used in a rvalue position (which is expected), we can
* end up in a situation where, without this ifdef, we would try to pass
* a T& to a move constructor, which fails. It is not clear if the compiler
* should instead use the copy constructor, but for now this lets us build
* with clang. See bug 689066 and llvm.org/pr11003 for the details.
* Note: We can probably remove MoveRef completely once we are comfortable
* using c++11.
*/
operator T&& () const { return static_cast<T&&>(*pointer); }
#else
operator T& () const { return *pointer; }
#endif
private:
T *pointer;
};
@ -895,6 +909,51 @@ RoundUpPow2(size_t x)
return size_t(1) << JS_CEILING_LOG2W(x);
}
/* Integral types for all hash functions. */
typedef uint32_t HashNumber;
const unsigned HashNumberSizeBits = 32;
namespace detail {
/*
* Given a raw hash code, h, return a number that can be used to select a hash
* bucket.
*
* This function aims to produce as uniform an output distribution as possible,
* especially in the most significant (leftmost) bits, even though the input
* distribution may be highly nonrandom, given the constraints that this must
* be deterministic and quick to compute.
*
* Since the leftmost bits of the result are best, the hash bucket index is
* computed by doing ScrambleHashCode(h) / (2^32/N) or the equivalent
* right-shift, not ScrambleHashCode(h) % N or the equivalent bit-mask.
*
* FIXME: OrderedHashTable uses a bit-mask; see bug 775896.
*/
inline HashNumber
ScrambleHashCode(HashNumber h)
{
/*
* Simply returning h would not cause any hash tables to produce wrong
* answers. But it can produce pathologically bad performance: The caller
* right-shifts the result, keeping only the highest bits. The high bits of
* hash codes are very often completely entropy-free. (So are the lowest
* bits.)
*
* So we use Fibonacci hashing, as described in Knuth, The Art of Computer
* Programming, 6.4. This mixes all the bits of the input hash code h.
*
* The value of goldenRatio is taken from the hex
* expansion of the golden ratio, which starts 1.9E3779B9....
* This value is especially good if values with consecutive hash codes
* are stored in a hash table; see Knuth for details.
*/
static const HashNumber goldenRatio = 0x9E3779B9U;
return h * goldenRatio;
}
} /* namespace detail */
} /* namespace js */
namespace JS {
@ -910,7 +969,7 @@ namespace JS {
* a live integer value.
*/
inline void PoisonPtr(uintptr_t *v)
inline void PoisonPtr(void *v)
{
#if defined(JSGC_ROOT_ANALYSIS) && defined(DEBUG)
uint8_t *ptr = (uint8_t *) v + 3;

View File

View File

View File

@ -1 +1 @@
37b6af08d1e6059f152ae515d8d7422a346cf7ed
8a03481ec145a3a0e532637dd52bf80605b7a713

View File

@ -12,12 +12,12 @@
#include "jsalloc.h"
#include "jsapi.h"
#include "jsprvtd.h"
#include "jshash.h"
#include "jspubtd.h"
#include "jslock.h"
#include "gc/Barrier.h"
#include "js/HashTable.h"
#include "mozilla/HashFunctions.h"
struct JSIdArray {
int length;
@ -83,23 +83,15 @@ JSID_TO_ATOM(jsid id)
return (JSAtom *)JSID_TO_STRING(id);
}
JS_STATIC_ASSERT(sizeof(JSHashNumber) == 4);
JS_STATIC_ASSERT(sizeof(js::HashNumber) == 4);
JS_STATIC_ASSERT(sizeof(jsid) == JS_BYTES_PER_WORD);
namespace js {
static JS_ALWAYS_INLINE JSHashNumber
static JS_ALWAYS_INLINE js::HashNumber
HashId(jsid id)
{
JSHashNumber n =
#if JS_BYTES_PER_WORD == 4
JSHashNumber(JSID_BITS(id));
#elif JS_BYTES_PER_WORD == 8
JSHashNumber(JSID_BITS(id)) ^ JSHashNumber(JSID_BITS(id) >> 32);
#else
# error "Unsupported configuration"
#endif
return n * JS_GOLDEN_RATIO;
return HashGeneric(JSID_BITS(id));
}
static JS_ALWAYS_INLINE Value
@ -135,15 +127,6 @@ struct DefaultHasher<jsid>
}
#if JS_BYTES_PER_WORD == 4
# define ATOM_HASH(atom) ((JSHashNumber)(atom) >> 2)
#elif JS_BYTES_PER_WORD == 8
# define ATOM_HASH(atom) (((JSHashNumber)(uintptr_t)(atom) >> 3) ^ \
(JSHashNumber)((uintptr_t)(atom) >> 32))
#else
# error "Unsupported configuration"
#endif
/*
* Return a printable, lossless char[] representation of a string-type atom.
* The lifetime of the result matches the lifetime of bytes.
@ -342,29 +325,28 @@ extern const char js_send_str[];
extern const char js_getter_str[];
extern const char js_setter_str[];
namespace js {
/*
* Initialize atom state. Return true on success, false on failure to allocate
* memory. The caller must zero rt->atomState before calling this function and
* only call it after js_InitGC successfully returns.
*/
extern JSBool
js_InitAtomState(JSRuntime *rt);
InitAtomState(JSRuntime *rt);
/*
* Free and clear atom state including any interned string atoms. This
* function must be called before js_FinishGC.
*/
extern void
js_FinishAtomState(JSRuntime *rt);
FinishAtomState(JSRuntime *rt);
/*
* Atom tracing and garbage collection hooks.
*/
namespace js {
extern void
MarkAtomState(JSTracer *trc, bool markAll);
MarkAtomState(JSTracer *trc);
extern void
SweepAtomState(JSRuntime *rt);
@ -382,58 +364,32 @@ enum InternBehavior
InternAtom = true
};
} /* namespace js */
extern JSAtom *
Atomize(JSContext *cx, const char *bytes, size_t length,
js::InternBehavior ib = js::DoNotInternAtom,
js::FlationCoding fc = js::NormalEncoding);
extern JSAtom *
js_Atomize(JSContext *cx, const char *bytes, size_t length,
js::InternBehavior ib = js::DoNotInternAtom,
js::FlationCoding fc = js::NormalEncoding);
AtomizeChars(JSContext *cx, const jschar *chars, size_t length,
js::InternBehavior ib = js::DoNotInternAtom);
extern JSAtom *
js_AtomizeChars(JSContext *cx, const jschar *chars, size_t length,
js::InternBehavior ib = js::DoNotInternAtom);
extern JSAtom *
js_AtomizeString(JSContext *cx, JSString *str, js::InternBehavior ib = js::DoNotInternAtom);
/*
* Return an existing atom for the given char array or null if the char
* sequence is currently not atomized.
*/
extern JSAtom *
js_GetExistingStringAtom(JSContext *cx, const jschar *chars, size_t length);
#ifdef DEBUG
extern JS_FRIEND_API(void)
js_DumpAtoms(JSContext *cx, FILE *fp);
#endif
namespace js {
AtomizeString(JSContext *cx, JSString *str, js::InternBehavior ib = js::DoNotInternAtom);
inline JSAtom *
ToAtom(JSContext *cx, const js::Value &v);
bool
InternNonIntElementId(JSContext *cx, JSObject *obj, const Value &idval,
jsid *idp, Value *vp);
jsid *idp, MutableHandleValue vp);
inline bool
InternNonIntElementId(JSContext *cx, JSObject *obj, const Value &idval, jsid *idp)
{
Value dummy;
RootedValue dummy(cx);
return InternNonIntElementId(cx, obj, idval, idp, &dummy);
}
/*
* For all unmapped atoms recorded in al, add a mapping from the atom's index
* to its address. map->length must already be set to the number of atoms in
* the list and map->vector must point to pre-allocated memory.
*/
extern void
InitAtomMap(JSContext *cx, AtomIndexMap *indices, HeapPtr<JSAtom> *atoms);
template<XDRMode mode>
bool
XDRAtom(XDRState<mode> *xdr, JSAtom **atomp);

View File

@ -43,6 +43,7 @@ DEFINE_ATOM(call, "call")
DEFINE_ATOM(callee, "callee")
DEFINE_ATOM(caller, "caller")
DEFINE_ATOM(classPrototype, "prototype")
DEFINE_ATOM(columnNumber, "columnNumber")
DEFINE_ATOM(constructor, "constructor")
DEFINE_ATOM(each, "each")
DEFINE_ATOM(eval, "eval")
@ -53,7 +54,8 @@ DEFINE_ATOM(ignoreCase, "ignoreCase")
DEFINE_ATOM(index, "index")
DEFINE_ATOM(input, "input")
DEFINE_ATOM(toISOString, "toISOString")
DEFINE_ATOM(iterator, "__iterator__")
DEFINE_ATOM(iterator, "iterator")
DEFINE_ATOM(iteratorIntrinsic, "__iterator__")
DEFINE_ATOM(join, "join")
DEFINE_ATOM(lastIndex, "lastIndex")
DEFINE_ATOM(length, "length")
@ -121,6 +123,7 @@ DEFINE_PROTOTYPE_ATOM(WeakMap)
DEFINE_ATOM(buffer, "buffer")
DEFINE_ATOM(byteLength, "byteLength")
DEFINE_ATOM(byteOffset, "byteOffset")
DEFINE_ATOM(shape, "shape")
DEFINE_KEYWORD_ATOM(return)
DEFINE_KEYWORD_ATOM(throw)
DEFINE_ATOM(url, "url")
@ -148,3 +151,4 @@ DEFINE_ATOM(unescape, "unescape")
DEFINE_ATOM(uneval, "uneval")
DEFINE_ATOM(unwatch, "unwatch")
DEFINE_ATOM(watch, "watch")
DEFINE_ATOM(_CallFunction, "_CallFunction")

View File

@ -161,35 +161,35 @@ typedef JSBool
(* LookupSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid,
MutableHandleObject objp, MutableHandleShape propp);
typedef JSBool
(* DefineGenericOp)(JSContext *cx, HandleObject obj, HandleId id, const Value *value,
(* DefineGenericOp)(JSContext *cx, HandleObject obj, HandleId id, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* DefinePropOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, const Value *value,
(* DefinePropOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* DefineElementOp)(JSContext *cx, HandleObject obj, uint32_t index, const Value *value,
(* DefineElementOp)(JSContext *cx, HandleObject obj, uint32_t index, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* DefineSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, const Value *value,
(* DefineSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, HandleValue value,
PropertyOp getter, StrictPropertyOp setter, unsigned attrs);
typedef JSBool
(* GenericIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleId id, Value *vp);
(* GenericIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleId id, MutableHandleValue vp);
typedef JSBool
(* PropertyIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandlePropertyName name, Value *vp);
(* PropertyIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandlePropertyName name, MutableHandleValue vp);
typedef JSBool
(* ElementIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, Value *vp);
(* ElementIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, MutableHandleValue vp);
typedef JSBool
(* ElementIfPresentOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, Value *vp, bool* present);
(* ElementIfPresentOp)(JSContext *cx, HandleObject obj, HandleObject receiver, uint32_t index, MutableHandleValue vp, bool* present);
typedef JSBool
(* SpecialIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleSpecialId sid, Value *vp);
(* SpecialIdOp)(JSContext *cx, HandleObject obj, HandleObject receiver, HandleSpecialId sid, MutableHandleValue vp);
typedef JSBool
(* StrictGenericIdOp)(JSContext *cx, HandleObject obj, HandleId id, Value *vp, JSBool strict);
(* StrictGenericIdOp)(JSContext *cx, HandleObject obj, HandleId id, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* StrictPropertyIdOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, Value *vp, JSBool strict);
(* StrictPropertyIdOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* StrictElementIdOp)(JSContext *cx, HandleObject obj, uint32_t index, Value *vp, JSBool strict);
(* StrictElementIdOp)(JSContext *cx, HandleObject obj, uint32_t index, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* StrictSpecialIdOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, Value *vp, JSBool strict);
(* StrictSpecialIdOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* GenericAttributesOp)(JSContext *cx, HandleObject obj, HandleId id, unsigned *attrsp);
typedef JSBool
@ -199,11 +199,11 @@ typedef JSBool
typedef JSBool
(* SpecialAttributesOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, unsigned *attrsp);
typedef JSBool
(* DeletePropertyOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, Value *vp, JSBool strict);
(* DeletePropertyOp)(JSContext *cx, HandleObject obj, HandlePropertyName name, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* DeleteElementOp)(JSContext *cx, HandleObject obj, uint32_t index, Value *vp, JSBool strict);
(* DeleteElementOp)(JSContext *cx, HandleObject obj, uint32_t index, MutableHandleValue vp, JSBool strict);
typedef JSBool
(* DeleteSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, Value *vp, JSBool strict);
(* DeleteSpecialOp)(JSContext *cx, HandleObject obj, HandleSpecialId sid, MutableHandleValue vp, JSBool strict);
typedef JSType
(* TypeOfOp)(JSContext *cx, HandleObject obj);
@ -257,9 +257,22 @@ struct ClassExtension
* WeakMaps use this to override the wrapper disposal optimization.
*/
bool isWrappedNative;
/*
* If an object is used as a key in a weakmap, it may be desirable for the
* garbage collector to keep that object around longer than it otherwise
* would. A common case is when the key is a wrapper around an object in
* another compartment, and we want to avoid collecting the wrapper (and
* removing the weakmap entry) as long as the wrapped object is alive. In
* that case, the wrapped object is returned by the wrapper's
* weakmapKeyDelegateOp hook. As long as the wrapper is used as a weakmap
* key, it will not be collected (and remain in the weakmap) until the
* wrapped object is collected.
*/
JSWeakmapKeyDelegateOp weakmapKeyDelegateOp;
};
#define JS_NULL_CLASS_EXT {NULL,NULL,NULL,NULL,NULL,false}
#define JS_NULL_CLASS_EXT {NULL,NULL,NULL,NULL,NULL,false,NULL}
struct ObjectOps
{

View File

View File

View File

@ -221,12 +221,6 @@ JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fp);
extern JS_PUBLIC_API(void)
JS_SetFrameAnnotation(JSContext *cx, JSStackFrame *fp, void *annotation);
extern JS_PUBLIC_API(JSPrincipals*)
JS_GetPrincipalIfDummyFrame(JSContext *cx, JSStackFrame *fpArg);
extern JS_PUBLIC_API(JSBool)
JS_IsScriptFrame(JSContext *cx, JSStackFrame *fp);
extern JS_PUBLIC_API(JSObject *)
JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp);

View File

View File

@ -22,10 +22,10 @@ extern JS_FRIEND_API(JSString *)
JS_GetAnonymousString(JSRuntime *rt);
extern JS_FRIEND_API(JSObject *)
JS_FindCompilationScope(JSContext *cx, JSObject *obj);
JS_FindCompilationScope(JSContext *cx, JSRawObject obj);
extern JS_FRIEND_API(JSFunction *)
JS_GetObjectFunction(JSObject *obj);
JS_GetObjectFunction(JSRawObject obj);
extern JS_FRIEND_API(JSObject *)
JS_GetGlobalForFrame(JSStackFrame *fp);
@ -37,7 +37,7 @@ extern JS_FRIEND_API(JSObject *)
JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObject *parent);
extern JS_FRIEND_API(uint32_t)
JS_ObjectCountDynamicSlots(JSObject *obj);
JS_ObjectCountDynamicSlots(JSHandleObject obj);
extern JS_FRIEND_API(void)
JS_ShrinkGCBuffers(JSRuntime *rt);
@ -75,13 +75,18 @@ enum {
JS_TELEMETRY_GC_REASON,
JS_TELEMETRY_GC_IS_COMPARTMENTAL,
JS_TELEMETRY_GC_MS,
JS_TELEMETRY_GC_MAX_PAUSE_MS,
JS_TELEMETRY_GC_MARK_MS,
JS_TELEMETRY_GC_SWEEP_MS,
JS_TELEMETRY_GC_MARK_ROOTS_MS,
JS_TELEMETRY_GC_MARK_GRAY_MS,
JS_TELEMETRY_GC_SLICE_MS,
JS_TELEMETRY_GC_MMU_50,
JS_TELEMETRY_GC_RESET,
JS_TELEMETRY_GC_INCREMENTAL_DISABLED,
JS_TELEMETRY_GC_NON_INCREMENTAL
JS_TELEMETRY_GC_NON_INCREMENTAL,
JS_TELEMETRY_GC_SCC_SWEEP_TOTAL_MS,
JS_TELEMETRY_GC_SCC_SWEEP_MAX_PAUSE_MS
};
typedef void
@ -108,7 +113,7 @@ extern JS_FRIEND_API(JSObject *)
JS_CloneObject(JSContext *cx, JSObject *obj, JSObject *proto, JSObject *parent);
extern JS_FRIEND_API(JSBool)
js_GetterOnlyPropertyStub(JSContext *cx, JSHandleObject obj, JSHandleId id, JSBool strict, jsval *vp);
js_GetterOnlyPropertyStub(JSContext *cx, JSHandleObject obj, JSHandleId id, JSBool strict, JSMutableHandleValue vp);
JS_FRIEND_API(void)
js_ReportOverRecursed(JSContext *maybecx);
@ -159,6 +164,8 @@ struct JSFunctionSpecWithHelp {
#define JS_FN_HELP(name,call,nargs,flags,usage,help) \
{name, call, nargs, (flags) | JSPROP_ENUMERATE | JSFUN_STUB_GSOPS, usage, help}
#define JS_FS_HELP_END \
{NULL, NULL, 0, 0, NULL, NULL}
extern JS_FRIEND_API(bool)
JS_DefineFunctionsWithHelp(JSContext *cx, JSObject *obj, const JSFunctionSpecWithHelp *fs);
@ -169,6 +176,11 @@ JS_END_EXTERN_C
#ifdef __cplusplus
typedef bool (* JS_SourceHook)(JSContext *cx, JSScript *script, jschar **src, uint32_t *length);
extern JS_FRIEND_API(void)
JS_SetSourceHook(JSRuntime *rt, JS_SourceHook hook);
namespace js {
struct RuntimeFriendFields {
@ -213,7 +225,7 @@ class JS_FRIEND_API(AutoSwitchCompartment) {
public:
AutoSwitchCompartment(JSContext *cx, JSCompartment *newCompartment
JS_GUARD_OBJECT_NOTIFIER_PARAM);
AutoSwitchCompartment(JSContext *cx, JSObject *target JS_GUARD_OBJECT_NOTIFIER_PARAM);
AutoSwitchCompartment(JSContext *cx, JSHandleObject target JS_GUARD_OBJECT_NOTIFIER_PARAM);
~AutoSwitchCompartment();
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
@ -274,6 +286,9 @@ typedef void
extern JS_FRIEND_API(void)
VisitGrayWrapperTargets(JSCompartment *comp, GCThingCallback *callback, void *closure);
extern JS_FRIEND_API(JSObject *)
GetWeakmapKeyDelegate(JSObject *key);
/*
* Shadow declarations of JS internal structures, for access by inline access
* functions below. Do not use these structures in any other way. When adding
@ -318,6 +333,16 @@ struct Object {
}
};
struct Function {
Object base;
uint16_t nargs;
uint16_t flags;
/* Used only for natives */
Native native;
const JSJitInfo *jitinfo;
void *_1;
};
struct Atom {
size_t _;
const jschar *chars;
@ -339,35 +364,35 @@ extern JS_FRIEND_DATA(js::Class) XMLClass;
extern JS_FRIEND_DATA(js::Class) ObjectClass;
inline js::Class *
GetObjectClass(const JSObject *obj)
GetObjectClass(RawObject obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->shape->base->clasp;
}
inline JSClass *
GetObjectJSClass(const JSObject *obj)
GetObjectJSClass(RawObject obj)
{
return js::Jsvalify(GetObjectClass(obj));
}
JS_FRIEND_API(bool)
IsScopeObject(JSObject *obj);
IsScopeObject(RawObject obj);
inline JSObject *
GetObjectParent(JSObject *obj)
GetObjectParent(RawObject obj)
{
JS_ASSERT(!IsScopeObject(obj));
return reinterpret_cast<shadow::Object*>(obj)->shape->base->parent;
}
JS_FRIEND_API(JSObject *)
GetObjectParentMaybeScope(JSObject *obj);
GetObjectParentMaybeScope(RawObject obj);
JS_FRIEND_API(JSObject *)
GetGlobalForObjectCrossCompartment(JSObject *obj);
GetGlobalForObjectCrossCompartment(RawObject obj);
JS_FRIEND_API(void)
NotifyAnimationActivity(JSObject *obj);
NotifyAnimationActivity(RawObject obj);
JS_FRIEND_API(bool)
IsOriginalScriptFunction(JSFunction *fun);
@ -391,19 +416,19 @@ InitClassWithReserved(JSContext *cx, JSObject *obj, JSObject *parent_proto,
JSPropertySpec *static_ps, JSFunctionSpec *static_fs);
JS_FRIEND_API(const Value &)
GetFunctionNativeReserved(JSObject *fun, size_t which);
GetFunctionNativeReserved(RawObject fun, size_t which);
JS_FRIEND_API(void)
SetFunctionNativeReserved(JSObject *fun, size_t which, const Value &val);
SetFunctionNativeReserved(RawObject fun, size_t which, const Value &val);
inline JSObject *
GetObjectProto(JSObject *obj)
GetObjectProto(RawObject obj)
{
return reinterpret_cast<const shadow::Object*>(obj)->type->proto;
}
inline void *
GetObjectPrivate(JSObject *obj)
GetObjectPrivate(RawObject obj)
{
const shadow::Object *nobj = reinterpret_cast<const shadow::Object*>(obj);
void **addr = reinterpret_cast<void**>(&nobj->fixedSlots()[nobj->numFixedSlots()]);
@ -415,17 +440,17 @@ GetObjectPrivate(JSObject *obj)
* within the maximum capacity for the object's fixed slots).
*/
inline const Value &
GetReservedSlot(const JSObject *obj, size_t slot)
GetReservedSlot(RawObject obj, size_t slot)
{
JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj)));
return reinterpret_cast<const shadow::Object *>(obj)->slotRef(slot);
}
JS_FRIEND_API(void)
SetReservedSlotWithBarrier(JSObject *obj, size_t slot, const Value &value);
SetReservedSlotWithBarrier(RawObject obj, size_t slot, const Value &value);
inline void
SetReservedSlot(JSObject *obj, size_t slot, const Value &value)
SetReservedSlot(RawObject obj, size_t slot, const Value &value)
{
JS_ASSERT(slot < JSCLASS_RESERVED_SLOTS(GetObjectClass(obj)));
shadow::Object *sobj = reinterpret_cast<shadow::Object *>(obj);
@ -436,22 +461,15 @@ SetReservedSlot(JSObject *obj, size_t slot, const Value &value)
}
JS_FRIEND_API(uint32_t)
GetObjectSlotSpan(JSObject *obj);
GetObjectSlotSpan(RawObject obj);
inline const Value &
GetObjectSlot(JSObject *obj, size_t slot)
GetObjectSlot(RawObject obj, size_t slot)
{
JS_ASSERT(slot < GetObjectSlotSpan(obj));
return reinterpret_cast<const shadow::Object *>(obj)->slotRef(slot);
}
inline Shape *
GetObjectShape(JSObject *obj)
{
shadow::Shape *shape = reinterpret_cast<const shadow::Object*>(obj)->shape;
return reinterpret_cast<Shape *>(shape);
}
inline const jschar *
GetAtomChars(JSAtom *atom)
{
@ -465,19 +483,19 @@ AtomToLinearString(JSAtom *atom)
}
static inline js::PropertyOp
CastAsJSPropertyOp(JSObject *object)
CastAsJSPropertyOp(RawObject object)
{
return JS_DATA_TO_FUNC_PTR(js::PropertyOp, object);
}
static inline js::StrictPropertyOp
CastAsJSStrictPropertyOp(JSObject *object)
CastAsJSStrictPropertyOp(RawObject object)
{
return JS_DATA_TO_FUNC_PTR(js::StrictPropertyOp, object);
}
JS_FRIEND_API(bool)
GetPropertyNames(JSContext *cx, JSObject *obj, unsigned flags, js::AutoIdVector *props);
GetPropertyNames(JSContext *cx, RawObject obj, unsigned flags, js::AutoIdVector *props);
JS_FRIEND_API(bool)
GetGeneric(JSContext *cx, JSObject *obj, JSObject *receiver, jsid id, Value *vp);
@ -489,7 +507,7 @@ JS_FRIEND_API(void)
SetPreserveWrapperCallback(JSRuntime *rt, PreserveWrapperCallback callback);
JS_FRIEND_API(bool)
IsObjectInContextCompartment(const JSObject *obj, const JSContext *cx);
IsObjectInContextCompartment(RawObject obj, const JSContext *cx);
/*
* NB: these flag bits are encoded into the bytecode stream in the immediate
@ -542,19 +560,66 @@ GetPCCountScriptContents(JSContext *cx, size_t script);
*
* For more detailed information, see vm/SPSProfiler.h
*/
struct ProfileEntry {
class ProfileEntry
{
/*
* These two fields are marked as 'volatile' so that the compiler doesn't
* re-order instructions which modify them. The operation in question is:
* All fields are marked volatile to prevent the compiler from re-ordering
* instructions. Namely this sequence:
*
* stack[i].string = str;
* (*size)++;
* entry[size] = ...;
* size++;
*
* If the size increment were re-ordered before the store of the string,
* then if sampling occurred there would be a bogus entry on the stack.
* If the size modification were somehow reordered before the stores, then
* if a sample were taken it would be examining bogus information.
*
* A ProfileEntry represents both a C++ profile entry and a JS one. Both use
* the string as a description, but JS uses the sp as NULL to indicate that
* it is a JS entry. The script_ is then only ever examined for a JS entry,
* and the idx is used by both, but with different meanings.
*/
const char * volatile string;
void * volatile sp;
const char * volatile string; // Descriptive string of this entry
void * volatile sp; // Relevant stack pointer for the entry
JSScript * volatile script_; // if js(), non-null script which is running
int32_t volatile idx; // if js(), idx of pc, otherwise line number
public:
/*
* All of these methods are marked with the 'volatile' keyword because SPS's
* representation of the stack is stored such that all ProfileEntry
* instances are volatile. These methods would not be available unless they
* were marked as volatile as well
*/
bool js() volatile {
JS_ASSERT_IF(sp == NULL, script_ != NULL);
return sp == NULL;
}
uint32_t line() volatile { JS_ASSERT(!js()); return idx; }
JSScript *script() volatile { JS_ASSERT(js()); return script_; }
void *stackAddress() volatile { return sp; }
const char *label() volatile { return string; }
void setLine(uint32_t line) volatile { JS_ASSERT(!js()); idx = line; }
void setLabel(const char *string) volatile { this->string = string; }
void setStackAddress(void *sp) volatile { this->sp = sp; }
void setScript(JSScript *script) volatile { script_ = script; }
/* we can't know the layout of JSScript, so look in vm/SPSProfiler.cpp */
JS_FRIEND_API(jsbytecode *) pc() volatile;
JS_FRIEND_API(void) setPC(jsbytecode *pc) volatile;
static size_t offsetOfString() { return offsetof(ProfileEntry, string); }
static size_t offsetOfStackAddress() { return offsetof(ProfileEntry, sp); }
static size_t offsetOfPCIdx() { return offsetof(ProfileEntry, idx); }
static size_t offsetOfScript() { return offsetof(ProfileEntry, script_); }
/*
* The index used in the entry can either be a line number or the offset of
* a pc into a script's code. To signify a NULL pc, use a -1 index. This is
* checked against in pc() and setPC() to set/get the right pc.
*/
static const int32_t NullPCIndex = -1;
};
JS_FRIEND_API(void)
@ -564,6 +629,9 @@ SetRuntimeProfilingStack(JSRuntime *rt, ProfileEntry *stack, uint32_t *size,
JS_FRIEND_API(void)
EnableRuntimeProfilingStack(JSRuntime *rt, bool enabled);
JS_FRIEND_API(jsbytecode*)
ProfilingGetPC(JSRuntime *rt, JSScript *script, void *ip);
#ifdef JS_THREADSAFE
JS_FRIEND_API(void *)
GetOwnerThread(const JSContext *cx);
@ -624,6 +692,7 @@ SizeOfJSContext();
D(DEBUG_GC) \
D(DEBUG_MODE_GC) \
D(TRANSPLANT) \
D(RESET) \
\
/* Reasons from Firefox */ \
D(DOM_WINDOW_UTILS) \
@ -695,7 +764,7 @@ extern JS_FRIEND_API(void)
ShrinkingGC(JSRuntime *rt, gcreason::Reason reason);
extern JS_FRIEND_API(void)
IncrementalGC(JSRuntime *rt, gcreason::Reason reason);
IncrementalGC(JSRuntime *rt, gcreason::Reason reason, int64_t millis = 0);
extern JS_FRIEND_API(void)
FinishIncrementalGC(JSRuntime *rt, gcreason::Reason reason);
@ -733,6 +802,30 @@ typedef void
extern JS_FRIEND_API(GCSliceCallback)
SetGCSliceCallback(JSRuntime *rt, GCSliceCallback callback);
typedef void
(* AnalysisPurgeCallback)(JSRuntime *rt, JSFlatString *desc);
extern JS_FRIEND_API(AnalysisPurgeCallback)
SetAnalysisPurgeCallback(JSRuntime *rt, AnalysisPurgeCallback callback);
/* Was the most recent GC run incrementally? */
extern JS_FRIEND_API(bool)
WasIncrementalGC(JSRuntime *rt);
typedef JSBool
(* DOMInstanceClassMatchesProto)(JSHandleObject protoObject, uint32_t protoID,
uint32_t depth);
struct JSDOMCallbacks {
DOMInstanceClassMatchesProto instanceClassMatchesProto;
};
typedef struct JSDOMCallbacks DOMCallbacks;
extern JS_FRIEND_API(void)
SetDOMCallbacks(JSRuntime *rt, const DOMCallbacks *callbacks);
extern JS_FRIEND_API(const DOMCallbacks *)
GetDOMCallbacks(JSRuntime *rt);
/*
* Signals a good place to do an incremental slice, because the browser is
* drawing a frame.
@ -753,7 +846,7 @@ extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeeded(JSContext *cx);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnObject(JSObject *obj);
IsIncrementalBarrierNeededOnObject(RawObject obj);
extern JS_FRIEND_API(bool)
IsIncrementalBarrierNeededOnScript(JSScript *obj);
@ -821,21 +914,68 @@ CastToJSFreeOp(FreeOp *fop)
/* Implemented in jsexn.cpp. */
/*
* Get an error type name from a number.
* If no exception is associated, return NULL.
* Get an error type name from a JSExnType constant.
* Returns NULL for invalid arguments and JSEXN_INTERNALERR
*/
extern JS_FRIEND_API(const jschar*)
GetErrorTypeNameFromNumber(JSContext* cx, const unsigned errorNumber);
GetErrorTypeName(JSContext* cx, int16_t exnType);
/* Implemented in jswrapper.cpp. */
typedef enum NukedGlobalHandling {
NukeForGlobalObject,
DontNukeForGlobalObject
} NukedGlobalHandling;
typedef enum NukeReferencesToWindow {
NukeWindowReferences,
DontNukeWindowReferences
} NukeReferencesToWindow;
/*
* These filters are designed to be ephemeral stack classes, and thus don't
* do any rooting or holding of their members.
*/
struct CompartmentFilter {
virtual bool match(JSCompartment *c) const = 0;
};
struct AllCompartments : public CompartmentFilter {
virtual bool match(JSCompartment *c) const { return true; }
};
struct ContentCompartmentsOnly : public CompartmentFilter {
virtual bool match(JSCompartment *c) const {
return !IsSystemCompartment(c);
}
};
struct ChromeCompartmentsOnly : public CompartmentFilter {
virtual bool match(JSCompartment *c) const {
return IsSystemCompartment(c);
}
};
struct SingleCompartment : public CompartmentFilter {
JSCompartment *ours;
SingleCompartment(JSCompartment *c) : ours(c) {}
virtual bool match(JSCompartment *c) const { return c == ours; }
};
struct CompartmentsWithPrincipals : public CompartmentFilter {
JSPrincipals *principals;
CompartmentsWithPrincipals(JSPrincipals *p) : principals(p) {}
virtual bool match(JSCompartment *c) const {
return JS_GetCompartmentPrincipals(c) == principals;
}
};
extern JS_FRIEND_API(JSBool)
NukeChromeCrossCompartmentWrappersForGlobal(JSContext *cx, JSObject *obj,
NukedGlobalHandling nukeGlobal);
NukeCrossCompartmentWrappers(JSContext* cx,
const CompartmentFilter& sourceFilter,
const CompartmentFilter& targetFilter,
NukeReferencesToWindow nukeReferencesToWindow);
/* Specify information about ListBase proxies in the DOM, for use by ICs. */
JS_FRIEND_API(void)
SetListBaseInformation(void *listBaseHandlerFamily, uint32_t listBaseExpandoSlot);
void *GetListBaseHandlerFamily();
uint32_t GetListBaseExpandoSlot();
} /* namespace js */
@ -851,7 +991,7 @@ extern JS_FRIEND_API(JSBool)
js_DateIsValid(JSContext *cx, JSObject* obj);
extern JS_FRIEND_API(double)
js_DateGetMsecSinceEpoch(JSContext *cx, JSObject *obj);
js_DateGetMsecSinceEpoch(JSContext *cx, JSRawObject obj);
/* Implemented in jscntxt.cpp. */
@ -1043,6 +1183,35 @@ JS_IsFloat32Array(JSObject *obj, JSContext *cx);
extern JS_FRIEND_API(JSBool)
JS_IsFloat64Array(JSObject *obj, JSContext *cx);
/*
* Unwrap Typed arrays all at once. Return NULL without throwing if the object
* cannot be viewed as the correct typed array, or the typed array object on
* success, filling both outparameters.
*/
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsInt8Array(JSContext *cx, JSObject *obj, uint32_t *length, int8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint8Array(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint8ClampedArray(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsInt16Array(JSContext *cx, JSObject *obj, uint32_t *length, int16_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint16Array(JSContext *cx, JSObject *obj, uint32_t *length, uint16_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsInt32Array(JSContext *cx, JSObject *obj, uint32_t *length, int32_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsUint32Array(JSContext *cx, JSObject *obj, uint32_t *length, uint32_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsFloat32Array(JSContext *cx, JSObject *obj, uint32_t *length, float **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsFloat64Array(JSContext *cx, JSObject *obj, uint32_t *length, double **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsArrayBufferView(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
extern JS_FRIEND_API(JSObject *)
JS_GetObjectAsArrayBuffer(JSContext *cx, JSObject *obj, uint32_t *length, uint8_t **data);
/*
* Get the type of elements in a typed array.
*
@ -1213,4 +1382,42 @@ JS_GetDataViewByteLength(JSObject *obj, JSContext *cx);
JS_FRIEND_API(void *)
JS_GetDataViewData(JSObject *obj, JSContext *cx);
#ifdef __cplusplus
/*
* This struct contains metadata passed from the DOM to the JS Engine for JIT
* optimizations on DOM property accessors. Eventually, this should be made
* available to general JSAPI users, but we are not currently ready to do so.
*/
typedef bool
(* JSJitPropertyOp)(JSContext *cx, JSHandleObject thisObj,
void *specializedThis, JS::Value *vp);
typedef bool
(* JSJitMethodOp)(JSContext *cx, JSHandleObject thisObj,
void *specializedThis, unsigned argc, JS::Value *vp);
struct JSJitInfo {
JSJitPropertyOp op;
uint32_t protoID;
uint32_t depth;
bool isInfallible; /* Is op fallible? Getters only */
bool isConstant; /* Getting a construction-time constant? */
};
static JS_ALWAYS_INLINE const JSJitInfo *
FUNCTION_VALUE_TO_JITINFO(const JS::Value& v)
{
JS_ASSERT(js::GetObjectClass(&v.toObject()) == &js::FunctionClass);
return reinterpret_cast<js::shadow::Function *>(&v.toObject())->jitinfo;
}
static JS_ALWAYS_INLINE void
SET_JITINFO(JSFunction * func, const JSJitInfo *info)
{
js::shadow::Function *fun = reinterpret_cast<js::shadow::Function *>(func);
/* JS_ASSERT(func->isNative()). 0x4000 is JSFUN_INTERPRETED */
JS_ASSERT(!(fun->flags & 0x4000));
fun->jitinfo = info;
}
#endif /* __cplusplus */
#endif /* jsfriendapi_h___ */

178
scripting/javascript/spidermonkey-win32/include/jsgc.h Normal file → Executable file
View File

@ -41,6 +41,7 @@ namespace js {
class GCHelperThread;
struct Shape;
struct SliceBudget;
namespace gc {
@ -48,6 +49,8 @@ enum State {
NO_INCREMENTAL,
MARK_ROOTS,
MARK,
SWEEP,
SWEEP_END,
INVALID
};
@ -112,36 +115,100 @@ MapAllocToTraceKind(AllocKind thingKind)
return map[thingKind];
}
static inline bool
IsNurseryAllocable(AllocKind kind)
{
JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
static const bool map[FINALIZE_LIMIT] = {
false, /* FINALIZE_OBJECT0 */
true, /* FINALIZE_OBJECT0_BACKGROUND */
false, /* FINALIZE_OBJECT2 */
true, /* FINALIZE_OBJECT2_BACKGROUND */
false, /* FINALIZE_OBJECT4 */
true, /* FINALIZE_OBJECT4_BACKGROUND */
false, /* FINALIZE_OBJECT8 */
true, /* FINALIZE_OBJECT8_BACKGROUND */
false, /* FINALIZE_OBJECT12 */
true, /* FINALIZE_OBJECT12_BACKGROUND */
false, /* FINALIZE_OBJECT16 */
true, /* FINALIZE_OBJECT16_BACKGROUND */
false, /* FINALIZE_SCRIPT */
false, /* FINALIZE_SHAPE */
false, /* FINALIZE_BASE_SHAPE */
false, /* FINALIZE_TYPE_OBJECT */
#if JS_HAS_XML_SUPPORT
false, /* FINALIZE_XML */
#endif
true, /* FINALIZE_SHORT_STRING */
true, /* FINALIZE_STRING */
false /* FINALIZE_EXTERNAL_STRING */
};
return map[kind];
}
static inline bool
IsBackgroundFinalized(AllocKind kind)
{
JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
static const bool map[FINALIZE_LIMIT] = {
false, /* FINALIZE_OBJECT0 */
true, /* FINALIZE_OBJECT0_BACKGROUND */
false, /* FINALIZE_OBJECT2 */
true, /* FINALIZE_OBJECT2_BACKGROUND */
false, /* FINALIZE_OBJECT4 */
true, /* FINALIZE_OBJECT4_BACKGROUND */
false, /* FINALIZE_OBJECT8 */
true, /* FINALIZE_OBJECT8_BACKGROUND */
false, /* FINALIZE_OBJECT12 */
true, /* FINALIZE_OBJECT12_BACKGROUND */
false, /* FINALIZE_OBJECT16 */
true, /* FINALIZE_OBJECT16_BACKGROUND */
false, /* FINALIZE_SCRIPT */
false, /* FINALIZE_SHAPE */
false, /* FINALIZE_BASE_SHAPE */
false, /* FINALIZE_TYPE_OBJECT */
#if JS_HAS_XML_SUPPORT
false, /* FINALIZE_XML */
#endif
true, /* FINALIZE_SHORT_STRING */
true, /* FINALIZE_STRING */
false /* FINALIZE_EXTERNAL_STRING */
};
return map[kind];
}
inline JSGCTraceKind
GetGCThingTraceKind(const void *thing);
/*
* ArenaList::head points to the start of the list. Normally cursor points
* to the first arena in the list with some free things and all arenas
* before cursor are fully allocated. However, as the arena currently being
* allocated from is considered full while its list of free spans is moved
* into the freeList, during the GC or cell enumeration, when an
* unallocated freeList is moved back to the arena, we can see an arena
* with some free cells before the cursor. The cursor is an indirect
* pointer to allow for efficient list insertion at the cursor point and
* other list manipulations.
*/
struct ArenaList {
ArenaHeader *head;
ArenaHeader **cursor;
ArenaList() {
clear();
}
void clear() {
head = NULL;
cursor = &head;
}
void insert(ArenaHeader *arena);
};
struct ArenaLists {
/*
* ArenaList::head points to the start of the list. Normally cursor points
* to the first arena in the list with some free things and all arenas
* before cursor are fully allocated. However, as the arena currently being
* allocated from is considered full while its list of free spans is moved
* into the freeList, during the GC or cell enumeration, when an
* unallocated freeList is moved back to the arena, we can see an arena
* with some free cells before the cursor. The cursor is an indirect
* pointer to allow for efficient list insertion at the cursor point and
* other list manipulations.
*/
struct ArenaList {
ArenaHeader *head;
ArenaHeader **cursor;
ArenaList() {
clear();
}
void clear() {
head = NULL;
cursor = &head;
}
};
private:
/*
* For each arena kind its free list is represented as the first span with
@ -180,12 +247,18 @@ struct ArenaLists {
volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
public:
/* For each arena kind, a list of arenas remaining to be swept. */
ArenaHeader *arenaListsToSweep[FINALIZE_LIMIT];
public:
ArenaLists() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
freeLists[i].initAsEmpty();
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
backgroundFinalizeState[i] = BFS_DONE;
for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
arenaListsToSweep[i] = NULL;
}
~ArenaLists() {
@ -211,6 +284,10 @@ struct ArenaLists {
return arenaLists[thingKind].head;
}
ArenaHeader *getFirstArenaToSweep(AllocKind thingKind) const {
return arenaListsToSweep[thingKind];
}
bool arenaListsAreEmpty() const {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
/*
@ -225,6 +302,10 @@ struct ArenaLists {
return true;
}
bool arenasAreFull(AllocKind thingKind) const {
return !*arenaLists[thingKind].cursor;
}
void unmarkAll() {
for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
/* The background finalization must have stopped at this point. */
@ -238,7 +319,8 @@ struct ArenaLists {
}
bool doneBackgroundFinalize(AllocKind kind) const {
return backgroundFinalizeState[kind] == BFS_DONE;
return backgroundFinalizeState[kind] == BFS_DONE ||
backgroundFinalizeState[kind] == BFS_JUST_FINISHED;
}
/*
@ -333,16 +415,18 @@ struct ArenaLists {
JS_ASSERT(freeLists[kind].isEmpty());
}
void finalizeObjects(FreeOp *fop);
void finalizeStrings(FreeOp *fop);
void finalizeShapes(FreeOp *fop);
void finalizeScripts(FreeOp *fop);
void queueObjectsForSweep(FreeOp *fop);
void queueStringsForSweep(FreeOp *fop);
void queueShapesForSweep(FreeOp *fop);
void queueScriptsForSweep(FreeOp *fop);
static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead);
bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget);
static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgroundThread);
private:
inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
inline void finalizeLater(FreeOp *fop, AllocKind thingKind);
inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);
inline void *allocateFromArena(JSCompartment *comp, AllocKind thingKind);
};
@ -478,7 +562,7 @@ extern void
GC(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
extern void
GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason, int64_t millis = 0);
extern void
GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, js::gcreason::Reason reason);
@ -539,8 +623,6 @@ class GCHelperThread {
void **freeCursor;
void **freeCursorEnd;
Vector<js::gc::ArenaHeader *, 64, js::SystemAllocPolicy> finalizeVector;
bool backgroundAllocation;
friend struct js::gc::ArenaLists;
@ -584,10 +666,10 @@ class GCHelperThread {
/* Must be called with the GC lock taken. */
void startBackgroundShrink();
/* Must be called with the GC lock taken. */
/* Must be called without the GC lock taken. */
void waitBackgroundSweepEnd();
/* Must be called with the GC lock taken. */
/* Must be called without the GC lock taken. */
void waitBackgroundSweepOrAllocEnd();
/* Must be called with the GC lock taken. */
@ -625,9 +707,6 @@ class GCHelperThread {
else
replenishAndFreeLater(ptr);
}
/* Must be called with the GC lock taken. */
bool prepareForBackgroundSweep();
};
@ -1071,22 +1150,33 @@ RunDebugGC(JSContext *cx);
void
SetDeterministicGC(JSContext *cx, bool enabled);
void
SetValidateGC(JSContext *cx, bool enabled);
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierValue = 4;
const int ZealFrameVerifierValue = 5;
const int ZealVerifierPreValue = 4;
const int ZealFrameVerifierPreValue = 5;
const int ZealStackRootingSafeValue = 6;
const int ZealStackRootingValue = 7;
const int ZealIncrementalRootsThenFinish = 8;
const int ZealIncrementalMarkAllThenFinish = 9;
const int ZealIncrementalMultipleSlices = 10;
const int ZealVerifierPostValue = 11;
const int ZealFrameVerifierPostValue = 12;
const int ZealPurgeAnalysisValue = 13;
enum VerifierType {
PreBarrierVerifier,
PostBarrierVerifier
};
#ifdef JS_GC_ZEAL
/* Check that write barriers have been used correctly. See jsgc.cpp. */
void
VerifyBarriers(JSRuntime *rt);
VerifyBarriers(JSRuntime *rt, VerifierType type);
void
MaybeVerifyBarriers(JSContext *cx, bool always = false);
@ -1094,7 +1184,7 @@ MaybeVerifyBarriers(JSContext *cx, bool always = false);
#else
static inline void
VerifyBarriers(JSRuntime *rt)
VerifyBarriers(JSRuntime *rt, VerifierType type)
{
}

View File

@ -1,120 +0,0 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef jshash_h___
#define jshash_h___
/*
* API to portable hash table code.
*/
#include <stddef.h>
#include <stdio.h>
#include "jstypes.h"
JS_BEGIN_EXTERN_C
typedef uint32_t JSHashNumber;
typedef struct JSHashEntry JSHashEntry;
typedef struct JSHashTable JSHashTable;
#define JS_HASH_BITS 32
#define JS_GOLDEN_RATIO 0x9E3779B9U
typedef JSHashNumber (* JSHashFunction)(const void *key);
typedef int (* JSHashComparator)(const void *v1, const void *v2);
typedef int (* JSHashEnumerator)(JSHashEntry *he, int i, void *arg);
/* Flag bits in JSHashEnumerator's return value */
#define HT_ENUMERATE_NEXT 0 /* continue enumerating entries */
#define HT_ENUMERATE_STOP 1 /* stop enumerating entries */
#define HT_ENUMERATE_REMOVE 2 /* remove and free the current entry */
typedef struct JSHashAllocOps {
void * (*allocTable)(void *pool, size_t size);
void (*freeTable)(void *pool, void *item, size_t size);
JSHashEntry * (*allocEntry)(void *pool, const void *key);
void (*freeEntry)(void *pool, JSHashEntry *he, unsigned flag);
} JSHashAllocOps;
#define HT_FREE_VALUE 0 /* just free the entry's value */
#define HT_FREE_ENTRY 1 /* free value and entire entry */
struct JSHashEntry {
JSHashEntry *next; /* hash chain linkage */
JSHashNumber keyHash; /* key hash function result */
const void *key; /* ptr to opaque key */
void *value; /* ptr to opaque value */
};
struct JSHashTable {
JSHashEntry **buckets; /* vector of hash buckets */
uint32_t nentries; /* number of entries in table */
uint32_t shift; /* multiplicative hash shift */
JSHashFunction keyHash; /* key hash function */
JSHashComparator keyCompare; /* key comparison function */
JSHashComparator valueCompare; /* value comparison function */
JSHashAllocOps *allocOps; /* allocation operations */
void *allocPriv; /* allocation private data */
#ifdef JS_HASHMETER
uint32_t nlookups; /* total number of lookups */
uint32_t nsteps; /* number of hash chains traversed */
uint32_t ngrows; /* number of table expansions */
uint32_t nshrinks; /* number of table contractions */
#endif
};
/*
* Create a new hash table.
* If allocOps is null, use default allocator ops built on top of malloc().
*/
extern JS_PUBLIC_API(JSHashTable *)
JS_NewHashTable(uint32_t n, JSHashFunction keyHash,
JSHashComparator keyCompare, JSHashComparator valueCompare,
JSHashAllocOps *allocOps, void *allocPriv);
extern JS_PUBLIC_API(void)
JS_HashTableDestroy(JSHashTable *ht);
/* Low level access methods */
extern JS_PUBLIC_API(JSHashEntry **)
JS_HashTableRawLookup(JSHashTable *ht, JSHashNumber keyHash, const void *key);
#ifdef __cplusplus
extern JS_PUBLIC_API(JSHashEntry *)
JS_HashTableRawAdd(JSHashTable *ht, JSHashEntry **&hep, JSHashNumber keyHash,
const void *key, void *value);
#endif
extern JS_PUBLIC_API(void)
JS_HashTableRawRemove(JSHashTable *ht, JSHashEntry **hep, JSHashEntry *he);
/* Higher level access methods */
extern JS_PUBLIC_API(JSHashEntry *)
JS_HashTableAdd(JSHashTable *ht, const void *key, void *value);
extern JS_PUBLIC_API(JSBool)
JS_HashTableRemove(JSHashTable *ht, const void *key);
extern JS_PUBLIC_API(int)
JS_HashTableEnumerateEntries(JSHashTable *ht, JSHashEnumerator f, void *arg);
extern JS_PUBLIC_API(void *)
JS_HashTableLookup(JSHashTable *ht, const void *key);
extern JS_PUBLIC_API(int)
JS_HashTableDump(JSHashTable *ht, JSHashEnumerator dump, FILE *fp);
/* General-purpose C string hash function. */
extern JS_PUBLIC_API(JSHashNumber)
JS_HashString(const void *key);
/* Stub function just returns v1 == v2 */
extern JS_PUBLIC_API(int)
JS_CompareValues(const void *v1, const void *v2);
JS_END_EXTERN_C
#endif /* jshash_h___ */

View File

@ -16,10 +16,10 @@
# include "prthread.h"
# include "prinit.h"
# define JS_ATOMIC_INCREMENT(p) PR_ATOMIC_INCREMENT((PRInt32 *)(p))
# define JS_ATOMIC_DECREMENT(p) PR_ATOMIC_DECREMENT((PRInt32 *)(p))
# define JS_ATOMIC_ADD(p,v) PR_ATOMIC_ADD((PRInt32 *)(p), (PRInt32)(v))
# define JS_ATOMIC_SET(p,v) PR_ATOMIC_SET((PRInt32 *)(p), (PRInt32)(v))
# define JS_ATOMIC_INCREMENT(p) PR_ATOMIC_INCREMENT((int32_t *)(p))
# define JS_ATOMIC_DECREMENT(p) PR_ATOMIC_DECREMENT((int32_t *)(p))
# define JS_ATOMIC_ADD(p,v) PR_ATOMIC_ADD((int32_t *)(p), (int32_t)(v))
# define JS_ATOMIC_SET(p,v) PR_ATOMIC_SET((int32_t *)(p), (int32_t)(v))
#else /* JS_THREADSAFE */

7
scripting/javascript/spidermonkey-win32/include/json.h Normal file → Executable file
View File

@ -7,6 +7,7 @@
#include "jsprvtd.h"
#include "jspubtd.h"
#include "jsapi.h"
#include "js/Vector.h"
@ -18,7 +19,7 @@ js_InitJSONClass(JSContext *cx, JSObject *obj);
extern JSBool
js_Stringify(JSContext *cx, js::MutableHandleValue vp,
JSObject *replacer, js::Value space,
JSObject *replacer, js::Value space,
js::StringBuffer &sb);
// Avoid build errors on certain platforms that define these names as constants
@ -37,8 +38,8 @@ enum DecodingMode { STRICT, LEGACY };
namespace js {
extern JS_FRIEND_API(JSBool)
ParseJSONWithReviver(JSContext *cx, const jschar *chars, size_t length, const Value &filter,
Value *vp, DecodingMode decodingMode = STRICT);
ParseJSONWithReviver(JSContext *cx, const jschar *chars, size_t length, HandleValue filter,
MutableHandleValue vp, DecodingMode decodingMode = STRICT);
} /* namespace js */

View File

@ -115,7 +115,7 @@ class JS_FRIEND_API(PerfMeasurement)
* global object). The JS-visible API is identical to the C++ API.
*/
extern JS_FRIEND_API(JSObject*)
RegisterPerfMeasurement(JSContext *cx, JSObject *global);
RegisterPerfMeasurement(JSContext *cx, JSRawObject global);
/*
* Given a jsval which contains an instance of the aforementioned

View File

View File

@ -64,6 +64,7 @@ JS_PROTO(WeakMap, 36, js_InitWeakMapClass)
JS_PROTO(Map, 37, js_InitMapClass)
JS_PROTO(Set, 38, js_InitSetClass)
JS_PROTO(DataView, 39, js_InitTypedArrayClasses)
JS_PROTO(ParallelArray, 40, js_InitParallelArrayClass)
#undef XML_INIT
#undef NAMESPACE_INIT

View File

@ -13,7 +13,7 @@
namespace js {
class Wrapper;
class JS_FRIEND_API(Wrapper);
/*
* A proxy is a JSObject that implements generic behavior by providing custom
@ -48,10 +48,19 @@ class Wrapper;
*/
class JS_FRIEND_API(BaseProxyHandler) {
void *mFamily;
bool mHasPrototype;
protected:
// Subclasses may set this in their constructor.
void setHasPrototype(bool hasPrototype) { mHasPrototype = hasPrototype; };
public:
explicit BaseProxyHandler(void *family);
virtual ~BaseProxyHandler();
bool hasPrototype() {
return mHasPrototype;
}
inline void *family() {
return mFamily;
}
@ -102,7 +111,7 @@ class JS_FRIEND_API(BaseProxyHandler) {
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *proxy, unsigned argc, Value *vp);
virtual bool construct(JSContext *cx, JSObject *proxy, unsigned argc, Value *argv, Value *rval);
virtual bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp, Native native, CallArgs args);
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl, CallArgs args);
virtual bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp, bool *bp);
virtual JSType typeOf(JSContext *cx, JSObject *proxy);
virtual bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx);
@ -114,6 +123,10 @@ class JS_FRIEND_API(BaseProxyHandler) {
virtual void finalize(JSFreeOp *fop, JSObject *proxy);
virtual bool getElementIfPresent(JSContext *cx, JSObject *obj, JSObject *receiver,
uint32_t index, Value *vp, bool *present);
virtual bool getPrototypeOf(JSContext *cx, JSObject *proxy, JSObject **proto);
/* See comment for weakmapKeyDelegateOp in jsclass.h. */
virtual JSObject *weakmapKeyDelegate(JSObject *proxy);
};
/*
@ -150,8 +163,8 @@ class JS_PUBLIC_API(IndirectProxyHandler) : public BaseProxyHandler {
Value *vp) MOZ_OVERRIDE;
virtual bool construct(JSContext *cx, JSObject *proxy, unsigned argc,
Value *argv, Value *rval) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp,
Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp,
bool *bp) MOZ_OVERRIDE;
virtual JSType typeOf(JSContext *cx, JSObject *proxy) MOZ_OVERRIDE;
@ -166,6 +179,7 @@ class JS_PUBLIC_API(IndirectProxyHandler) : public BaseProxyHandler {
Value *vp) MOZ_OVERRIDE;
virtual bool iteratorNext(JSContext *cx, JSObject *proxy,
Value *vp) MOZ_OVERRIDE;
virtual JSObject *weakmapKeyDelegate(JSObject *proxy);
};
/*
@ -215,18 +229,18 @@ class Proxy {
/* ES5 Harmony derived proxy traps. */
static bool has(JSContext *cx, JSObject *proxy, jsid id, bool *bp);
static bool hasOwn(JSContext *cx, JSObject *proxy, jsid id, bool *bp);
static bool get(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, Value *vp);
static bool getElementIfPresent(JSContext *cx, JSObject *proxy, JSObject *receiver,
uint32_t index, Value *vp, bool *present);
static bool set(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id, bool strict,
Value *vp);
static bool get(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id, MutableHandleValue vp);
static bool getElementIfPresent(JSContext *cx, HandleObject proxy, HandleObject receiver,
uint32_t index, MutableHandleValue vp, bool *present);
static bool set(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id, bool strict,
MutableHandleValue vp);
static bool keys(JSContext *cx, JSObject *proxy, AutoIdVector &props);
static bool iterate(JSContext *cx, JSObject *proxy, unsigned flags, Value *vp);
static bool iterate(JSContext *cx, HandleObject proxy, unsigned flags, MutableHandleValue vp);
/* Spidermonkey extensions. */
static bool call(JSContext *cx, JSObject *proxy, unsigned argc, Value *vp);
static bool construct(JSContext *cx, JSObject *proxy, unsigned argc, Value *argv, Value *rval);
static bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp, Native native, CallArgs args);
static bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl, CallArgs args);
static bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp, bool *bp);
static JSType typeOf(JSContext *cx, JSObject *proxy);
static bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx);
@ -247,17 +261,17 @@ inline bool IsFunctionProxyClass(const Class *clasp)
return clasp == &js::FunctionProxyClass;
}
inline bool IsObjectProxy(const JSObject *obj)
inline bool IsObjectProxy(RawObject obj)
{
return IsObjectProxyClass(GetObjectClass(obj));
}
inline bool IsFunctionProxy(const JSObject *obj)
inline bool IsFunctionProxy(RawObject obj)
{
return IsFunctionProxyClass(GetObjectClass(obj));
}
inline bool IsProxy(const JSObject *obj)
inline bool IsProxy(RawObject obj)
{
Class *clasp = GetObjectClass(obj);
return IsObjectProxyClass(clasp) || IsFunctionProxyClass(clasp);
@ -272,56 +286,56 @@ const uint32_t JSSLOT_PROXY_CALL = 4;
const uint32_t JSSLOT_PROXY_CONSTRUCT = 5;
inline BaseProxyHandler *
GetProxyHandler(const JSObject *obj)
GetProxyHandler(RawObject obj)
{
JS_ASSERT(IsProxy(obj));
return (BaseProxyHandler *) GetReservedSlot(obj, JSSLOT_PROXY_HANDLER).toPrivate();
}
inline const Value &
GetProxyPrivate(const JSObject *obj)
GetProxyPrivate(RawObject obj)
{
JS_ASSERT(IsProxy(obj));
return GetReservedSlot(obj, JSSLOT_PROXY_PRIVATE);
}
inline JSObject *
GetProxyTargetObject(const JSObject *obj)
GetProxyTargetObject(RawObject obj)
{
JS_ASSERT(IsProxy(obj));
return GetProxyPrivate(obj).toObjectOrNull();
}
inline const Value &
GetProxyCall(const JSObject *obj)
GetProxyCall(RawObject obj)
{
JS_ASSERT(IsFunctionProxy(obj));
return GetReservedSlot(obj, JSSLOT_PROXY_CALL);
}
inline const Value &
GetProxyExtra(const JSObject *obj, size_t n)
GetProxyExtra(RawObject obj, size_t n)
{
JS_ASSERT(IsProxy(obj));
return GetReservedSlot(obj, JSSLOT_PROXY_EXTRA + n);
}
inline void
SetProxyHandler(JSObject *obj, BaseProxyHandler *handler)
SetProxyHandler(RawObject obj, BaseProxyHandler *handler)
{
JS_ASSERT(IsProxy(obj));
SetReservedSlot(obj, JSSLOT_PROXY_HANDLER, PrivateValue(handler));
}
inline void
SetProxyPrivate(JSObject *obj, const Value &value)
SetProxyPrivate(RawObject obj, const Value &value)
{
JS_ASSERT(IsProxy(obj));
SetReservedSlot(obj, JSSLOT_PROXY_PRIVATE, value);
}
inline void
SetProxyExtra(JSObject *obj, size_t n, const Value &extra)
SetProxyExtra(RawObject obj, size_t n, const Value &extra)
{
JS_ASSERT(IsProxy(obj));
JS_ASSERT(n <= 1);

View File

@ -82,7 +82,6 @@ class JSExtensibleString;
class JSExternalString;
class JSLinearString;
class JSFixedString;
class JSStaticAtom;
class JSRope;
class JSAtom;
class JSWrapper;
@ -131,24 +130,10 @@ class StackSpace;
class ContextStack;
class ScriptFrameIter;
struct BytecodeEmitter;
struct Definition;
struct FunctionBox;
struct ObjectBox;
struct ParseNode;
struct Parser;
struct SharedContext;
class TokenStream;
struct Token;
struct TokenPos;
struct TokenPtr;
struct TreeContext;
class UpvarCookie;
class Proxy;
class BaseProxyHandler;
class DirectWrapper;
class CrossCompartmentWrapper;
class JS_FRIEND_API(BaseProxyHandler);
class JS_FRIEND_API(DirectWrapper);
class JS_FRIEND_API(CrossCompartmentWrapper);
class TempAllocPolicy;
class RuntimeAllocPolicy;
@ -172,13 +157,6 @@ class Bindings;
struct StackBaseShape;
struct StackShape;
class MultiDeclRange;
class ParseMapPool;
class DefinitionList;
typedef InlineMap<JSAtom *, Definition *, 24> AtomDefnMap;
typedef InlineMap<JSAtom *, jsatomid, 24> AtomIndexMap;
typedef Vector<UpvarCookie, 8> UpvarCookies;
class Breakpoint;
class BreakpointSite;
class Debugger;
@ -197,6 +175,22 @@ typedef JSPropertyOp PropertyOp;
typedef JSStrictPropertyOp StrictPropertyOp;
typedef JSPropertyDescriptor PropertyDescriptor;
namespace frontend {
struct BytecodeEmitter;
struct Definition;
struct FunctionBox;
struct ObjectBox;
struct Token;
struct TokenPos;
struct TokenPtr;
class TokenStream;
struct Parser;
class ParseMapPool;
struct ParseNode;
} /* namespace frontend */
namespace analyze {
struct LifetimeVariable;

View File

@ -240,6 +240,21 @@ enum ThingRootKind
THING_ROOT_LIMIT
};
template <typename T>
struct RootKind;
/*
* Specifically mark the ThingRootKind of externally visible types, so that
* JSAPI users may use JSRooted... types without having the class definition
* available.
*/
template <> struct RootKind<JSObject *> { static ThingRootKind rootKind() { return THING_ROOT_OBJECT; }; };
template <> struct RootKind<JSFunction *> { static ThingRootKind rootKind() { return THING_ROOT_OBJECT; }; };
template <> struct RootKind<JSString *> { static ThingRootKind rootKind() { return THING_ROOT_STRING; }; };
template <> struct RootKind<JSScript *> { static ThingRootKind rootKind() { return THING_ROOT_SCRIPT; }; };
template <> struct RootKind<jsid> { static ThingRootKind rootKind() { return THING_ROOT_ID; }; };
template <> struct RootKind<Value> { static ThingRootKind rootKind() { return THING_ROOT_VALUE; }; };
struct ContextFriendFields {
JSRuntime *const runtime;

View File

@ -147,8 +147,6 @@
***********************************************************************/
#define JS_HOWMANY(x,y) (((x)+(y)-1)/(y))
#define JS_ROUNDUP(x,y) (JS_HOWMANY(x,y)*(y))
#define JS_MIN(x,y) ((x)<(y)?(x):(y))
#define JS_MAX(x,y) ((x)>(y)?(x):(y))
#include "jscpucfg.h"

View File

@ -15,6 +15,10 @@
#include "js/Utility.h"
#ifdef USE_ZLIB
#include "zlib.h"
#endif
/* Forward declarations. */
struct JSContext;
@ -335,41 +339,43 @@ ClearAllBitArrayElements(size_t *array, size_t length)
array[i] = 0;
}
} /* namespace js */
#endif /* __cplusplus */
#ifdef USE_ZLIB
class Compressor
{
/* Number of bytes we should hand to zlib each compressMore() call. */
static const size_t CHUNKSIZE = 2048;
z_stream zs;
const unsigned char *inp;
size_t inplen;
public:
Compressor(const unsigned char *inp, size_t inplen, unsigned char *out)
: inp(inp),
inplen(inplen)
{
JS_ASSERT(inplen > 0);
zs.opaque = NULL;
zs.next_in = (Bytef *)inp;
zs.avail_in = 0;
zs.next_out = out;
zs.avail_out = inplen;
}
bool init();
/* Compress some of the input. Return true if it should be called again. */
bool compressMore();
/* Finalize compression. Return the length of the compressed input. */
size_t finish();
};
/*
* JS_ROTATE_LEFT32
*
* There is no rotate operation in the C Language so the construct (a << 4) |
* (a >> 28) is used instead. Most compilers convert this to a rotate
* instruction but some versions of MSVC don't without a little help. To get
* MSVC to generate a rotate instruction, we have to use the _rotl intrinsic
* and use a pragma to make _rotl inline.
*
* MSVC in VS2005 will do an inline rotate instruction on the above construct.
* Decompress a string. The caller must know the length of the output and
* allocate |out| to a string of that length.
*/
#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_AMD64) || \
defined(_M_X64))
#include <stdlib.h>
#pragma intrinsic(_rotl)
#define JS_ROTATE_LEFT32(a, bits) _rotl(a, bits)
#else
#define JS_ROTATE_LEFT32(a, bits) (((a) << (bits)) | ((a) >> (32 - (bits))))
bool DecompressString(const unsigned char *inp, size_t inplen,
unsigned char *out, size_t outlen);
#endif
/* Static control-flow checks. */
#ifdef NS_STATIC_CHECKING
/* Trigger a control flow check to make sure that code flows through label */
inline __attribute__ ((unused)) void MUST_FLOW_THROUGH(const char *label) {}
/* Avoid unused goto-label warnings. */
# define MUST_FLOW_LABEL(label) goto label; label:
#else
# define MUST_FLOW_THROUGH(label) ((void) 0)
# define MUST_FLOW_LABEL(label)
#endif
} /* namespace js */
#endif /* __cplusplus */
/* Crash diagnostics */
#ifdef DEBUG

View File

@ -218,6 +218,7 @@ typedef enum JSWhyMagic
JS_OVERWRITTEN_CALLEE, /* arguments.callee has been overwritten */
JS_FORWARD_TO_CALL_OBJECT, /* args object element stored in call object */
JS_BLOCK_NEEDS_CLONE, /* value of static block object slot */
JS_HASH_KEY_EMPTY, /* see class js::HashableValue */
JS_GENERIC_MAGIC /* for local use */
} JSWhyMagic;

View File

View File

@ -64,9 +64,9 @@ class JS_FRIEND_API(Wrapper)
static JSObject *New(JSContext *cx, JSObject *obj, JSObject *proto,
JSObject *parent, Wrapper *handler);
static Wrapper *wrapperHandler(const JSObject *wrapper);
static Wrapper *wrapperHandler(RawObject wrapper);
static JSObject *wrappedObject(const JSObject *wrapper);
static JSObject *wrappedObject(RawObject wrapper);
explicit Wrapper(unsigned flags);
@ -166,7 +166,7 @@ class JS_FRIEND_API(IndirectWrapper) : public Wrapper,
class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
{
public:
explicit DirectWrapper(unsigned flags);
explicit DirectWrapper(unsigned flags, bool hasPrototype = false);
virtual ~DirectWrapper();
@ -206,7 +206,8 @@ class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *wrapper, unsigned argc, Value *vp) MOZ_OVERRIDE;
virtual bool construct(JSContext *cx, JSObject *wrapper, unsigned argc, Value *argv, Value *rval) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, JSObject *wrapper, Class *clasp, Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *wrapper, const Value *vp, bool *bp) MOZ_OVERRIDE;
virtual JSString *obj_toString(JSContext *cx, JSObject *wrapper) MOZ_OVERRIDE;
virtual JSString *fun_toString(JSContext *cx, JSObject *wrapper, unsigned indent) MOZ_OVERRIDE;
@ -214,6 +215,7 @@ class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
Value *vp) MOZ_OVERRIDE;
static DirectWrapper singleton;
static DirectWrapper singletonWithPrototype;
static void *getWrapperFamily();
};
@ -222,7 +224,7 @@ class JS_FRIEND_API(DirectWrapper) : public Wrapper, public DirectProxyHandler
class JS_FRIEND_API(CrossCompartmentWrapper) : public DirectWrapper
{
public:
CrossCompartmentWrapper(unsigned flags);
CrossCompartmentWrapper(unsigned flags, bool hasPrototype = false);
virtual ~CrossCompartmentWrapper();
@ -249,7 +251,8 @@ class JS_FRIEND_API(CrossCompartmentWrapper) : public DirectWrapper
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *wrapper, unsigned argc, Value *vp) MOZ_OVERRIDE;
virtual bool construct(JSContext *cx, JSObject *wrapper, unsigned argc, Value *argv, Value *rval) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, JSObject *wrapper, Class *clasp, Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *wrapper, const Value *vp, bool *bp) MOZ_OVERRIDE;
virtual JSString *obj_toString(JSContext *cx, JSObject *wrapper) MOZ_OVERRIDE;
virtual JSString *fun_toString(JSContext *cx, JSObject *wrapper, unsigned indent) MOZ_OVERRIDE;
@ -258,6 +261,7 @@ class JS_FRIEND_API(CrossCompartmentWrapper) : public DirectWrapper
virtual bool iteratorNext(JSContext *cx, JSObject *wrapper, Value *vp);
static CrossCompartmentWrapper singleton;
static CrossCompartmentWrapper singletonWithPrototype;
};
/*
@ -275,7 +279,8 @@ class JS_FRIEND_API(SecurityWrapper) : public Base
public:
SecurityWrapper(unsigned flags);
virtual bool nativeCall(JSContext *cx, JSObject *wrapper, Class *clasp, Native native, CallArgs args) MOZ_OVERRIDE;
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx) MOZ_OVERRIDE;
virtual bool regexp_toShared(JSContext *cx, JSObject *proxy, RegExpGuard *g) MOZ_OVERRIDE;
};
@ -283,25 +288,6 @@ class JS_FRIEND_API(SecurityWrapper) : public Base
typedef SecurityWrapper<DirectWrapper> SameCompartmentSecurityWrapper;
typedef SecurityWrapper<CrossCompartmentWrapper> CrossCompartmentSecurityWrapper;
/*
* A hacky class that lets a friend force a fake frame. We must already be
* in the compartment of |target| when we enter the forced frame.
*/
class JS_FRIEND_API(ForceFrame)
{
public:
JSContext * const context;
JSObject * const target;
private:
DummyFrameGuard *frame;
public:
ForceFrame(JSContext *cx, JSObject *target);
~ForceFrame();
bool enter();
};
class JS_FRIEND_API(DeadObjectProxy) : public BaseProxyHandler
{
public:
@ -323,7 +309,8 @@ class JS_FRIEND_API(DeadObjectProxy) : public BaseProxyHandler
/* Spidermonkey extensions. */
virtual bool call(JSContext *cx, JSObject *proxy, unsigned argc, Value *vp);
virtual bool construct(JSContext *cx, JSObject *proxy, unsigned argc, Value *argv, Value *rval);
virtual bool nativeCall(JSContext *cx, JSObject *proxy, Class *clasp, Native native, CallArgs args);
virtual bool nativeCall(JSContext *cx, IsAcceptableThis test, NativeImpl impl,
CallArgs args) MOZ_OVERRIDE;
virtual bool hasInstance(JSContext *cx, JSObject *proxy, const Value *vp, bool *bp);
virtual bool objectClassIs(JSObject *obj, ESClassValue classValue, JSContext *cx);
virtual JSString *obj_toString(JSContext *cx, JSObject *proxy);
@ -347,7 +334,7 @@ TransparentObjectWrapper(JSContext *cx, JSObject *obj, JSObject *wrappedProto, J
extern JS_FRIEND_DATA(int) sWrapperFamily;
inline bool
IsWrapper(const JSObject *obj)
IsWrapper(RawObject obj)
{
return IsProxy(obj) && GetProxyHandler(obj)->family() == &sWrapperFamily;
}
@ -366,8 +353,13 @@ UnwrapObject(JSObject *obj, bool stopAtOuter = true, unsigned *flagsp = NULL);
JS_FRIEND_API(JSObject *)
UnwrapObjectChecked(JSContext *cx, JSObject *obj);
// Unwrap only the outermost security wrapper, with the same semantics as
// above. This is the checked version of Wrapper::wrappedObject.
JS_FRIEND_API(JSObject *)
UnwrapOneChecked(JSContext *cx, JSObject *obj);
JS_FRIEND_API(bool)
IsCrossCompartmentWrapper(const JSObject *obj);
IsCrossCompartmentWrapper(RawObject obj);
JSObject *
NewDeadProxyObject(JSContext *cx, JSObject *parent);
@ -384,37 +376,6 @@ RemapAllWrappersForObject(JSContext *cx, JSObject *oldTarget,
// API to recompute all cross-compartment wrappers whose source and target
// match the given filters.
//
// These filters are designed to be ephemeral stack classes, and thus don't
// do any rooting or holding of their members.
struct CompartmentFilter {
virtual bool match(JSCompartment *c) const = 0;
};
struct AllCompartments : public CompartmentFilter {
virtual bool match(JSCompartment *c) const { return true; }
};
struct ContentCompartmentsOnly : public CompartmentFilter {
virtual bool match(JSCompartment *c) const {
return !IsSystemCompartment(c);
}
};
struct SingleCompartment : public CompartmentFilter {
JSCompartment *ours;
SingleCompartment(JSCompartment *c) : ours(c) {}
virtual bool match(JSCompartment *c) const { return c == ours; }
};
struct CompartmentsWithPrincipals : public CompartmentFilter {
JSPrincipals *principals;
CompartmentsWithPrincipals(JSPrincipals *p) : principals(p) {}
virtual bool match(JSCompartment *c) const {
return JS_GetCompartmentPrincipals(c) == principals;
}
};
JS_FRIEND_API(bool)
RecomputeWrappers(JSContext *cx, const CompartmentFilter &sourceFilter,
const CompartmentFilter &targetFilter);

View File

View File

@ -70,6 +70,10 @@
# define MOZ_HAVE_CXX11_OVERRIDE
# define MOZ_HAVE_CXX11_FINAL final
# endif
# if __has_extension(cxx_strong_enums)
# define MOZ_HAVE_CXX11_ENUM_TYPE
# define MOZ_HAVE_CXX11_STRONG_ENUMS
# endif
# if __has_attribute(noinline)
# define MOZ_HAVE_NEVER_INLINE __attribute__((noinline))
# endif
@ -89,6 +93,8 @@
# endif
# if __GNUC_MINOR__ >= 4
# define MOZ_HAVE_CXX11_DELETE
# define MOZ_HAVE_CXX11_ENUM_TYPE
# define MOZ_HAVE_CXX11_STRONG_ENUMS
# endif
# endif
# else
@ -108,6 +114,10 @@
# define MOZ_HAVE_CXX11_OVERRIDE
/* MSVC currently spells "final" as "sealed". */
# define MOZ_HAVE_CXX11_FINAL sealed
# define MOZ_HAVE_CXX11_ENUM_TYPE
# endif
# if _MSC_VER >= 1700
# define MOZ_HAVE_CXX11_STRONG_ENUMS
# endif
# define MOZ_HAVE_NEVER_INLINE __declspec(noinline)
# define MOZ_HAVE_NORETURN __declspec(noreturn)
@ -298,6 +308,167 @@
# define MOZ_FINAL /* no support */
#endif
/**
* MOZ_ENUM_TYPE specifies the underlying numeric type for an enum. It's
* specified by placing MOZ_ENUM_TYPE(type) immediately after the enum name in
* its declaration, and before the opening curly brace, like
*
* enum MyEnum MOZ_ENUM_TYPE(uint16_t)
* {
* A,
* B = 7,
* C
* };
*
* In supporting compilers, the macro will expand to ": uint16_t". The
* compiler will allocate exactly two bytes for MyEnum, and will require all
* enumerators to have values between 0 and 65535. (Thus specifying "B =
* 100000" instead of "B = 7" would fail to compile.) In old compilers, the
* macro expands to the empty string, and the underlying type is generally
* undefined.
*/
#ifdef MOZ_HAVE_CXX11_ENUM_TYPE
# define MOZ_ENUM_TYPE(type) : type
#else
# define MOZ_ENUM_TYPE(type) /* no support */
#endif
/**
* MOZ_BEGIN_ENUM_CLASS and MOZ_END_ENUM_CLASS provide access to the
* strongly-typed enumeration feature of C++11 ("enum class"). If supported
* by the compiler, an enum defined using these macros will not be implicitly
* converted to any other type, and its enumerators will be scoped using the
* enumeration name. Place MOZ_BEGIN_ENUM_CLASS(EnumName, type) in place of
* "enum EnumName {", and MOZ_END_ENUM_CLASS(EnumName) in place of the closing
* "};". For example,
*
* MOZ_BEGIN_ENUM_CLASS(Enum, int32_t)
* A, B = 6
* MOZ_END_ENUM_CLASS(Enum)
*
* This will make "Enum::A" and "Enum::B" appear in the global scope, but "A"
* and "B" will not. In compilers that support C++11 strongly-typed
* enumerations, implicit conversions of Enum values to numeric types will
* fail. In other compilers, Enum itself will actually be defined as a class,
* and some implicit conversions will fail while others will succeed.
*
* The type argument specifies the underlying type for the enum where
* supported, as with MOZ_ENUM_TYPE(). For simplicity, it is currently
* mandatory. As with MOZ_ENUM_TYPE(), it will do nothing on compilers that do
* not support it.
*/
#if defined(MOZ_HAVE_CXX11_STRONG_ENUMS)
/* All compilers that support strong enums also support an explicit
* underlying type, so no extra check is needed */
/* Native C++11 scoped enums: the macros expand directly to `enum class`. */
# define MOZ_BEGIN_ENUM_CLASS(Name, type) enum class Name : type {
# define MOZ_END_ENUM_CLASS(Name) };
#else
/**
* We need Name to both name a type, and scope the provided enumerator
* names. Namespaces and classes both provide scoping, but namespaces
* aren't types, so we need to use a class that wraps the enum values. We
* have an implicit conversion from the inner enum type to the class, so
* statements like
*
* Enum x = Enum::A;
*
* will still work. We need to define an implicit conversion from the class
* to the inner enum as well, so that (for instance) switch statements will
* work. This means that the class can be implicitly converted to a numeric
* value as well via the enum type, since C++ allows an implicit
* user-defined conversion followed by a standard conversion to still be
* implicit.
*
* We have an explicit constructor from int defined, so that casts like
* (Enum)7 will still work. We also have a zero-argument constructor with
* no arguments, so declaration without initialization (like "Enum foo;")
* will work.
*
* Additionally, we'll delete as many operators as possible for the inner
* enum type, so statements like this will still fail:
*
* f(5 + Enum::B); // deleted operator+
*
* But we can't prevent things like this, because C++ doesn't allow
* overriding conversions or assignment operators for enums:
*
* int x = Enum::A;
* int f()
* {
* return Enum::A;
* }
*/
/* NOTE: MOZ_BEGIN_ENUM_CLASS deliberately expands to an UNCLOSED brace:
 * the user's enumerator list follows it, and MOZ_END_ENUM_CLASS supplies
 * the matching close plus the wrapper-class boilerplate. Do not insert
 * non-continuation lines inside either macro definition. */
# define MOZ_BEGIN_ENUM_CLASS(Name, type) \
class Name \
{ \
public: \
enum Enum MOZ_ENUM_TYPE(type) \
{
/* Closes the enumerator list, then adds the wrapper's constructors and the
 * implicit Enum conversion, then (outside the class) deletes every
 * arithmetic/logical/bitwise operator taking the inner Enum, so mixed
 * int/Enum expressions fail to compile as they would with a real
 * `enum class`. */
# define MOZ_END_ENUM_CLASS(Name) \
}; \
Name() {} \
Name(Enum aEnum) : mEnum(aEnum) {} \
explicit Name(int num) : mEnum((Enum)num) {} \
operator Enum() const { return mEnum; } \
private: \
Enum mEnum; \
}; \
inline int operator+(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator+(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator-(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator-(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator*(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator*(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator/(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator/(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator%(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator%(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator+(const Name::Enum&) MOZ_DELETE; \
inline int operator-(const Name::Enum&) MOZ_DELETE; \
inline int& operator++(Name::Enum&) MOZ_DELETE; \
inline int operator++(Name::Enum&, int) MOZ_DELETE; \
inline int& operator--(Name::Enum&) MOZ_DELETE; \
inline int operator--(Name::Enum&, int) MOZ_DELETE; \
inline bool operator==(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator==(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator!=(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator!=(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator>(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator>(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator<(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator<(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator>=(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator>=(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator<=(const int&, const Name::Enum&) MOZ_DELETE; \
inline bool operator<=(const Name::Enum&, const int&) MOZ_DELETE; \
inline bool operator!(const Name::Enum&) MOZ_DELETE; \
inline bool operator&&(const bool&, const Name::Enum&) MOZ_DELETE; \
inline bool operator&&(const Name::Enum&, const bool&) MOZ_DELETE; \
inline bool operator||(const bool&, const Name::Enum&) MOZ_DELETE; \
inline bool operator||(const Name::Enum&, const bool&) MOZ_DELETE; \
inline int operator~(const Name::Enum&) MOZ_DELETE; \
inline int operator&(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator&(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator|(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator|(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator^(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator^(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator<<(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator<<(const Name::Enum&, const int&) MOZ_DELETE; \
inline int operator>>(const int&, const Name::Enum&) MOZ_DELETE; \
inline int operator>>(const Name::Enum&, const int&) MOZ_DELETE; \
inline int& operator+=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator-=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator*=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator/=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator%=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator&=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator|=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator^=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator<<=(int&, const Name::Enum&) MOZ_DELETE; \
inline int& operator>>=(int&, const Name::Enum&) MOZ_DELETE;
#endif
/**
* MOZ_WARN_UNUSED_RESULT tells the compiler to emit a warning if a function's
* return value is not used by the caller.

View File

View File

View File

@ -0,0 +1,15 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* mfbt math constants. */
#ifndef mozilla_Constants_h_
#define mozilla_Constants_h_
/* M_PI comes from POSIX <math.h>, not from standard C++, so some toolchains
 * (e.g. MSVC without _USE_MATH_DEFINES) do not provide it; supply it here
 * when missing. */
#ifndef M_PI
# define M_PI 3.14159265358979323846
#endif
#endif /* mozilla_Constants_h_ */

View File

View File

@ -179,6 +179,14 @@ AddToHash(uint32_t hash, A* a)
return detail::AddUintptrToHash<sizeof(uintptr_t)>(hash, uintptr_t(a));
}
/* Explicit specialization: hash a raw uintptr_t with the same word-sized
 * mixing (detail::AddUintptrToHash) used for pointer arguments above, so
 * hashing a pointer and hashing its uintptr_t value agree. */
template<>
MOZ_WARN_UNUSED_RESULT
inline uint32_t
AddToHash(uint32_t hash, uintptr_t a)
{
return detail::AddUintptrToHash<sizeof(uintptr_t)>(hash, a);
}
template<typename A, typename B>
MOZ_WARN_UNUSED_RESULT
uint32_t

View File

View File

View File

View File

@ -0,0 +1,47 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* mfbt maths algorithms. */
#ifndef mozilla_MathAlgorithms_h_
#define mozilla_MathAlgorithms_h_
#include "mozilla/Assertions.h"
namespace mozilla {
// Greatest Common Divisor
template<typename IntegerType>
MOZ_ALWAYS_INLINE IntegerType
EuclidGCD(IntegerType a, IntegerType b)
{
  // Euclid's algorithm using the remainder operation: O(log(min(a, b)))
  // steps, versus the O(max(a, b)) worst case of the subtraction-based
  // variant (e.g. EuclidGCD(1, 1000000000)), with identical results for
  // the positive inputs the asserts require.
  MOZ_ASSERT(a > 0);
  MOZ_ASSERT(b > 0);
  while (b != 0) {
    IntegerType remainder = a % b;
    a = b;
    b = remainder;
  }
  return a;
}
// Least Common Multiple
template<typename IntegerType>
MOZ_ALWAYS_INLINE IntegerType
EuclidLCM(IntegerType a, IntegerType b)
{
  // lcm(a, b) = (a / gcd(a, b)) * b.  Performing the division before the
  // multiplication keeps the intermediate value small, reducing overflow
  // risk.
  IntegerType gcd = EuclidGCD(a, b);
  IntegerType quotient = a / gcd;
  return quotient * b;
}
} /* namespace mozilla */
#endif /* mozilla_MathAlgorithms_h_ */

View File

@ -0,0 +1,46 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* Implements a workaround for compilers which do not support the C++11 nullptr
* constant.
*/
#ifndef mozilla_NullPtr_h_
#define mozilla_NullPtr_h_
/* Detect native nullptr support.  MOZ_HAVE_CXX11_NULLPTR is defined when the
 * compiler provides the C++11 keyword; otherwise a best-effort substitute is
 * defined below. */
#if defined(__clang__)
# ifndef __has_extension
# define __has_extension __has_feature
# endif
# if __has_extension(cxx_nullptr)
# define MOZ_HAVE_CXX11_NULLPTR
# endif
#elif defined(__GNUC__)
/* GCC's C++0x experimental-mode macro is __GXX_EXPERIMENTAL_CXX0X__ (two
 * leading underscores) and its minor-version macro is __GNUC_MINOR__; the
 * previous spellings (_GXX_EXPERIMENTAL_CXX0X__, __GNU_MINOR__) never match,
 * so GCC builds silently fell back to the __null substitute. */
# if defined(__GXX_EXPERIMENTAL_CXX0X__) || __cplusplus >= 201103L
# if (__GNUC__ * 1000 + __GNUC_MINOR__) >= 4006
# define MOZ_HAVE_CXX11_NULLPTR
# endif
# endif
#elif defined(_MSC_VER) && _MSC_VER >= 1600
/* defined() guard keeps non-MSVC -Wundef builds quiet; VS2010+ has nullptr. */
# define MOZ_HAVE_CXX11_NULLPTR
#endif
/**
 * Use C++11 nullptr if available; otherwise use __null for gcc, or a 0 literal
 * with the correct size to match the size of a pointer on a given platform.
 */
#ifndef MOZ_HAVE_CXX11_NULLPTR
# if defined(__GNUC__)
# define nullptr __null
# elif defined(_WIN64)
# define nullptr 0LL
# else
# define nullptr 0L
# endif
#endif
#endif /* mozilla_NullPtr_h_ */

View File

View File

View File

@ -0,0 +1,46 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* Simple class for computing SHA1. */
/*
* To compute the SHA1 of a buffer using this class you should write something
* like:
* void SHA1(const uint8_t* buf, unsigned size, uint8_t hash[20])
* {
* SHA1Sum S;
* S.update(buf, size);
* S.finish(hash);
* }
* If there are multiple buffers or chunks, the update method can be called
* multiple times and the SHA1 is computed on the concatenation of all the
* buffers passed to it.
* The finish method may only be called once and cannot be followed by calls
* to update.
*/
#ifndef mozilla_SHA1_h_
#define mozilla_SHA1_h_
#include "mozilla/StandardInteger.h"
namespace mozilla {
/* Incremental SHA-1: call update() any number of times, then finish() once.
 * Member definitions live in the corresponding .cpp file. */
class SHA1Sum {
union {
uint32_t w[16]; /* input buffer */
uint8_t b[64];  /* same 64 bytes viewed as the raw message block */
} u;
uint64_t size; /* count of hashed bytes. */
unsigned H[22]; /* 5 state variables, 16 tmp values, 1 extra */
bool mDone; /* NOTE(review): presumably set by finish() to reject further
             * update() calls — confirm against the out-of-line definitions. */
public:
static const unsigned int HashSize = 20; /* SHA-1 digest length in bytes */
SHA1Sum();
void update(const uint8_t *dataIn, uint32_t len);
void finish(uint8_t hashout[20]);
};
}
#endif /* mozilla_SHA1_h_ */

View File

View File

View File

View File

View File

View File

@ -0,0 +1,139 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/* Weak pointer functionality, implemented as a mixin for use with any class. */
/**
* SupportsWeakPtr lets you have a pointer to an object 'Foo' without affecting
* its lifetime. It works by creating a single shared reference counted object
* (WeakReference) that each WeakPtr will access 'Foo' through. This lets 'Foo'
* clear the pointer in the WeakReference without having to know about all of
* the WeakPtrs to it and allows the WeakReference to live beyond the lifetime
* of 'Foo'.
*
* The overhead of WeakPtr is that accesses to 'Foo' becomes an additional
* dereference, and an additional heap allocated pointer sized object shared
* between all of the WeakPtrs.
*
* Example of usage:
*
* // To have a class C support weak pointers, inherit from SupportsWeakPtr<C>.
* class C : public SupportsWeakPtr<C>
* {
* public:
* int num;
* void act();
* };
*
* C* ptr = new C();
*
* // Get weak pointers to ptr. The first time asWeakPtr is called
* // a reference counted WeakReference object is created that
* // can live beyond the lifetime of 'ptr'. The WeakReference
* // object will be notified of 'ptr's destruction.
* WeakPtr<C> weak = ptr->asWeakPtr();
* WeakPtr<C> other = ptr->asWeakPtr();
*
* // Test a weak pointer for validity before using it.
* if (weak) {
* weak->num = 17;
* weak->act();
* }
*
* // Destroying the underlying object clears weak pointers to it.
* delete ptr;
*
* MOZ_ASSERT(!weak, "Deleting |ptr| clears weak pointers to it.");
* MOZ_ASSERT(!other, "Deleting |ptr| clears all weak pointers to it.");
*
* WeakPtr is typesafe and may be used with any class. It is not required that
* the class be reference-counted or allocated in any particular way.
*
* The API was loosely inspired by Chromium's weak_ptr.h:
* http://src.chromium.org/svn/trunk/src/base/memory/weak_ptr.h
*/
#ifndef mozilla_WeakPtr_h_
#define mozilla_WeakPtr_h_
#include "mozilla/Assertions.h"
#include "mozilla/NullPtr.h"
#include "mozilla/RefPtr.h"
#include "mozilla/TypeTraits.h"
namespace mozilla {
template <typename T> class WeakPtr;
template <typename T>
class SupportsWeakPtr
{
public:
/* Returns a weak pointer to this object.  The shared, reference-counted
 * WeakReference is allocated lazily on the first call and reused by every
 * subsequent WeakPtr, so all of them observe the same detach(). */
WeakPtr<T> asWeakPtr() {
if (!weakRef)
weakRef = new WeakReference(static_cast<T*>(this));
return WeakPtr<T>(weakRef);
}
protected:
/* On destruction, null out the pointer inside the shared WeakReference so
 * every outstanding WeakPtr immediately observes the object as gone. */
~SupportsWeakPtr() {
MOZ_STATIC_ASSERT((IsBaseOf<SupportsWeakPtr<T>, T>::value), "T must derive from SupportsWeakPtr<T>");
if (weakRef)
weakRef->detach();
}
private:
friend class WeakPtr<T>;
// This can live beyond the lifetime of the class derived from SupportsWeakPtr.
class WeakReference : public RefCounted<WeakReference>
{
public:
explicit WeakReference(T* ptr) : ptr(ptr) {}
T* get() const {
return ptr;
}
private:
friend class WeakPtr<T>;
friend class SupportsWeakPtr<T>;
/* Called only from ~SupportsWeakPtr; afterwards get() returns null. */
void detach() {
ptr = nullptr;
}
T* ptr;
};
RefPtr<WeakReference> weakRef;
};
template <typename T>
class WeakPtr
{
public:
WeakPtr(const WeakPtr<T>& o) : ref(o.ref) {}
/* A default-constructed WeakPtr refers to nothing and tests false. */
WeakPtr() : ref(nullptr) {}

/* Implicit conversion used both to access the referent and for the
 * documented validity test (`if (weak) ...`).  Guarding against a null
 * |ref| makes that test safe on a default-constructed WeakPtr, which
 * previously dereferenced a null RefPtr and crashed. */
operator T*() const {
return ref ? ref->get() : nullptr;
}
/* The dereference operators require a valid, non-null WeakPtr obtained
 * from asWeakPtr() whose referent is still alive; test with `if (weak)`
 * first. */
T& operator*() const {
return *ref->get();
}
T* operator->() const {
return ref->get();
}
private:
friend class SupportsWeakPtr<T>;
explicit WeakPtr(const RefPtr<typename SupportsWeakPtr<T>::WeakReference> &o) : ref(o) {}
RefPtr<typename SupportsWeakPtr<T>::WeakReference> ref;
};
} // namespace mozilla
#endif /* ifdef mozilla_WeakPtr_h_ */

View File

@ -1 +1 @@
520bea66db801ee6ba7a86ca8974c3999fdc0a30
da504065669dce359843f03a46e60f49be6d156b

Some files were not shown because too many files have changed in this diff Show More