diff --git a/js/src/ctypes/CTypes.cpp b/js/src/ctypes/CTypes.cpp index 157090979e0..8c07d0885fb 100644 --- a/js/src/ctypes/CTypes.cpp +++ b/js/src/ctypes/CTypes.cpp @@ -4289,6 +4289,7 @@ StructType::ConstructData(JSContext* cx, if (argc == fields->count()) { for (FieldInfoHash::Range r = fields->all(); !r.empty(); r.popFront()) { const FieldInfo& field = r.front().value; + STATIC_ASSUME(field.mIndex < fields->count()); /* Quantified invariant */ if (!ImplicitConvert(cx, argv[field.mIndex], field.mType, buffer + field.mOffset, false, NULL)) diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp index 1047748c9c7..b31f104d323 100644 --- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -3955,6 +3955,7 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp) /* Non-native case: use the ida enumerated when iterobj was created. */ ida = (JSIdArray *) iterobj->getPrivate(); JS_ASSERT(i <= ida->length); + STATIC_ASSUME(i <= ida->length); if (i == 0) { *idp = JSID_VOID; } else { diff --git a/js/src/jsarena.cpp b/js/src/jsarena.cpp index 80440bcda1b..dd1b8a650d1 100644 --- a/js/src/jsarena.cpp +++ b/js/src/jsarena.cpp @@ -48,7 +48,7 @@ #include "jsstdint.h" #include "jsbit.h" #include "jsarena.h" -#include "jsutil.h" +#include "jsprvtd.h" #ifdef JS_ARENAMETER static JSArenaStats *arena_stats_list; diff --git a/js/src/jsarena.h b/js/src/jsarena.h index ad2abd19ac6..6eeb04a8cfa 100644 --- a/js/src/jsarena.h +++ b/js/src/jsarena.h @@ -126,6 +126,7 @@ struct JSArenaPool { else \ _a->avail = _p + _nb; \ p = (type) _p; \ + STATIC_ASSUME(!p || ubound((char *)p) >= nb); \ JS_ArenaCountAllocation(pool, nb); \ JS_END_MACRO @@ -149,6 +150,7 @@ struct JSArenaPool { } else { \ p = (type) JS_ArenaGrow(pool, p, size, incr); \ } \ + STATIC_ASSUME(!p || ubound((char *)p) >= size + incr); \ JS_ArenaCountGrowth(pool, size, incr); \ JS_END_MACRO diff --git a/js/src/jscntxt.cpp b/js/src/jscntxt.cpp index c9a896b8ce7..c9626bbb27f 100644 --- a/js/src/jscntxt.cpp +++ b/js/src/jscntxt.cpp @@ -221,6 
+221,7 @@ StackSpace::mark(JSTracer *trc) */ Value *end = firstUnused(); for (StackSegment *seg = currentSegment; seg; seg = seg->getPreviousInMemory()) { + STATIC_ASSERT(ubound(end) >= 0); if (seg->inContext()) { /* This may be the only pointer to the initialVarObj. */ if (seg->hasInitialVarObj()) diff --git a/js/src/jscntxtinlines.h b/js/src/jscntxtinlines.h index c5134c05387..a06cbf0253b 100644 --- a/js/src/jscntxtinlines.h +++ b/js/src/jscntxtinlines.h @@ -139,6 +139,7 @@ StackSpace::isCurrentAndActive(JSContext *cx) const currentSegment == cx->getCurrentSegment(); } +STATIC_POSTCONDITION(!return || ubound(from) >= nvals) JS_ALWAYS_INLINE bool StackSpace::ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const { @@ -642,6 +643,7 @@ assertSameCompartment(JSContext *cx, T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) #undef START_ASSERT_SAME_COMPARTMENT +STATIC_PRECONDITION_ASSUME(ubound(vp) >= argc + 2) JS_ALWAYS_INLINE bool CallJSNative(JSContext *cx, js::Native native, uintN argc, js::Value *vp) { @@ -657,6 +659,7 @@ CallJSNative(JSContext *cx, js::Native native, uintN argc, js::Value *vp) return ok; } +STATIC_PRECONDITION(ubound(vp) >= argc + 2) JS_ALWAYS_INLINE bool CallJSNativeConstructor(JSContext *cx, js::Native native, uintN argc, js::Value *vp) { diff --git a/js/src/jsfun.cpp b/js/src/jsfun.cpp index 2eec2313b2a..4963789c88b 100644 --- a/js/src/jsfun.cpp +++ b/js/src/jsfun.cpp @@ -210,7 +210,7 @@ NewArguments(JSContext *cx, JSObject *parent, uint32 argc, JSObject &callee) namespace { -struct PutArg +struct STATIC_SKIP_INFERENCE PutArg { PutArg(Value *dst) : dst(dst) {} Value *dst; @@ -2288,7 +2288,7 @@ js_fun_call(JSContext *cx, uintN argc, Value *vp) namespace { -struct CopyNonHoleArgs +struct STATIC_SKIP_INFERENCE CopyNonHoleArgs { CopyNonHoleArgs(JSObject *aobj, Value *dst) : aobj(aobj), dst(dst) {} JSObject *aobj; diff --git a/js/src/jsgc.h b/js/src/jsgc.h index 20679c45411..dc01d92aed1 100644 --- a/js/src/jsgc.h +++ b/js/src/jsgc.h @@ -328,6 +328,7 
@@ Cell::bitmap() const return &chunk()->bitmaps[arena()->arenaIndex()]; } +STATIC_POSTCONDITION_ASSUME(return < ArenaBitmap::BitCount) size_t Cell::cellIndex() const { diff --git a/js/src/jsinterp.h b/js/src/jsinterp.h index 95799c5cae8..690cf876b0e 100644 --- a/js/src/jsinterp.h +++ b/js/src/jsinterp.h @@ -52,6 +52,7 @@ struct JSFrameRegs { + STATIC_SKIP_INFERENCE js::Value *sp; /* stack pointer */ jsbytecode *pc; /* program counter */ JSStackFrame *fp; /* active frame */ diff --git a/js/src/jsprvtd.h b/js/src/jsprvtd.h index b20c555e931..a79cbbeaa95 100644 --- a/js/src/jsprvtd.h +++ b/js/src/jsprvtd.h @@ -55,6 +55,7 @@ */ #include "jspubtd.h" +#include "jsstaticcheck.h" #include "jsutil.h" JS_BEGIN_EXTERN_C diff --git a/js/src/jsstaticcheck.h b/js/src/jsstaticcheck.h index db38ebf2f16..c605d2f0848 100644 --- a/js/src/jsstaticcheck.h +++ b/js/src/jsstaticcheck.h @@ -66,4 +66,60 @@ JS_ASSERT_NOT_ON_TRACE(JSContext *cx) #endif #define VOUCH_HAVE_STACK VOUCH_DOES_NOT_REQUIRE_STACK +/* sixgill annotation defines */ + +/* Avoid name collision if included with other headers defining annotations. */ +#ifndef HAVE_STATIC_ANNOTATIONS +#define HAVE_STATIC_ANNOTATIONS + +#ifdef XGILL_PLUGIN + +#define STATIC_PRECONDITION(COND) __attribute__((precondition(#COND))) +#define STATIC_PRECONDITION_ASSUME(COND) __attribute__((precondition_assume(#COND))) +#define STATIC_POSTCONDITION(COND) __attribute__((postcondition(#COND))) +#define STATIC_POSTCONDITION_ASSUME(COND) __attribute__((postcondition_assume(#COND))) +#define STATIC_INVARIANT(COND) __attribute__((invariant(#COND))) +#define STATIC_INVARIANT_ASSUME(COND) __attribute__((invariant_assume(#COND))) + +/* Used to make identifiers for assert/assume annotations in a function. 
*/ +#define STATIC_PASTE2(X,Y) X ## Y +#define STATIC_PASTE1(X,Y) STATIC_PASTE2(X,Y) + +#define STATIC_ASSERT(COND) \ + JS_BEGIN_MACRO \ + __attribute__((assert_static(#COND), unused)) \ + int STATIC_PASTE1(assert_static_, __COUNTER__); \ + JS_END_MACRO + +#define STATIC_ASSUME(COND) \ + JS_BEGIN_MACRO \ + __attribute__((assume_static(#COND), unused)) \ + int STATIC_PASTE1(assume_static_, __COUNTER__); \ + JS_END_MACRO + +#define STATIC_ASSERT_RUNTIME(COND) \ + JS_BEGIN_MACRO \ + __attribute__((assert_static_runtime(#COND), unused)) \ + int STATIC_PASTE1(assert_static_runtime_, __COUNTER__); \ + JS_END_MACRO + +#else /* XGILL_PLUGIN */ + +#define STATIC_PRECONDITION(COND) /* nothing */ +#define STATIC_PRECONDITION_ASSUME(COND) /* nothing */ +#define STATIC_POSTCONDITION(COND) /* nothing */ +#define STATIC_POSTCONDITION_ASSUME(COND) /* nothing */ +#define STATIC_INVARIANT(COND) /* nothing */ +#define STATIC_INVARIANT_ASSUME(COND) /* nothing */ + +#define STATIC_ASSERT(COND) JS_BEGIN_MACRO /* nothing */ JS_END_MACRO +#define STATIC_ASSUME(COND) JS_BEGIN_MACRO /* nothing */ JS_END_MACRO +#define STATIC_ASSERT_RUNTIME(COND) JS_BEGIN_MACRO /* nothing */ JS_END_MACRO + +#endif /* XGILL_PLUGIN */ + +#define STATIC_SKIP_INFERENCE STATIC_INVARIANT(skip_inference()) + +#endif /* HAVE_STATIC_ANNOTATIONS */ + #endif /* jsstaticcheck_h___ */ diff --git a/js/src/jstl.h b/js/src/jstl.h index 967930b69dd..d05914289b1 100644 --- a/js/src/jstl.h +++ b/js/src/jstl.h @@ -212,6 +212,7 @@ class ReentrancyGuard * Round x up to the nearest power of 2. This function assumes that the most * significant bit of x is not set, which would lead to overflow. */ +STATIC_POSTCONDITION_ASSUME(return >= x) JS_ALWAYS_INLINE size_t RoundUpPow2(size_t x) { diff --git a/js/src/jsvector.h b/js/src/jsvector.h index cb80a0f178c..2450e1236c5 100644 --- a/js/src/jsvector.h +++ b/js/src/jsvector.h @@ -467,6 +467,7 @@ Vector::~Vector() * curLength and check for overflow. 
*/ template +STATIC_POSTCONDITION(!return || newCap >= curLength + lengthInc) inline bool Vector::calculateNewCapacity(size_t curLength, size_t lengthInc, size_t &newCap) @@ -622,6 +623,7 @@ Vector::growByUninitialized(size_t incr) } template +STATIC_POSTCONDITION(!return || ubound(this->begin()) >= newLength) inline bool Vector::resize(size_t newLength) { diff --git a/js/src/methodjit/InvokeHelpers.cpp b/js/src/methodjit/InvokeHelpers.cpp index 1f9cd245e9f..b24a9da0ecd 100644 --- a/js/src/methodjit/InvokeHelpers.cpp +++ b/js/src/methodjit/InvokeHelpers.cpp @@ -530,7 +530,7 @@ stubs::UncachedCallHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr) } if (ucr->fun->isNative()) { - if (!ucr->fun->u.n.native(cx, argc, vp)) + if (!CallJSNative(cx, ucr->fun->u.n.native, argc, vp)) THROW(); return; } diff --git a/js/src/methodjit/MonoIC.cpp b/js/src/methodjit/MonoIC.cpp index c6b47b3f7e6..ce12f4a9f3d 100644 --- a/js/src/methodjit/MonoIC.cpp +++ b/js/src/methodjit/MonoIC.cpp @@ -442,8 +442,7 @@ class CallCompiler : public BaseCompiler if (callingNew) vp[1].setMagicWithObjectOrNullPayload(NULL); - Native fn = fun->u.n.native; - if (!fn(cx, ic.argc, vp)) + if (!CallJSNative(cx, fun->u.n.native, ic.argc, vp)) THROWV(true); /* Right now, take slow-path for IC misses or multiple stubs. */ diff --git a/xpcom/glue/nsDebug.h b/xpcom/glue/nsDebug.h index 3eeddb669d9..5d43c3d3b02 100644 --- a/xpcom/glue/nsDebug.h +++ b/xpcom/glue/nsDebug.h @@ -189,6 +189,10 @@ ** When the tool is not running these macros are no-ops. ******************************************************************************/ +/* Avoid name collision if included with other headers defining annotations. 
*/ +#ifndef HAVE_STATIC_ANNOTATIONS +#define HAVE_STATIC_ANNOTATIONS + #ifdef XGILL_PLUGIN #define STATIC_PRECONDITION(COND) __attribute__((precondition(#COND))) @@ -204,34 +208,22 @@ #define STATIC_ASSERT(COND) \ PR_BEGIN_MACRO \ - __attribute__((assert(#COND), unused)) \ - int STATIC_PASTE1(static_assert_, __COUNTER__); \ + __attribute__((assert_static(#COND), unused)) \ + int STATIC_PASTE1(assert_static_, __COUNTER__); \ PR_END_MACRO #define STATIC_ASSUME(COND) \ PR_BEGIN_MACRO \ - __attribute__((assume(#COND), unused)) \ - int STATIC_PASTE1(static_assume_, __COUNTER__); \ + __attribute__((assume_static(#COND), unused)) \ + int STATIC_PASTE1(assume_static_, __COUNTER__); \ PR_END_MACRO #define STATIC_ASSERT_RUNTIME(COND) \ PR_BEGIN_MACRO \ - __attribute__((assert_runtime(#COND), unused)) \ - int STATIC_PASTE1(static_assert_runtime_, __COUNTER__); \ + __attribute__((assert_static_runtime(#COND), unused)) \ + int STATIC_PASTE1(assert_static_runtime_, __COUNTER__); \ PR_END_MACRO -/* Redefine runtime assertion macros to perform static assertions, for both - * debug and release builds. Don't include the original runtime assertions; - * this ensures the tool will consider cases where the assertion fails. */ - -#undef NS_PRECONDITION -#undef NS_ASSERTION -#undef NS_POSTCONDITION - -#define NS_PRECONDITION(expr, str) STATIC_ASSERT_RUNTIME(expr) -#define NS_ASSERTION(expr, str) STATIC_ASSERT_RUNTIME(expr) -#define NS_POSTCONDITION(expr, str) STATIC_ASSERT_RUNTIME(expr) - #else /* XGILL_PLUGIN */ #define STATIC_PRECONDITION(COND) /* nothing */ @@ -247,6 +239,26 @@ #endif /* XGILL_PLUGIN */ +#define STATIC_SKIP_INFERENCE STATIC_INVARIANT(skip_inference()) + +#endif /* HAVE_STATIC_ANNOTATIONS */ + +#ifdef XGILL_PLUGIN + +/* Redefine runtime assertion macros to perform static assertions, for both + * debug and release builds. Don't include the original runtime assertions; + * this ensures the tool will consider cases where the assertion fails. 
*/ + +#undef NS_PRECONDITION +#undef NS_ASSERTION +#undef NS_POSTCONDITION + +#define NS_PRECONDITION(expr, str) STATIC_ASSERT_RUNTIME(expr) +#define NS_ASSERTION(expr, str) STATIC_ASSERT_RUNTIME(expr) +#define NS_POSTCONDITION(expr, str) STATIC_ASSERT_RUNTIME(expr) + +#endif /* XGILL_PLUGIN */ + /****************************************************************************** ** Macros for terminating execution when an unrecoverable condition is ** reached. These need to be compiled regardless of the NS_DEBUG flag.