SERVER-726 optimize exclusive bounds

commit fe7a8dfaf48e167a5cfeed4aa1b27ae803623688 (1 parent fef8cb5), authored by @astaple
db/btree.cpp (51 changed lines)
@@ -366,15 +366,17 @@ namespace mongo {
}
}
- int BtreeBucket::customBSONCmp( const BSONObj &l, const BSONObj &rBegin, int rBeginLen, const vector< const BSONElement * > &rEnd, const Ordering &o ) {
+ int BtreeBucket::customBSONCmp( const BSONObj &l, const BSONObj &rBegin, int rBeginLen, bool rSup, const vector< const BSONElement * > &rEnd, const vector< bool > &rEndInclusive, const Ordering &o, int direction ) {
BSONObjIterator ll( l );
BSONObjIterator rr( rBegin );
vector< const BSONElement * >::const_iterator rr2 = rEnd.begin();
+ vector< bool >::const_iterator inc = rEndInclusive.begin();
unsigned mask = 1;
for( int i = 0; i < rBeginLen; ++i, mask <<= 1 ) {
BSONElement lll = ll.next();
BSONElement rrr = rr.next();
++rr2;
+ ++inc;
int x = lll.woCompare( rrr, false );
if ( o.descending( mask ) )
@@ -382,6 +384,9 @@ namespace mongo {
if ( x != 0 )
return x;
}
+ if ( rSup ) {
+ return -direction;
+ }
for( ; ll.more(); mask <<= 1 ) {
BSONElement lll = ll.next();
BSONElement rrr = **rr2;
@@ -391,6 +396,10 @@ namespace mongo {
x = -x;
if ( x != 0 )
return x;
+ if ( !*inc ) {
+ return -direction;
+ }
+ ++inc;
}
return 0;
}
@@ -879,8 +888,8 @@ namespace mongo {
else
return pos == n ? DiskLoc() /*theend*/ : thisLoc;
}
-
- bool BtreeBucket::customFind( int l, int h, const BSONObj &keyBegin, int keyBeginLen, const vector< const BSONElement * > &keyEnd, const Ordering &order, int direction, DiskLoc &thisLoc, int &keyOfs, pair< DiskLoc, int > &bestParent ) {
+
+ bool BtreeBucket::customFind( int l, int h, const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive, const Ordering &order, int direction, DiskLoc &thisLoc, int &keyOfs, pair< DiskLoc, int > &bestParent ) {
while( 1 ) {
if ( l + 1 == h ) {
keyOfs = ( direction > 0 ) ? h : l;
@@ -894,7 +903,7 @@ namespace mongo {
}
}
int m = l + ( h - l ) / 2;
- int cmp = customBSONCmp( thisLoc.btree()->keyNode( m ).key, keyBegin, keyBeginLen, keyEnd, order );
+ int cmp = customBSONCmp( thisLoc.btree()->keyNode( m ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction );
if ( cmp < 0 ) {
l = m;
} else if ( cmp > 0 ) {
@@ -912,22 +921,22 @@ namespace mongo {
// find smallest/biggest value greater-equal/less-equal than specified
// starting thisLoc + keyOfs will be strictly less than/strictly greater than keyBegin/keyBeginLen/keyEnd
// All the direction checks below allowed me to refactor the code, but possibly separate forward and reverse implementations would be more efficient
- void BtreeBucket::advanceTo(const IndexDetails &id, DiskLoc &thisLoc, int &keyOfs, const BSONObj &keyBegin, int keyBeginLen, const vector< const BSONElement * > &keyEnd, const Ordering &order, int direction ) {
+ void BtreeBucket::advanceTo(DiskLoc &thisLoc, int &keyOfs, const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive, const Ordering &order, int direction ) {
int l,h;
bool dontGoUp;
if ( direction > 0 ) {
l = keyOfs;
h = n - 1;
- dontGoUp = ( customBSONCmp( keyNode( h ).key, keyBegin, keyBeginLen, keyEnd, order ) >= 0 );
+ dontGoUp = ( customBSONCmp( keyNode( h ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) >= 0 );
} else {
l = 0;
h = keyOfs;
- dontGoUp = ( customBSONCmp( keyNode( l ).key, keyBegin, keyBeginLen, keyEnd, order ) <= 0 );
+ dontGoUp = ( customBSONCmp( keyNode( l ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) <= 0 );
}
pair< DiskLoc, int > bestParent;
if ( dontGoUp ) {
// this comparison result assures h > l
- if ( !customFind( l, h, keyBegin, keyBeginLen, keyEnd, order, direction, thisLoc, keyOfs, bestParent ) ) {
+ if ( !customFind( l, h, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction, thisLoc, keyOfs, bestParent ) ) {
return;
}
} else {
@@ -935,26 +944,34 @@ namespace mongo {
while( !thisLoc.btree()->parent.isNull() ) {
thisLoc = thisLoc.btree()->parent;
if ( direction > 0 ) {
- if ( customBSONCmp( thisLoc.btree()->keyNode( thisLoc.btree()->n - 1 ).key, keyBegin, keyBeginLen, keyEnd, order ) >= 0 ) {
+ if ( customBSONCmp( thisLoc.btree()->keyNode( thisLoc.btree()->n - 1 ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) >= 0 ) {
break;
}
} else {
- if ( customBSONCmp( thisLoc.btree()->keyNode( 0 ).key, keyBegin, keyBeginLen, keyEnd, order ) <= 0 ) {
+ if ( customBSONCmp( thisLoc.btree()->keyNode( 0 ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) <= 0 ) {
break;
}
}
}
}
+ customLocate( thisLoc, keyOfs, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction, bestParent );
+ }
+
+ void BtreeBucket::customLocate(DiskLoc &thisLoc, int &keyOfs, const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive, const Ordering &order, int direction, pair< DiskLoc, int > &bestParent ) {
+ if ( thisLoc.btree()->n == 0 ) {
+ thisLoc = DiskLoc();
+ return;
+ }
// go down until find smallest/biggest >=/<= target
while( 1 ) {
- l = 0;
- h = thisLoc.btree()->n - 1;
+ int l = 0;
+ int h = thisLoc.btree()->n - 1;
// leftmost/rightmost key may possibly be >=/<= search key
bool firstCheck;
if ( direction > 0 ) {
- firstCheck = ( customBSONCmp( thisLoc.btree()->keyNode( 0 ).key, keyBegin, keyBeginLen, keyEnd, order ) >= 0 );
+ firstCheck = ( customBSONCmp( thisLoc.btree()->keyNode( 0 ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) >= 0 );
} else {
- firstCheck = ( customBSONCmp( thisLoc.btree()->keyNode( h ).key, keyBegin, keyBeginLen, keyEnd, order ) <= 0 );
+ firstCheck = ( customBSONCmp( thisLoc.btree()->keyNode( h ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) <= 0 );
}
if ( firstCheck ) {
DiskLoc next;
@@ -975,9 +992,9 @@ namespace mongo {
}
bool secondCheck;
if ( direction > 0 ) {
- secondCheck = ( customBSONCmp( thisLoc.btree()->keyNode( h ).key, keyBegin, keyBeginLen, keyEnd, order ) < 0 );
+ secondCheck = ( customBSONCmp( thisLoc.btree()->keyNode( h ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) < 0 );
} else {
- secondCheck = ( customBSONCmp( thisLoc.btree()->keyNode( 0 ).key, keyBegin, keyBeginLen, keyEnd, order ) > 0 );
+ secondCheck = ( customBSONCmp( thisLoc.btree()->keyNode( 0 ).key, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction ) > 0 );
}
if ( secondCheck ) {
DiskLoc next;
@@ -996,7 +1013,7 @@ namespace mongo {
continue;
}
}
- if ( !customFind( l, h, keyBegin, keyBeginLen, keyEnd, order, direction, thisLoc, keyOfs, bestParent ) ) {
+ if ( !customFind( l, h, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, order, direction, thisLoc, keyOfs, bestParent ) ) {
return;
}
}
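
The customBSONCmp change above is the heart of the optimization: when rSup (afterKey) is set, a key whose leading keyBeginLen fields equal keyBegin compares as before the bound, so advanceTo lands on the first key with a strictly greater prefix; rEndInclusive does the same per trailing field, so equality with an exclusive bound also counts as out of range; direction lets one comparison serve forward and reverse scans. The practical effect is that exclusive bounds can cut a scan short instead of being filtered key by key. A minimal mongo-shell illustration, assuming a throwaway collection name (the new jstests/indexj.js added in this commit asserts the same number):

// Illustration only; the collection name is arbitrary. Mirrors case "A" of the
// new jstests/indexj.js: the open interval (4,5) contains no index keys, so
// explain() reports nscanned 0 for the exclusive range.
t = db.exclusive_bounds_demo;
t.drop();
t.ensureIndex( { a: 1 } );
t.save( { a: 5 } );
assert.eq( 0, t.find( { a: { $gt: 4, $lt: 5 } } ).explain().nscanned );
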
db/btree.h (10 changed lines)
@@ -235,7 +235,8 @@ namespace mongo {
/* advance one key position in the index: */
DiskLoc advance(const DiskLoc& thisLoc, int& keyOfs, int direction, const char *caller);
- void advanceTo(const IndexDetails &id, DiskLoc &thisLoc, int &keyOfs, const BSONObj &keyBegin, int keyBeginLen, const vector< const BSONElement * > &keyEnd, const Ordering &order, int direction );
+ void advanceTo(DiskLoc &thisLoc, int &keyOfs, const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive, const Ordering &order, int direction );
+ void customLocate(DiskLoc &thisLoc, int &keyOfs, const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive, const Ordering &order, int direction, pair< DiskLoc, int > &bestParent );
DiskLoc getHead(const DiskLoc& thisLoc);
@@ -259,9 +260,9 @@ namespace mongo {
const BSONObj& key, const Ordering &order, bool dupsAllowed,
DiskLoc lChild, DiskLoc rChild, IndexDetails&);
bool find(const IndexDetails& idx, const BSONObj& key, DiskLoc recordLoc, const Ordering &order, int& pos, bool assertIfDup);
- bool customFind( int l, int h, const BSONObj &keyBegin, int keyBeginLen, const vector< const BSONElement * > &keyEnd, const Ordering &order, int direction, DiskLoc &thisLoc, int &keyOfs, pair< DiskLoc, int > &bestParent );
+ bool customFind( int l, int h, const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive, const Ordering &order, int direction, DiskLoc &thisLoc, int &keyOfs, pair< DiskLoc, int > &bestParent );
static void findLargestKey(const DiskLoc& thisLoc, DiskLoc& largestLoc, int& largestKey);
- static int customBSONCmp( const BSONObj &l, const BSONObj &rBegin, int rBeginLen, const vector< const BSONElement * > &rEnd, const Ordering &o );
+ static int customBSONCmp( const BSONObj &l, const BSONObj &rBegin, int rBeginLen, bool rSup, const vector< const BSONElement * > &rEnd, const vector< bool > &rEndInclusive, const Ordering &o, int direction );
public:
// simply builds and returns a dup key error message string
static string dupKeyError( const IndexDetails& idx , const BSONObj& key );
@@ -384,7 +385,8 @@ namespace mongo {
// set initial bucket
void init();
- void advanceTo( const BSONObj &keyBegin, int keyBeginLen, const vector< const BSONElement * > &keyEnd);
+ // if afterKey is true, we want the first key with values of the keyBegin fields greater than keyBegin
+ void advanceTo( const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive );
friend class BtreeBucket;
set<DiskLoc> dups;
db/btreecursor.cpp (14 changed lines)
@@ -64,10 +64,12 @@ namespace mongo {
massert( 13384, "BtreeCursor FieldRangeVector constructor doesn't accept special indexes", !_spec.getType() );
audit();
startKey = bounds_->startKey();
- bool found;
_boundsIterator->advance( startKey ); // handles initialization
- bucket = indexDetails.head.btree()->
- locate(indexDetails, indexDetails.head, startKey, _ordering, keyOfs, found, direction > 0 ? minDiskLoc : maxDiskLoc, direction);
+ _boundsIterator->prepDive();
+ pair< DiskLoc, int > noBestParent;
+ bucket = indexDetails.head;
+ keyOfs = 0;
+ indexDetails.head.btree()->customLocate( bucket, keyOfs, startKey, 0, false, _boundsIterator->cmp(), _boundsIterator->inc(), _ordering, direction, noBestParent );
skipAndCheck();
DEV assert( dups.size() == 0 );
}
@@ -128,7 +130,7 @@ namespace mongo {
return false;
}
++_nscanned;
- advanceTo( currKeyNode().key, ret, _boundsIterator->cmp() );
+ advanceTo( currKeyNode().key, ret, _boundsIterator->after(), _boundsIterator->cmp(), _boundsIterator->inc() );
return true;
}
@@ -174,8 +176,8 @@ namespace mongo {
}
}
- void BtreeCursor::advanceTo( const BSONObj &keyBegin, int keyBeginLen, const vector< const BSONElement * > &keyEnd) {
- bucket.btree()->advanceTo( indexDetails, bucket, keyOfs, keyBegin, keyBeginLen, keyEnd, _ordering, direction );
+ void BtreeCursor::advanceTo( const BSONObj &keyBegin, int keyBeginLen, bool afterKey, const vector< const BSONElement * > &keyEnd, const vector< bool > &keyEndInclusive) {
+ bucket.btree()->advanceTo( bucket, keyOfs, keyBegin, keyBeginLen, afterKey, keyEnd, keyEndInclusive, _ordering, direction );
}
bool BtreeCursor::advance() {
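
Alongside this, the BtreeCursor constructor above no longer positions itself with locate() on the raw startKey: it primes the bounds iterator with prepDive() and then calls customLocate with the iterator's cmp()/inc() vectors, so an exclusive lower bound is already honoured by the initial positioning rather than being skipped over on a later advance. A short shell sketch, again with an arbitrary collection name (this is the "B" case asserted by the new jstests/indexj.js):

// Illustration only. With just a:4 stored, the initial customLocate lands past
// the key equal to the exclusive lower bound, so the empty open interval (4,5)
// is answered with nscanned 0.
t = db.initial_position_demo;
t.drop();
t.ensureIndex( { a: 1 } );
t.save( { a: 4 } );
assert.eq( 0, t.find( { a: { $gt: 4, $lt: 5 } } ).explain().nscanned );
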
db/query.cpp (3 changed lines)
@@ -785,6 +785,9 @@ namespace mongo {
// this plan won, so set data for response broadly
void finish( bool stop ) {
+ if ( _c.get() ) {
+ _nscanned = _c->nscanned();
+ }
if ( _pq.isExplain() ) {
_n = _inMemSort ? _so->size() : _n;
}
db/queryutil.cpp (70 changed lines)
@@ -940,22 +940,28 @@ namespace mongo {
}
bool FieldRangeVector::matchesElement( const BSONElement &e, int i, bool forward ) const {
- int l = matchingLowElement( e, i, forward );
+ bool eq;
+ int l = matchingLowElement( e, i, forward, eq );
return ( l % 2 == 0 ); // if we're inside an interval
}
// binary search for interval containing the specified element
// an even return value indicates that the element is contained within a valid interval
- int FieldRangeVector::matchingLowElement( const BSONElement &e, int i, bool forward ) const {
+ int FieldRangeVector::matchingLowElement( const BSONElement &e, int i, bool forward, bool &lowEquality ) const {
+ lowEquality = false;
int l = -1;
int h = _ranges[ i ].intervals().size() * 2;
while( l + 1 < h ) {
int m = ( l + h ) / 2;
BSONElement toCmp;
+ bool toCmpInclusive;
+ const FieldInterval &interval = _ranges[ i ].intervals()[ m / 2 ];
if ( m % 2 == 0 ) {
- toCmp = _ranges[ i ].intervals()[ m / 2 ]._lower._bound;
+ toCmp = interval._lower._bound;
+ toCmpInclusive = interval._lower._inclusive;
} else {
- toCmp = _ranges[ i ].intervals()[ m / 2 ]._upper._bound;
+ toCmp = interval._upper._bound;
+ toCmpInclusive = interval._upper._inclusive;
}
int cmp = toCmp.woCompare( e, false );
if ( !forward ) {
@@ -966,7 +972,18 @@ namespace mongo {
} else if ( cmp > 0 ) {
h = m;
} else {
- return ( m % 2 == 0 ) ? m : m - 1;
+ if ( m % 2 == 0 ) {
+ lowEquality = true;
+ }
+ int ret = m;
+ // if left match and inclusive, all good
+ // if left match and not inclusive, return right before left bound
+ // if right match and inclusive, return left bound
+ // if right match and not inclusive, return right bound
+ if ( ( m % 2 == 0 && !toCmpInclusive ) || ( m % 2 == 1 && toCmpInclusive ) ) {
+ --ret;
+ }
+ return ret;
}
}
assert( l + 1 == h );
@@ -1016,7 +1033,8 @@ namespace mongo {
for( int i = 0; i < (int)_i.size(); ++i ) {
if ( i > 0 && !_v._ranges[ i - 1 ].intervals()[ _i[ i - 1 ] ].equality() ) {
// if last bound was inequality, we don't know anything about where we are for this field
- // TODO if possible avoid this certain cases when field in prev key is the same
+ // TODO if possible avoid this in certain cases when the value of the previous field in the
+ // previous key is the same as the value of the previous field in the current key
setMinus( i );
}
bool eq = false;
@@ -1024,7 +1042,8 @@ namespace mongo {
bool reverse = ( ( oo.number() < 0 ) ^ ( _v._direction < 0 ) );
BSONElement jj = j.next();
if ( _i[ i ] == -1 ) { // unknown position for this field, do binary search
- int l = _v.matchingLowElement( jj, i, !reverse );
+ bool lowEquality;
+ int l = _v.matchingLowElement( jj, i, !reverse, lowEquality );
if ( l % 2 == 0 ) { // we are in a valid range for this field
_i[ i ] = l / 2;
int diff = (int)_v._ranges[ i ].intervals().size() - _i[ i ];
@@ -1045,17 +1064,23 @@ namespace mongo {
}
setZero( latestNonEndpoint + 1 );
// skip to curr / latestNonEndpoint + 1 / superlative
- for( int j = latestNonEndpoint + 1; j < (int)_i.size(); ++j ) {
- _cmp[ j ] = _superlative[ j ];
- }
+ _after = true;
return latestNonEndpoint + 1;
}
_i[ i ] = ( l + 1 ) / 2;
+ if ( lowEquality ) {
+ // skip to curr / i + 1 / superlative
+ _after = true;
+ return i + 1;
+ }
// skip to curr / i / nextbounds
_cmp[ i ] = &_v._ranges[ i ].intervals()[ _i[ i ] ]._lower._bound;
+ _inc[ i ] = _v._ranges[ i ].intervals()[ _i[ i ] ]._lower._inclusive;
for( int j = i + 1; j < (int)_i.size(); ++j ) {
_cmp[ j ] = &_v._ranges[ j ].intervals().front()._lower._bound;
+ _inc[ j ] = _v._ranges[ j ].intervals().front()._lower._inclusive;
}
+ _after = false;
return i;
}
}
@@ -1069,7 +1094,7 @@ namespace mongo {
if ( reverse ) {
x = -x;
}
- if ( x == 0 ) {
+ if ( x == 0 && _v._ranges[ i ].intervals()[ _i[ i ] ]._upper._inclusive ) {
eq = true;
break;
}
@@ -1088,14 +1113,24 @@ namespace mongo {
x = -x;
}
}
+ // if we're equal to the lower bound but it is not inclusive, advance
+ if ( ( x == 0 && !_v._ranges[ i ].intervals()[ _i[ i ] ]._lower._inclusive ) ) {
+ setZero( i + 1 );
+ // skip to curr / i + 1 / superlative
+ _after = true;
+ return i + 1;
+ }
// if we're less than the lower bound, advance
if ( x > 0 ) {
setZero( i + 1 );
// skip to curr / i / nextbounds
_cmp[ i ] = &_v._ranges[ i ].intervals()[ _i[ i ] ]._lower._bound;
+ _inc[ i ] = _v._ranges[ i ].intervals()[ _i[ i ] ]._lower._inclusive;
for( int j = i + 1; j < (int)_i.size(); ++j ) {
_cmp[ j ] = &_v._ranges[ j ].intervals().front()._lower._bound;
+ _inc[ j ] = _v._ranges[ j ].intervals().front()._lower._inclusive;
}
+ _after = false;
return i;
} else {
break;
@@ -1108,7 +1143,7 @@ namespace mongo {
}
int diff = (int)_v._ranges[ i ].intervals().size() - _i[ i ];
if ( diff > 1 || ( !eq && diff == 1 ) ) {
- // check if we're not at the end of valid values for this field
+ // check if we're not at the end of valid values for this field
latestNonEndpoint = i;
} else if ( diff == 0 ) { // check if we're past the last interval for this field
if ( latestNonEndpoint == -1 ) {
@@ -1117,15 +1152,20 @@ namespace mongo {
// more values possible, skip...
setZero( latestNonEndpoint + 1 );
// skip to curr / latestNonEndpoint + 1 / superlative
- for( int j = latestNonEndpoint + 1; j < (int)_i.size(); ++j ) {
- _cmp[ j ] = _superlative[ j ];
- }
+ _after = true;
return latestNonEndpoint + 1;
}
}
return -1;
}
+ void FieldRangeVector::Iterator::prepDive() {
+ for( int j = 0; j < (int)_i.size(); ++j ) {
+ _cmp[ j ] = &_v._ranges[ j ].intervals().front()._lower._bound;
+ _inc[ j ] = _v._ranges[ j ].intervals().front()._lower._inclusive;
+ }
+ }
+
struct SimpleRegexUnitTest : UnitTest {
void run(){
{
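
The queryutil.cpp changes center on matchingLowElement, which binary-searches the 2*n flattened interval endpoints for a field: an even return value r means the element lies inside intervals()[ r / 2 ], and the new inclusive-endpoint handling shifts an exact hit on an exclusive lower bound just outside its interval (reporting the equality through lowEquality so the caller can advance with afterKey). A self-contained sketch of that endpoint search, simplified to numeric bounds and omitting the forward/lowEquality parameters, to make the even/odd convention concrete:

// Simplified sketch, not the server implementation: numbers instead of BSONElements.
// intervals is sorted and non-overlapping; each entry carries lower/upper bounds
// plus inclusive flags. An even return value r means e lies inside intervals[ r / 2 ].
function matchingLowElement( intervals, e ) {
    var l = -1, h = intervals.length * 2;   // search over the 2*n flattened endpoints
    while ( l + 1 < h ) {
        var m = Math.floor( ( l + h ) / 2 );
        var iv = intervals[ Math.floor( m / 2 ) ];
        var bound = ( m % 2 === 0 ) ? iv.lower : iv.upper;
        var inclusive = ( m % 2 === 0 ) ? iv.lowerInclusive : iv.upperInclusive;
        if ( bound < e ) {
            l = m;
        } else if ( bound > e ) {
            h = m;
        } else {
            // exact hit on an endpoint: an exclusive lower bound places e just
            // before the interval, an inclusive upper bound places it inside
            var ret = m;
            if ( ( m % 2 === 0 && !inclusive ) || ( m % 2 === 1 && inclusive ) ) {
                --ret;
            }
            return ret;
        }
    }
    return l;   // largest endpoint index strictly below e; an odd value means between intervals
}

// Intervals (1,3] and [5,7):
var ivs = [ { lower: 1, upper: 3, lowerInclusive: false, upperInclusive: true },
            { lower: 5, upper: 7, lowerInclusive: true,  upperInclusive: false } ];
print( matchingLowElement( ivs, 2 ) );   // 0  -> inside (1,3]
print( matchingLowElement( ivs, 3 ) );   // 0  -> inclusive upper bound, still inside
print( matchingLowElement( ivs, 1 ) );   // -1 -> exclusive lower bound, just before (1,3]
print( matchingLowElement( ivs, 4 ) );   // 1  -> odd, between the two intervals
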
db/queryutil.h (22 changed lines)
@@ -60,7 +60,7 @@ namespace mongo {
const FieldRange &operator&=( const FieldRange &other );
const FieldRange &operator|=( const FieldRange &other );
// does not remove fully contained ranges (eg [1,3] - [2,2] doesn't remove anything)
- // in future we can change so that an or on $in:[3] combined with $in:{$gt:2} doesn't scan 3 a second time
+ // in future we can change so that an or on $in:[3] combined with $gt:2 doesn't scan 3 a second time
const FieldRange &operator-=( const FieldRange &other );
// true iff other includes this
bool operator<=( const FieldRange &other );
@@ -381,17 +381,7 @@ namespace mongo {
bool matches( const BSONObj &obj ) const;
class Iterator {
public:
- Iterator( const FieldRangeVector &v ) : _v( v ), _i( _v._ranges.size(), -1 ), _cmp( _v._ranges.size(), 0 ), _superlative( _v._ranges.size(), 0 ) {
- static BSONObj minObj = minObject();
- static BSONElement minElt = minObj.firstElement();
- static BSONObj maxObj = maxObject();
- static BSONElement maxElt = maxObj.firstElement();
- BSONObjIterator i( _v._keyPattern );
- for( int j = 0; j < (int)_superlative.size(); ++j ) {
- int number = (int) i.next().number();
- bool forward = ( ( number >= 0 ? 1 : -1 ) * ( _v._direction >= 0 ? 1 : -1 ) > 0 );
- _superlative[ j ] = forward ? &maxElt : &minElt;
- }
+ Iterator( const FieldRangeVector &v ) : _v( v ), _i( _v._ranges.size(), -1 ), _cmp( _v._ranges.size(), 0 ), _inc( _v._ranges.size(), false ), _after() {
}
static BSONObj minObject() {
BSONObjBuilder b;
@@ -424,6 +414,9 @@ namespace mongo {
// >= 0 skip parameter
int advance( const BSONObj &curr );
const vector< const BSONElement * > &cmp() const { return _cmp; }
+ const vector< bool > &inc() const { return _inc; }
+ bool after() const { return _after; }
+ void prepDive();
void setZero( int i ) {
for( int j = i; j < (int)_i.size(); ++j ) {
_i[ j ] = 0;
@@ -459,10 +452,11 @@ namespace mongo {
const FieldRangeVector &_v;
vector< int > _i;
vector< const BSONElement* > _cmp;
- vector< const BSONElement* > _superlative;
+ vector< bool > _inc;
+ bool _after;
};
private:
- int matchingLowElement( const BSONElement &e, int i, bool direction ) const;
+ int matchingLowElement( const BSONElement &e, int i, bool direction, bool &lowEquality ) const;
bool matchesElement( const BSONElement &e, int i, bool direction ) const;
vector< FieldRange > _ranges;
BSONObj _keyPattern;
dbtests/queryoptimizertests.cpp (2 changed lines)
@@ -1415,7 +1415,7 @@ namespace QueryOptimizerTests {
theDataFileMgr.insertWithObjMod( ns(), one );
theDataFileMgr.insertWithObjMod( ns(), two );
theDataFileMgr.insertWithObjMod( ns(), three );
- deleteObjects( ns(), BSON( "a" << GTE << 0 << "_id" << GT << 0 ), true );
+ deleteObjects( ns(), BSON( "a" << GTE << 0 ), true );
for( boost::shared_ptr<Cursor> c = theDataFileMgr.findAll( ns() ); c->ok(); c->advance() )
ASSERT( 2 != c->current().getIntField( "_id" ) );
}
jstests/explain2.js (6 changed lines)
@@ -16,12 +16,12 @@ function go( q , c , b , o ){
}
q = { a : { $gt : 3 } }
-go( q , 6 , 7 , 6 );
+go( q , 6 , 6 , 6 );
q.b = 5
-go( q , 1 , 2 , 1 );
+go( q , 1 , 6 , 1 );
delete q.b
q.c = 5
-go( q , 1 , 7 , 6 );
+go( q , 1 , 6 , 6 );
jstests/in4.js (2 changed lines)
@@ -27,7 +27,7 @@ checkRanges( {a:[[2,2],[3,3]],b:[[4,10]]}, t.find( {a:{$in:[2,3]},b:{$gt:4,$lt:1
t.save( {a:1,b:1} );
t.save( {a:2,b:4.5} );
t.save( {a:2,b:4} );
-assert.eq.automsg( "1", "t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).explain().nscanned" );
+assert.eq.automsg( "2", "t.find( {a:{$in:[2,3]},b:{$in:[4,5]}} ).explain().nscanned" );
assert.eq.automsg( "2", "t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).a" );
assert.eq.automsg( "4", "t.findOne( {a:{$in:[2,3]},b:{$in:[4,5]}} ).b" );
jstests/index_check6.js (10 changed lines)
@@ -41,21 +41,21 @@ assert.eq.automsg( "3", "t.find( { a:5, b:5, c:{$gte:5,$lte:7} } ).sort( sort ).
assert.eq.automsg( "4", "t.find( { a:5, b:{$gte:5,$lte:6}, c:5 } ).sort( sort ).explain().nscanned" );
if ( s.b > 0 ) {
assert.eq.automsg( "2", "t.find( { a:5, b:{$gte:5.5,$lte:6}, c:5 } ).sort( sort ).explain().nscanned" );
- assert.eq.automsg( "1", "t.find( { a:5, b:{$gte:5,$lte:5.5}, c:5 } ).sort( sort ).explain().nscanned" );
+ assert.eq.automsg( "2", "t.find( { a:5, b:{$gte:5,$lte:5.5}, c:5 } ).sort( sort ).explain().nscanned" );
} else {
- assert.eq.automsg( "1", "t.find( { a:5, b:{$gte:5.5,$lte:6}, c:5 } ).sort( sort ).explain().nscanned" );
+ assert.eq.automsg( "2", "t.find( { a:5, b:{$gte:5.5,$lte:6}, c:5 } ).sort( sort ).explain().nscanned" );
assert.eq.automsg( "2", "t.find( { a:5, b:{$gte:5,$lte:5.5}, c:5 } ).sort( sort ).explain().nscanned" );
}
assert.eq.automsg( "7", "t.find( { a:5, b:{$gte:5,$lte:7}, c:5 } ).sort( sort ).explain().nscanned" );
assert.eq.automsg( "4", "t.find( { a:{$gte:5,$lte:6}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
if ( s.a > 0 ) {
assert.eq.automsg( "2", "t.find( { a:{$gte:5.5,$lte:6}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
- assert.eq.automsg( "1", "t.find( { a:{$gte:5,$lte:5.5}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
+ assert.eq.automsg( "2", "t.find( { a:{$gte:5,$lte:5.5}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
assert.eq.automsg( "3", "t.find( { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} } ).sort( sort ).explain().nscanned" );
} else {
- assert.eq.automsg( "1", "t.find( { a:{$gte:5.5,$lte:6}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
+ assert.eq.automsg( "2", "t.find( { a:{$gte:5.5,$lte:6}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
assert.eq.automsg( "2", "t.find( { a:{$gte:5,$lte:5.5}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
- assert.eq.automsg( "2", "t.find( { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} } ).sort( sort ).explain().nscanned" );
+ assert.eq.automsg( "3", "t.find( { a:{$gte:5.5,$lte:6}, b:5, c:{$gte:5,$lte:6} } ).sort( sort ).explain().nscanned" );
}
assert.eq.automsg( "7", "t.find( { a:{$gte:5,$lte:7}, b:5, c:5 } ).sort( sort ).explain().nscanned" );
assert.eq.automsg( "6", "t.find( { a:{$gte:5,$lte:6}, b:5, c:{$gte:5,$lte:6} } ).sort( sort ).explain().nscanned" );
jstests/index_check7.js (2 changed lines)
@@ -11,5 +11,5 @@ assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "A" )
t.ensureIndex( { x : -1 } )
assert.eq( 1 , t.find( { x : 27 } ).explain().nscanned , "B" )
-assert.eq( 41 , t.find( { x : { $gt : 59 } } ).explain().nscanned , "C" );
+assert.eq( 40 , t.find( { x : { $gt : 59 } } ).explain().nscanned , "C" );
jstests/indexj.js (44 changed lines, new file)
@@ -0,0 +1,44 @@
+// SERVER-726
+
+t = db.jstests_indexj;
+t.drop();
+
+t.ensureIndex( {a:1} );
+t.save( {a:5} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "A" );
+
+t.drop();
+t.ensureIndex( {a:1} );
+t.save( {a:4} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "B" );
+
+t.save( {a:5} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "D" );
+
+t.save( {a:4} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "C" );
+
+t.save( {a:5} );
+assert.eq( 0, t.find( { a: { $gt:4, $lt:5 } } ).explain().nscanned, "D" );
+
+t.drop();
+t.ensureIndex( {a:1,b:1} );
+t.save( { a:1,b:1 } );
+t.save( { a:1,b:2 } );
+t.save( { a:2,b:1 } );
+t.save( { a:2,b:2 } );
+
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).explain().nscanned );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).sort( {a:-1,b:-1} ).explain().nscanned );
+
+t.save( {a:1,b:1} );
+t.save( {a:1,b:1} );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).explain().nscanned );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).explain().nscanned );
+assert.eq( 2, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).sort( {a:-1,b:-1} ).explain().nscanned );
+
+assert.eq( 1, t.find( { a:{$in:[1,1.9]}, b:{$gt:1,$lt:2} } ).explain().nscanned );
+assert.eq( 1, t.find( { a:{$in:[1.1,2]}, b:{$gt:1,$lt:2} } ).sort( {a:-1,b:-1} ).explain().nscanned );
+
+t.save( { a:1,b:1.5} );
+assert.eq( 3, t.find( { a:{$in:[1,2]}, b:{$gt:1,$lt:2} } ).explain().nscanned, "F" );
jstests/regex3.js (2 changed lines)
@@ -23,7 +23,7 @@ t.save( { name : "c" } );
assert.eq( 3 , t.find( { name : /^aa*/ } ).count() , "B ni" );
t.ensureIndex( { name : 1 } );
assert.eq( 3 , t.find( { name : /^aa*/ } ).count() , "B i 1" );
-assert.eq( 3 , t.find( { name : /^aa*/ } ).explain().nscanned , "B i 1 e" );
+assert.eq( 4 , t.find( { name : /^aa*/ } ).explain().nscanned , "B i 1 e" );
assert.eq( 2 , t.find( { name : /^a[ab]/ } ).count() , "B i 2" );
assert.eq( 2 , t.find( { name : /^a[bc]/ } ).count() , "B i 3" );
jstests/regex6.js (4 changed lines)
@@ -10,10 +10,10 @@ t.save( { name : "aaron" } );
t.ensureIndex( { name : 1 } );
assert.eq( 0 , t.find( { name : /^\// } ).count() , "index count" );
-assert.eq( 0 , t.find( { name : /^\// } ).explain().nscanned , "index explain 1" );
+assert.eq( 1 , t.find( { name : /^\// } ).explain().nscanned , "index explain 1" );
assert.eq( 0 , t.find( { name : /^é/ } ).explain().nscanned , "index explain 2" );
assert.eq( 0 , t.find( { name : /^/ } ).explain().nscanned , "index explain 3" );
-assert.eq( 0 , t.find( { name : /^\./ } ).explain().nscanned , "index explain 4" );
+assert.eq( 1 , t.find( { name : /^\./ } ).explain().nscanned , "index explain 4" );
assert.eq( 4 , t.find( { name : /^./ } ).explain().nscanned , "index explain 5" );
assert.eq( 4 , t.find( { name : /^\Qblah\E/ } ).explain().nscanned , "index explain 6" );
mongo.xcodeproj/project.pbxproj (2 changed lines)
@@ -544,6 +544,7 @@
93BFA0E311330A8C0045D084 /* not2.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; path = not2.js; sourceTree = "<group>"; };
93C38E940FA66622007D6E4A /* basictests.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = basictests.cpp; sourceTree = "<group>"; };
93C46E021219AB8E00382E61 /* update_arraymatch6.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; path = update_arraymatch6.js; sourceTree = "<group>"; };
+ 93C46E64121A086500382E61 /* indexj.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; path = indexj.js; sourceTree = "<group>"; };
93C529C511D047CF00CF42F7 /* repair2.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; name = repair2.js; path = disk/repair2.js; sourceTree = "<group>"; };
93C5BC7911E5AE8700F9671C /* in6.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; path = in6.js; sourceTree = "<group>"; };
93C5BC9E11E5B7FE00F9671C /* group6.js */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.javascript; path = group6.js; sourceTree = "<group>"; };
@@ -834,6 +835,7 @@
934BEB9A10DFFA9600178102 /* jstests */ = {
isa = PBXGroup;
children = (
+ 93C46E64121A086500382E61 /* indexj.js */,
93C46E021219AB8E00382E61 /* update_arraymatch6.js */,
93EC350F1207AEB000A95C8A /* remove9.js */,
93EC34601207628300A95C8A /* capped7.js */,