Diffstat (limited to 'dbtests')
-rw-r--r--  dbtests/basictests.cpp            77
-rw-r--r--  dbtests/btreetests.cpp          1702
-rw-r--r--  dbtests/btreetests.inl          1702
-rw-r--r--  dbtests/clienttests.cpp            2
-rw-r--r--  dbtests/cursortests.cpp           66
-rw-r--r--  dbtests/dbtests.cpp                3
-rw-r--r--  dbtests/directclienttests.cpp     31
-rw-r--r--  dbtests/framework.cpp             65
-rw-r--r--  dbtests/jsobjtests.cpp           381
-rw-r--r--  dbtests/jsontests.cpp             23
-rw-r--r--  dbtests/jstests.cpp              119
-rw-r--r--  dbtests/mockdbclient.h            97
-rw-r--r--  dbtests/namespacetests.cpp       500
-rw-r--r--  dbtests/pairingtests.cpp         344
-rw-r--r--  dbtests/pdfiletests.cpp            4
-rw-r--r--  dbtests/perf/perftest.cpp         70
-rw-r--r--  dbtests/perftests.cpp            691
-rw-r--r--  dbtests/queryoptimizertests.cpp 2885
-rw-r--r--  dbtests/querytests.cpp           179
-rw-r--r--  dbtests/queryutiltests.cpp       989
-rw-r--r--  dbtests/repltests.cpp            183
-rw-r--r--  dbtests/socktests.cpp              2
-rw-r--r--  dbtests/spin_lock_test.cpp        13
-rwxr-xr-x  dbtests/test.sln                  26
-rw-r--r--  dbtests/test.vcxproj            1486
-rwxr-xr-x  dbtests/test.vcxproj.filters     275
-rw-r--r--  dbtests/threadedtests.cpp        389
-rw-r--r--  dbtests/updatetests.cpp           17
28 files changed, 7998 insertions(+), 4323 deletions(-)
diff --git a/dbtests/basictests.cpp b/dbtests/basictests.cpp
index 3e0eecd..80bd7d7 100644
--- a/dbtests/basictests.cpp
+++ b/dbtests/basictests.cpp
@@ -25,6 +25,9 @@
#include "../util/text.h"
#include "../util/queue.h"
#include "../util/paths.h"
+#include "../util/stringutils.h"
+#include "../util/compress.h"
+#include "../db/db.h"
namespace BasicTests {
@@ -195,12 +198,16 @@ namespace BasicTests {
int matches = 0;
for( int p = 0; p < 3; p++ ) {
sleepsecs( 1 );
- int sec = t.seconds();
+ int sec = (t.millis() + 2)/1000;
if( sec == 1 )
matches++;
+ else
+ log() << "temp millis: " << t.millis() << endl;
ASSERT( sec >= 0 && sec <= 2 );
t.reset();
}
+ if ( matches < 2 )
+ log() << "matches:" << matches << endl;
ASSERT( matches >= 2 );
sleepmicros( 1527123 );
@@ -222,7 +229,7 @@ namespace BasicTests {
{
int x = t.millis();
if ( x < 1000 || x > 2500 ) {
- cout << "sleeptest x: " << x << endl;
+ cout << "sleeptest finds sleep accuracy to be not great. x: " << x << endl;
ASSERT( x >= 1000 );
ASSERT( x <= 20000 );
}
@@ -399,6 +406,27 @@ namespace BasicTests {
ASSERT_EQUALS( -1, lexNumCmp( "a", "0a"));
ASSERT_EQUALS( -1, lexNumCmp( "000a", "001a"));
ASSERT_EQUALS( 0, lexNumCmp( "010a", "0010a"));
+
+ ASSERT_EQUALS( -1 , lexNumCmp( "a0" , "a00" ) );
+ ASSERT_EQUALS( 0 , lexNumCmp( "a.0" , "a.00" ) );
+ ASSERT_EQUALS( -1 , lexNumCmp( "a.b.c.d0" , "a.b.c.d00" ) );
+ ASSERT_EQUALS( 1 , lexNumCmp( "a.b.c.0.y" , "a.b.c.00.x" ) );
+
+ ASSERT_EQUALS( -1, lexNumCmp( "a", "a-" ) );
+ ASSERT_EQUALS( 1, lexNumCmp( "a-", "a" ) );
+ ASSERT_EQUALS( 0, lexNumCmp( "a-", "a-" ) );
+
+ ASSERT_EQUALS( -1, lexNumCmp( "a", "a-c" ) );
+ ASSERT_EQUALS( 1, lexNumCmp( "a-c", "a" ) );
+ ASSERT_EQUALS( 0, lexNumCmp( "a-c", "a-c" ) );
+
+ ASSERT_EQUALS( 1, lexNumCmp( "a-c.t", "a.t" ) );
+ ASSERT_EQUALS( -1, lexNumCmp( "a.t", "a-c.t" ) );
+ ASSERT_EQUALS( 0, lexNumCmp( "a-c.t", "a-c.t" ) );
+
+ ASSERT_EQUALS( 1, lexNumCmp( "ac.t", "a.t" ) );
+ ASSERT_EQUALS( -1, lexNumCmp( "a.t", "ac.t" ) );
+ ASSERT_EQUALS( 0, lexNumCmp( "ac.t", "ac.t" ) );
}
};
@@ -409,16 +437,16 @@ namespace BasicTests {
ASSERT( ! Database::validDBName( "foo/bar" ) );
ASSERT( ! Database::validDBName( "foo.bar" ) );
- ASSERT( isANormalNSName( "asdads" ) );
- ASSERT( ! isANormalNSName( "asda$ds" ) );
- ASSERT( isANormalNSName( "local.oplog.$main" ) );
+ ASSERT( NamespaceString::normal( "asdads" ) );
+ ASSERT( ! NamespaceString::normal( "asda$ds" ) );
+ ASSERT( NamespaceString::normal( "local.oplog.$main" ) );
}
};
class DatabaseOwnsNS {
public:
void run() {
-
+ dblock lk;
bool isNew = false;
// this leaks as ~Database is private
// if that changes, should put this on the stack
@@ -584,6 +612,40 @@ namespace BasicTests {
}
};
+ class CmdLineParseConfigTest {
+ public:
+ void run() {
+ stringstream ss1;
+ istringstream iss1("");
+ CmdLine::parseConfigFile( iss1, ss1 );
+ stringstream ss2;
+ istringstream iss2("password=\'foo bar baz\'");
+ CmdLine::parseConfigFile( iss2, ss2 );
+ stringstream ss3;
+ istringstream iss3("\t this = false \n#that = true\n #another = whocares\n\n other = monkeys ");
+ CmdLine::parseConfigFile( iss3, ss3 );
+
+ ASSERT( ss1.str().compare("\n") == 0 );
+ ASSERT( ss2.str().compare("password=\'foo bar baz\'\n\n") == 0 );
+ ASSERT( ss3.str().compare("\n other = monkeys \n\n") == 0 );
+ }
+ };
+
+ struct CompressionTest1 {
+ void run() {
+ const char * c = "this is a test";
+ std::string s;
+ size_t len = compress(c, strlen(c)+1, &s);
+ assert( len > 0 );
+
+ std::string out;
+ bool ok = uncompress(s.c_str(), s.size(), &out);
+ assert(ok);
+ assert( strcmp(out.c_str(), c) == 0 );
+ }
+ } ctest1;
+
+
class All : public Suite {
public:
All() : Suite( "basic" ) {
@@ -620,6 +682,9 @@ namespace BasicTests {
add< HostAndPortTests >();
add< RelativePathTest >();
+ add< CmdLineParseConfigTest >();
+
+ add< CompressionTest1 >();
}
} myall;
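
The new lexNumCmp assertions above pin down an ordering in which '.' separates sections, runs of digits compare numerically (leading zeros ignored only at the start of a section), and a digit sorts after a non-digit character. The sketch below is not the util/stringutils.h implementation; it is a minimal, self-contained comparator (hypothetical name lexNumCmpSketch) written only to illustrate the ordering the asserted cases describe:

// Illustrative digit-run-aware comparator; reproduces the cases asserted
// in the diff above, but is not MongoDB's lexNumCmp implementation.
#include <cassert>
#include <cctype>
#include <cstring>

int lexNumCmpSketch( const char *s1, const char *s2 ) {
    bool sectionStart = true;                    // just after begin or '.'
    while ( *s1 && *s2 ) {
        bool d1 = ( *s1 == '.' ), d2 = ( *s2 == '.' );
        if ( d1 != d2 )
            return d1 ? -1 : 1;                  // the section that ends first sorts first
        if ( d1 ) {                              // both at a separator
            ++s1; ++s2;
            sectionStart = true;
            continue;
        }
        bool n1 = isdigit( (unsigned char)*s1 ), n2 = isdigit( (unsigned char)*s2 );
        if ( n1 && n2 ) {
            if ( sectionStart ) {                // ignore leading zeros
                while ( *s1 == '0' ) ++s1;
                while ( *s2 == '0' ) ++s2;
            }
            const char *e1 = s1, *e2 = s2;       // find the ends of the digit runs
            while ( isdigit( (unsigned char)*e1 ) ) ++e1;
            while ( isdigit( (unsigned char)*e2 ) ) ++e2;
            int len1 = int( e1 - s1 ), len2 = int( e2 - s2 );
            if ( len1 != len2 )
                return len1 < len2 ? -1 : 1;     // more digits means a bigger number
            int r = strncmp( s1, s2, len1 );
            if ( r )
                return r < 0 ? -1 : 1;
            s1 = e1; s2 = e2;                    // equal runs, keep scanning
            sectionStart = false;
            continue;
        }
        if ( n1 != n2 )
            return n1 ? 1 : -1;                  // digits sort after non-digits
        if ( *s1 != *s2 )
            return *s1 < *s2 ? -1 : 1;
        ++s1; ++s2;
        sectionStart = false;
    }
    if ( *s1 ) return 1;
    if ( *s2 ) return -1;
    return 0;
}

int main() {
    // A few of the cases asserted in the diff above.
    assert( lexNumCmpSketch( "a", "0a" ) == -1 );
    assert( lexNumCmpSketch( "010a", "0010a" ) == 0 );
    assert( lexNumCmpSketch( "a0", "a00" ) == -1 );
    assert( lexNumCmpSketch( "a.0", "a.00" ) == 0 );
    assert( lexNumCmpSketch( "a-c.t", "a.t" ) == 1 );
    return 0;
}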
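
CompressionTest1 exercises the new compress()/uncompress() helpers from util/compress.h, whose call shape matches Google's snappy API. Assuming a snappy-style backend (an assumption, not something this diff confirms), the same round trip can be sketched standalone:

// Standalone compress/uncompress round trip using the snappy library,
// mirroring the pattern CompressionTest1 checks above.
#include <cassert>
#include <cstring>
#include <string>
#include <snappy.h>

int main() {
    const char *msg = "this is a test";

    std::string compressed;
    size_t len = snappy::Compress( msg, strlen( msg ) + 1, &compressed );
    assert( len > 0 );                           // returns the compressed length

    std::string restored;
    bool ok = snappy::Uncompress( compressed.data(), compressed.size(), &restored );
    assert( ok );
    assert( strcmp( restored.c_str(), msg ) == 0 );
    return 0;
}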
diff --git a/dbtests/btreetests.cpp b/dbtests/btreetests.cpp
index 4da7375..44c5474 100644
--- a/dbtests/btreetests.cpp
+++ b/dbtests/btreetests.cpp
@@ -24,1688 +24,26 @@
#include "dbtests.h"
-namespace BtreeTests {
-
- const char* ns() {
- return "unittests.btreetests";
- }
-
- // dummy, valid record loc
- const DiskLoc recordLoc() {
- return DiskLoc( 0, 2 );
- }
-
- class Ensure {
- public:
- Ensure() {
- _c.ensureIndex( ns(), BSON( "a" << 1 ), false, "testIndex" );
- }
- ~Ensure() {
- _c.dropIndexes( ns() );
- }
- private:
- DBDirectClient _c;
- };
-
- class Base : public Ensure {
- public:
- Base() :
- _context( ns() ) {
- {
- bool f = false;
- assert( f = true );
- massert( 10402 , "assert is misdefined", f);
- }
- }
- virtual ~Base() {}
- static string bigNumString( long long n, int len = 800 ) {
- char sub[17];
- sprintf( sub, "%.16llx", n );
- string val( len, ' ' );
- for( int i = 0; i < len; ++i ) {
- val[ i ] = sub[ i % 16 ];
- }
- return val;
- }
- protected:
- const BtreeBucket* bt() {
- return id().head.btree();
- }
- DiskLoc dl() {
- return id().head;
- }
- IndexDetails& id() {
- NamespaceDetails *nsd = nsdetails( ns() );
- assert( nsd );
- return nsd->idx( 1 );
- }
- void checkValid( int nKeys ) {
- ASSERT( bt() );
- ASSERT( bt()->isHead() );
- bt()->assertValid( order(), true );
- ASSERT_EQUALS( nKeys, bt()->fullValidate( dl(), order(), 0, true ) );
- }
- void dump() {
- bt()->dumpTree( dl(), order() );
- }
- void insert( BSONObj &key ) {
- bt()->bt_insert( dl(), recordLoc(), key, Ordering::make(order()), true, id(), true );
- getDur().commitIfNeeded();
- }
- bool unindex( BSONObj &key ) {
- getDur().commitIfNeeded();
- return bt()->unindex( dl(), id(), key, recordLoc() );
- }
- static BSONObj simpleKey( char c, int n = 1 ) {
- BSONObjBuilder builder;
- string val( n, c );
- builder.append( "a", val );
- return builder.obj();
- }
- void locate( BSONObj &key, int expectedPos,
- bool expectedFound, const DiskLoc &expectedLocation,
- int direction = 1 ) {
- int pos;
- bool found;
- DiskLoc location =
- bt()->locate( id(), dl(), key, Ordering::make(order()), pos, found, recordLoc(), direction );
- ASSERT_EQUALS( expectedFound, found );
- ASSERT( location == expectedLocation );
- ASSERT_EQUALS( expectedPos, pos );
- }
- bool present( BSONObj &key, int direction ) {
- int pos;
- bool found;
- bt()->locate( id(), dl(), key, Ordering::make(order()), pos, found, recordLoc(), direction );
- return found;
- }
- BSONObj order() {
- return id().keyPattern();
- }
- const BtreeBucket *child( const BtreeBucket *b, int i ) {
- assert( i <= b->nKeys() );
- DiskLoc d;
- if ( i == b->nKeys() ) {
- d = b->getNextChild();
- }
- else {
- d = const_cast< DiskLoc& >( b->keyNode( i ).prevChildBucket );
- }
- assert( !d.isNull() );
- return d.btree();
- }
- void checkKey( char i ) {
- stringstream ss;
- ss << i;
- checkKey( ss.str() );
- }
- void checkKey( const string &k ) {
- BSONObj key = BSON( "" << k );
-// log() << "key: " << key << endl;
- ASSERT( present( key, 1 ) );
- ASSERT( present( key, -1 ) );
- }
- private:
- dblock lk_;
- Client::Context _context;
- };
-
- class Create : public Base {
- public:
- void run() {
- checkValid( 0 );
- }
- };
-
- class SimpleInsertDelete : public Base {
- public:
- void run() {
- BSONObj key = simpleKey( 'z' );
- insert( key );
-
- checkValid( 1 );
- locate( key, 0, true, dl() );
-
- unindex( key );
-
- checkValid( 0 );
- locate( key, 0, false, DiskLoc() );
- }
- };
-
- class SplitUnevenBucketBase : public Base {
- public:
- virtual ~SplitUnevenBucketBase() {}
- void run() {
- for ( int i = 0; i < 10; ++i ) {
- BSONObj shortKey = simpleKey( shortToken( i ), 1 );
- insert( shortKey );
- BSONObj longKey = simpleKey( longToken( i ), 800 );
- insert( longKey );
- }
- checkValid( 20 );
- ASSERT_EQUALS( 1, bt()->nKeys() );
- checkSplit();
- }
- protected:
- virtual char shortToken( int i ) const = 0;
- virtual char longToken( int i ) const = 0;
- static char leftToken( int i ) {
- return 'a' + i;
- }
- static char rightToken( int i ) {
- return 'z' - i;
- }
- virtual void checkSplit() = 0;
- };
-
- class SplitRightHeavyBucket : public SplitUnevenBucketBase {
- private:
- virtual char shortToken( int i ) const {
- return leftToken( i );
- }
- virtual char longToken( int i ) const {
- return rightToken( i );
- }
- virtual void checkSplit() {
- ASSERT_EQUALS( 15, child( bt(), 0 )->nKeys() );
- ASSERT_EQUALS( 4, child( bt(), 1 )->nKeys() );
- }
- };
-
- class SplitLeftHeavyBucket : public SplitUnevenBucketBase {
- private:
- virtual char shortToken( int i ) const {
- return rightToken( i );
- }
- virtual char longToken( int i ) const {
- return leftToken( i );
- }
- virtual void checkSplit() {
- ASSERT_EQUALS( 4, child( bt(), 0 )->nKeys() );
- ASSERT_EQUALS( 15, child( bt(), 1 )->nKeys() );
- }
- };
-
- class MissingLocate : public Base {
- public:
- void run() {
- for ( int i = 0; i < 3; ++i ) {
- BSONObj k = simpleKey( 'b' + 2 * i );
- insert( k );
- }
-
- locate( 1, 'a', 'b', dl() );
- locate( 1, 'c', 'd', dl() );
- locate( 1, 'e', 'f', dl() );
- locate( 1, 'g', 'g' + 1, DiskLoc() ); // of course, 'h' isn't in the index.
-
- // old behavior
- // locate( -1, 'a', 'b', dl() );
- // locate( -1, 'c', 'd', dl() );
- // locate( -1, 'e', 'f', dl() );
- // locate( -1, 'g', 'f', dl() );
-
- locate( -1, 'a', 'a' - 1, DiskLoc() ); // of course, 'a' - 1 isn't in the index
- locate( -1, 'c', 'b', dl() );
- locate( -1, 'e', 'd', dl() );
- locate( -1, 'g', 'f', dl() );
- }
- private:
- void locate( int direction, char token, char expectedMatch,
- DiskLoc expectedLocation ) {
- BSONObj k = simpleKey( token );
- int expectedPos = ( expectedMatch - 'b' ) / 2;
- Base::locate( k, expectedPos, false, expectedLocation, direction );
- }
- };
-
- class MissingLocateMultiBucket : public Base {
- public:
- void run() {
- for ( int i = 0; i < 8; ++i ) {
- insert( i );
- }
- insert( 9 );
- insert( 8 );
-// dump();
- BSONObj straddle = key( 'i' );
- locate( straddle, 0, false, dl(), 1 );
- straddle = key( 'k' );
- locate( straddle, 0, false, dl(), -1 );
- }
- private:
- BSONObj key( char c ) {
- return simpleKey( c, 800 );
- }
- void insert( int i ) {
- BSONObj k = key( 'b' + 2 * i );
- Base::insert( k );
- }
- };
-
- class SERVER983 : public Base {
- public:
- void run() {
- for ( int i = 0; i < 10; ++i ) {
- insert( i );
- }
-// dump();
- BSONObj straddle = key( 'o' );
- locate( straddle, 0, false, dl(), 1 );
- straddle = key( 'q' );
- locate( straddle, 0, false, dl(), -1 );
- }
- private:
- BSONObj key( char c ) {
- return simpleKey( c, 800 );
- }
- void insert( int i ) {
- BSONObj k = key( 'b' + 2 * i );
- Base::insert( k );
- }
- };
-
- class DontReuseUnused : public Base {
- public:
- void run() {
- for ( int i = 0; i < 10; ++i ) {
- insert( i );
- }
-// dump();
- BSONObj root = key( 'p' );
- unindex( root );
- Base::insert( root );
- locate( root, 0, true, bt()->getNextChild(), 1 );
- }
- private:
- BSONObj key( char c ) {
- return simpleKey( c, 800 );
- }
- void insert( int i ) {
- BSONObj k = key( 'b' + 2 * i );
- Base::insert( k );
- }
- };
-
- class PackUnused : public Base {
- public:
- void run() {
- for ( long long i = 0; i < 1000000; i += 1000 ) {
- insert( i );
- }
-// dump();
- string orig, after;
- {
- stringstream ss;
- bt()->shape( ss );
- orig = ss.str();
- }
- vector< string > toDel;
- vector< string > other;
- BSONObjBuilder start;
- start.appendMinKey( "a" );
- BSONObjBuilder end;
- end.appendMaxKey( "a" );
- auto_ptr< BtreeCursor > c( new BtreeCursor( nsdetails( ns() ), 1, id(), start.done(), end.done(), false, 1 ) );
- while( c->ok() ) {
- if ( !c->currKeyNode().prevChildBucket.isNull() ) {
- toDel.push_back( c->currKey().firstElement().valuestr() );
- }
- else {
- other.push_back( c->currKey().firstElement().valuestr() );
- }
- c->advance();
- }
- ASSERT( toDel.size() > 0 );
- for( vector< string >::const_iterator i = toDel.begin(); i != toDel.end(); ++i ) {
- BSONObj o = BSON( "a" << *i );
- unindex( o );
- }
- ASSERT( other.size() > 0 );
- for( vector< string >::const_iterator i = other.begin(); i != other.end(); ++i ) {
- BSONObj o = BSON( "a" << *i );
- unindex( o );
- }
-
- int unused = 0;
- ASSERT_EQUALS( 0, bt()->fullValidate( dl(), order(), &unused, true ) );
-
- for ( long long i = 50000; i < 50100; ++i ) {
- insert( i );
- }
-
- int unused2 = 0;
- ASSERT_EQUALS( 100, bt()->fullValidate( dl(), order(), &unused2, true ) );
-
-// log() << "old unused: " << unused << ", new unused: " << unused2 << endl;
-//
- ASSERT( unused2 <= unused );
- }
- protected:
- void insert( long long n ) {
- string val = bigNumString( n );
- BSONObj k = BSON( "a" << val );
- Base::insert( k );
- }
- };
-
- class DontDropReferenceKey : public PackUnused {
- public:
- void run() {
- // with 80 root node is full
- for ( long long i = 0; i < 80; i += 1 ) {
- insert( i );
- }
-
- BSONObjBuilder start;
- start.appendMinKey( "a" );
- BSONObjBuilder end;
- end.appendMaxKey( "a" );
- BSONObj l = bt()->keyNode( 0 ).key;
- string toInsert;
- auto_ptr< BtreeCursor > c( new BtreeCursor( nsdetails( ns() ), 1, id(), start.done(), end.done(), false, 1 ) );
- while( c->ok() ) {
- if ( c->currKey().woCompare( l ) > 0 ) {
- toInsert = c->currKey().firstElement().valuestr();
- break;
- }
- c->advance();
- }
- // too much work to try to make this happen through inserts and deletes
- // we are intentionally manipulating the btree bucket directly here
- getDur().writingDiskLoc( const_cast< DiskLoc& >( bt()->keyNode( 1 ).prevChildBucket ) ) = DiskLoc();
- getDur().writingInt( const_cast< DiskLoc& >( bt()->keyNode( 1 ).recordLoc ).GETOFS() ) |= 1; // make unused
- BSONObj k = BSON( "a" << toInsert );
- Base::insert( k );
- }
- };
-
- class MergeBuckets : public Base {
- public:
- virtual ~MergeBuckets() {}
- void run() {
- for ( int i = 0; i < 10; ++i ) {
- insert( i );
- }
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- int expectedCount = 10 - unindexKeys();
-// dump();
- ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
- int unused = 0;
- ASSERT_EQUALS( expectedCount, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- }
- protected:
- BSONObj key( char c ) {
- return simpleKey( c, 800 );
- }
- void insert( int i ) {
- BSONObj k = key( 'b' + 2 * i );
- Base::insert( k );
- }
- virtual int unindexKeys() = 0;
- };
-
- class MergeBucketsLeft : public MergeBuckets {
- virtual int unindexKeys() {
- BSONObj k = key( 'b' );
- unindex( k );
- k = key( 'b' + 2 );
- unindex( k );
- k = key( 'b' + 4 );
- unindex( k );
- k = key( 'b' + 6 );
- unindex( k );
- return 4;
- }
- };
-
- class MergeBucketsRight : public MergeBuckets {
- virtual int unindexKeys() {
- BSONObj k = key( 'b' + 2 * 9 );
- unindex( k );
- return 1;
- }
- };
-
- // deleting from head won't coalesce yet
-// class MergeBucketsHead : public MergeBuckets {
-// virtual BSONObj unindexKey() { return key( 'p' ); }
-// };
-
- class MergeBucketsDontReplaceHead : public Base {
- public:
- void run() {
- for ( int i = 0; i < 18; ++i ) {
- insert( i );
- }
- // dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = key( 'a' + 17 );
- unindex( k );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- int unused = 0;
- ASSERT_EQUALS( 17, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- }
- private:
- BSONObj key( char c ) {
- return simpleKey( c, 800 );
- }
- void insert( int i ) {
- BSONObj k = key( 'a' + i );
- Base::insert( k );
- }
- };
-
- // Tool to construct custom trees for tests.
- class ArtificialTree : public BtreeBucket {
- public:
- void push( const BSONObj &key, const DiskLoc &child ) {
- pushBack( dummyDiskLoc(), key, Ordering::make( BSON( "a" << 1 ) ), child );
- }
- void setNext( const DiskLoc &child ) {
- nextChild = child;
- }
- static DiskLoc make( IndexDetails &id ) {
- DiskLoc ret = addBucket( id );
- is( ret )->init();
- getDur().commitIfNeeded();
- return ret;
- }
- static ArtificialTree *is( const DiskLoc &l ) {
- return static_cast< ArtificialTree * >( l.btreemod() );
- }
- static DiskLoc makeTree( const string &spec, IndexDetails &id ) {
- return makeTree( fromjson( spec ), id );
- }
- static DiskLoc makeTree( const BSONObj &spec, IndexDetails &id ) {
- DiskLoc node = make( id );
- ArtificialTree *n = ArtificialTree::is( node );
- BSONObjIterator i( spec );
- while( i.more() ) {
- BSONElement e = i.next();
- DiskLoc child;
- if ( e.type() == Object ) {
- child = makeTree( e.embeddedObject(), id );
- }
- if ( e.fieldName() == string( "_" ) ) {
- n->setNext( child );
- }
- else {
- n->push( BSON( "" << expectedKey( e.fieldName() ) ), child );
- }
- }
- n->fixParentPtrs( node );
- return node;
- }
- static void setTree( const string &spec, IndexDetails &id ) {
- set( makeTree( spec, id ), id );
- }
- static void set( const DiskLoc &l, IndexDetails &id ) {
- ArtificialTree::is( id.head )->deallocBucket( id.head, id );
- getDur().writingDiskLoc(id.head) = l;
- }
- static string expectedKey( const char *spec ) {
- if ( spec[ 0 ] != '$' ) {
- return spec;
- }
- char *endPtr;
- // parsing a long long is a pain, so just allow shorter keys for now
- unsigned long long num = strtol( spec + 1, &endPtr, 16 );
- int len = 800;
- if( *endPtr == '$' ) {
- len = strtol( endPtr + 1, 0, 16 );
- }
- return Base::bigNumString( num, len );
- }
- static void checkStructure( const BSONObj &spec, const IndexDetails &id, const DiskLoc node ) {
- ArtificialTree *n = ArtificialTree::is( node );
- BSONObjIterator j( spec );
- for( int i = 0; i < n->n; ++i ) {
- ASSERT( j.more() );
- BSONElement e = j.next();
- KeyNode kn = n->keyNode( i );
- string expected = expectedKey( e.fieldName() );
- ASSERT( present( id, BSON( "" << expected ), 1 ) );
- ASSERT( present( id, BSON( "" << expected ), -1 ) );
- ASSERT_EQUALS( expected, kn.key.firstElement().valuestr() );
- if ( kn.prevChildBucket.isNull() ) {
- ASSERT( e.type() == jstNULL );
- }
- else {
- ASSERT( e.type() == Object );
- checkStructure( e.embeddedObject(), id, kn.prevChildBucket );
- }
- }
- if ( n->nextChild.isNull() ) {
- // maybe should allow '_' field with null value?
- ASSERT( !j.more() );
- }
- else {
- BSONElement e = j.next();
- ASSERT_EQUALS( string( "_" ), e.fieldName() );
- ASSERT( e.type() == Object );
- checkStructure( e.embeddedObject(), id, n->nextChild );
- }
- ASSERT( !j.more() );
- }
- static void checkStructure( const string &spec, const IndexDetails &id ) {
- checkStructure( fromjson( spec ), id, id.head );
- }
- static bool present( const IndexDetails &id, const BSONObj &key, int direction ) {
- int pos;
- bool found;
- id.head.btree()->locate( id, id.head, key, Ordering::make(id.keyPattern()), pos, found, recordLoc(), direction );
- return found;
- }
- int headerSize() const { return BtreeBucket::headerSize(); }
- int packedDataSize( int pos ) const { return BtreeBucket::packedDataSize( pos ); }
- void fixParentPtrs( const DiskLoc &thisLoc ) { BtreeBucket::fixParentPtrs( thisLoc ); }
- void forcePack() {
- topSize += emptySize;
- emptySize = 0;
- setNotPacked();
- }
- private:
- DiskLoc dummyDiskLoc() const { return DiskLoc( 0, 2 ); }
- };
-
- /**
- * We could probably refactor the following tests, but it's easier to debug
- * them in the present state.
- */
-
- class MergeBucketsDelInternal : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{d:{b:{a:null},bb:null,_:{c:null}},_:{f:{e:null},_:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "bb" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{b:{a:null},d:{c:null},f:{e:null},_:{g:null}}", id() );
- }
- };
-
- class MergeBucketsRightNull : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},_:{f:{e:null},h:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "bb" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}", id() );
- }
- };
-
- // not yet handling this case
- class DontMergeSingleBucket : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{d:{b:{a:null},c:null}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "c" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{d:{b:{a:null}}}", id() );
- }
- };
-
- class ParentMergeNonRightToLeft : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},i:{f:{e:null},h:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "bb" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
- // child does not currently replace parent in this case
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{i:{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}}", id() );
- }
- };
-
- class ParentMergeNonRightToRight : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{d:{b:{a:null},cc:{c:null}},i:{f:{e:null},ff:null,h:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "ff" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
- // child does not currently replace parent in this case
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{i:{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}}", id() );
- }
- };
-
- class CantMergeRightNoMerge : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},dd:null,_:{f:{e:null},h:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "bb" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{d:{b:{a:null},cc:{c:null}},dd:null,_:{f:{e:null},h:{g:null}}}", id() );
- }
- };
-
- class CantMergeLeftNoMerge : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{c:{b:{a:null}},d:null,_:{f:{e:null},g:null}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "g" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 6, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{c:{b:{a:null}},d:null,_:{f:{e:null}}}", id() );
- }
- };
-
- class MergeOption : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{c:{b:{a:null}},f:{e:{d:null},ee:null},_:{h:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "ee" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{c:{b:{a:null}},_:{e:{d:null},f:null,h:{g:null}}}", id() );
- }
- };
-
- class ForceMergeLeft : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{c:{b:{a:null}},f:{e:{d:null},ee:null},ff:null,_:{h:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "ee" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{f:{b:{a:null},c:null,e:{d:null}},ff:null,_:{h:{g:null}}}", id() );
- }
- };
-
- class ForceMergeRight : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{c:{b:{a:null}},cc:null,f:{e:{d:null},ee:null},_:{h:{g:null}}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "ee" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{c:{b:{a:null}},cc:null,_:{e:{d:null},f:null,h:{g:null}}}", id() );
- }
- };
-
- class RecursiveMerge : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},g:{f:null}},j:{i:null}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "c" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- // height is not currently reduced in this case
- ArtificialTree::checkStructure( "{j:{g:{b:{a:null},d:null,e:null,f:null},h:null,i:null}}", id() );
- }
- };
-
- class RecursiveMergeRightBucket : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},g:{f:null}},_:{i:null}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "c" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{g:{b:{a:null},d:null,e:null,f:null},h:null,i:null}", id() );
- }
- };
-
- class RecursiveMergeDoubleRightBucket : public Base {
- public:
- void run() {
- ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},_:{f:null}},_:{i:null}}", id() );
-// dump();
- string ns = id().indexNamespace();
- ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
-
- BSONObj k = BSON( "" << "c" );
- assert( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- // no recursion currently in this case
- ArtificialTree::checkStructure( "{h:{b:{a:null},d:null,e:null,f:null},_:{i:null}}", id() );
- }
- };
-
- class MergeSizeBase : public Base {
- public:
- MergeSizeBase() : _count() {}
- virtual ~MergeSizeBase() {}
- void run() {
- typedef ArtificialTree A;
- A::set( A::make( id() ), id() );
- A* root = A::is( dl() );
- DiskLoc left = A::make( id() );
- root->push( biggestKey( 'm' ), left );
- _count = 1;
- A* l = A::is( left );
- DiskLoc right = A::make( id() );
- root->setNext( right );
- A* r = A::is( right );
- root->fixParentPtrs( dl() );
-
- ASSERT_EQUALS( bigSize(), bigSize() / 2 * 2 );
- fillToExactSize( l, leftSize(), 'a' );
- fillToExactSize( r, rightSize(), 'n' );
- ASSERT( leftAdditional() <= 2 );
- if ( leftAdditional() >= 2 ) {
- l->push( bigKey( 'k' ), DiskLoc() );
- }
- if ( leftAdditional() >= 1 ) {
- l->push( bigKey( 'l' ), DiskLoc() );
- }
- ASSERT( rightAdditional() <= 2 );
- if ( rightAdditional() >= 2 ) {
- r->push( bigKey( 'y' ), DiskLoc() );
- }
- if ( rightAdditional() >= 1 ) {
- r->push( bigKey( 'z' ), DiskLoc() );
- }
- _count += leftAdditional() + rightAdditional();
-
-// dump();
-
- initCheck();
- string ns = id().indexNamespace();
- const char *keys = delKeys();
- for( const char *i = keys; *i; ++i ) {
- int unused = 0;
- ASSERT_EQUALS( _count, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = bigKey( *i );
- unindex( k );
-// dump();
- --_count;
- }
-
-// dump();
-
- int unused = 0;
- ASSERT_EQUALS( _count, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- validate();
- if ( !merge() ) {
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- }
- else {
- ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
- }
- }
- protected:
- virtual int leftAdditional() const { return 2; }
- virtual int rightAdditional() const { return 2; }
- virtual void initCheck() {}
- virtual void validate() {}
- virtual int leftSize() const = 0;
- virtual int rightSize() const = 0;
- virtual const char * delKeys() const { return "klyz"; }
- virtual bool merge() const { return true; }
- void fillToExactSize( ArtificialTree *t, int targetSize, char startKey ) {
- int size = 0;
- while( size < targetSize ) {
- int space = targetSize - size;
- int nextSize = space - sizeof( _KeyNode );
- assert( nextSize > 0 );
- BSONObj newKey = key( startKey++, nextSize );
- t->push( newKey, DiskLoc() );
- size += newKey.objsize() + sizeof( _KeyNode );
- _count += 1;
- }
- ASSERT_EQUALS( t->packedDataSize( 0 ), targetSize );
- }
- static BSONObj key( char a, int size ) {
- if ( size >= bigSize() ) {
- return bigKey( a );
- }
- return simpleKey( a, size - ( bigSize() - 801 ) );
- }
- static BSONObj bigKey( char a ) {
- return simpleKey( a, 801 );
- }
- static BSONObj biggestKey( char a ) {
- int size = BtreeBucket::getKeyMax() - bigSize() + 801;
- return simpleKey( a, size );
- }
- static int bigSize() {
- return bigKey( 'a' ).objsize();
- }
- static int biggestSize() {
- return biggestKey( 'a' ).objsize();
- }
- int _count;
- };
-
- class MergeSizeJustRightRight : public MergeSizeBase {
- protected:
- virtual int rightSize() const { return BtreeBucket::getLowWaterMark() - 1; }
- virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) - ( BtreeBucket::getLowWaterMark() - 1 ); }
- };
-
- class MergeSizeJustRightLeft : public MergeSizeBase {
- protected:
- virtual int leftSize() const { return BtreeBucket::getLowWaterMark() - 1; }
- virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) - ( BtreeBucket::getLowWaterMark() - 1 ); }
- virtual const char * delKeys() const { return "yzkl"; }
- };
-
- class MergeSizeRight : public MergeSizeJustRightRight {
- virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() - 1; }
- virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
- };
-
- class MergeSizeLeft : public MergeSizeJustRightLeft {
- virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
- virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() - 1; }
- };
-
- class NoMergeBelowMarkRight : public MergeSizeJustRightRight {
- virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() + 1; }
- virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() - 1; }
- virtual bool merge() const { return false; }
- };
-
- class NoMergeBelowMarkLeft : public MergeSizeJustRightLeft {
- virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() - 1; }
- virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() + 1; }
- virtual bool merge() const { return false; }
- };
-
- class MergeSizeRightTooBig : public MergeSizeJustRightLeft {
- virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
- virtual bool merge() const { return false; }
- };
-
- class MergeSizeLeftTooBig : public MergeSizeJustRightRight {
- virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
- virtual bool merge() const { return false; }
- };
-
- class BalanceOneLeftToRight : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},b:{$20:null,$30:null,$40:null,$50:null,a:null},_:{c:null}}", id() );
- ASSERT_EQUALS( 14, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x40 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},b:{$10:null,$20:null,$30:null,$50:null,a:null},_:{c:null}}", id() );
- }
- };
-
- class BalanceOneRightToLeft : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null},b:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null},_:{c:null}}", id() );
- ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x3 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$20:{$1:null,$2:null,$4:null,$10:null},b:{$30:null,$40:null,$50:null,$60:null,$70:null},_:{c:null}}", id() );
- }
- };
-
- class BalanceThreeLeftToRight : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$20:{$1:{$0:null},$3:{$2:null},$5:{$4:null},$7:{$6:null},$9:{$8:null},$11:{$10:null},$13:{$12:null},_:{$14:null}},b:{$30:null,$40:{$35:null},$50:{$45:null}},_:{c:null}}", id() );
- ASSERT_EQUALS( 23, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 14, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x30 ) );
- // dump();
- ASSERT( unindex( k ) );
- // dump();
- ASSERT_EQUALS( 22, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 14, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$9:{$1:{$0:null},$3:{$2:null},$5:{$4:null},$7:{$6:null},_:{$8:null}},b:{$11:{$10:null},$13:{$12:null},$20:{$14:null},$40:{$35:null},$50:{$45:null}},_:{c:null}}", id() );
- }
- };
-
- class BalanceThreeRightToLeft : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$20:{$1:{$0:null},$3:{$2:null},$5:null,_:{$14:null}},b:{$30:{$25:null},$40:{$35:null},$50:{$45:null},$60:{$55:null},$70:{$65:null},$80:{$75:null},$90:{$85:null},$100:{$95:null}},_:{c:null}}", id() );
- ASSERT_EQUALS( 25, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 15, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x5 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 24, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 15, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$50:{$1:{$0:null},$3:{$2:null},$20:{$14:null},$30:{$25:null},$40:{$35:null},_:{$45:null}},b:{$60:{$55:null},$70:{$65:null},$80:{$75:null},$90:{$85:null},$100:{$95:null}},_:{c:null}}", id() );
- }
- };
-
- class BalanceSingleParentKey : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},_:{$20:null,$30:null,$40:null,$50:null,a:null}}", id() );
- ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x40 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$10:null,$20:null,$30:null,$50:null,a:null}}", id() );
- }
- };
-
- class PackEmpty : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null}", id() );
- BSONObj k = BSON( "" << "a" );
- ASSERT( unindex( k ) );
- ArtificialTree *t = ArtificialTree::is( dl() );
- t->forcePack();
- Tester::checkEmpty( t, id() );
- }
- class Tester : public ArtificialTree {
- public:
- static void checkEmpty( ArtificialTree *a, const IndexDetails &id ) {
- Tester *t = static_cast< Tester * >( a );
- ASSERT_EQUALS( 0, t->n );
- ASSERT( !( t->flags & Packed ) );
- Ordering o = Ordering::make( id.keyPattern() );
- int zero = 0;
- t->_packReadyForMod( o, zero );
- ASSERT_EQUALS( 0, t->n );
- ASSERT_EQUALS( 0, t->topSize );
- ASSERT_EQUALS( BtreeBucket::bodySize(), t->emptySize );
- ASSERT( t->flags & Packed );
- }
- };
- };
-
- class PackedDataSizeEmpty : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null}", id() );
- BSONObj k = BSON( "" << "a" );
- ASSERT( unindex( k ) );
- ArtificialTree *t = ArtificialTree::is( dl() );
- t->forcePack();
- Tester::checkEmpty( t, id() );
- }
- class Tester : public ArtificialTree {
- public:
- static void checkEmpty( ArtificialTree *a, const IndexDetails &id ) {
- Tester *t = static_cast< Tester * >( a );
- ASSERT_EQUALS( 0, t->n );
- ASSERT( !( t->flags & Packed ) );
- int zero = 0;
- ASSERT_EQUALS( 0, t->packedDataSize( zero ) );
- ASSERT( !( t->flags & Packed ) );
- }
- };
- };
-
- class BalanceSingleParentKeyPackParent : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},_:{$20:null,$30:null,$40:null,$50:null,a:null}}", id() );
- ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- // force parent pack
- ArtificialTree::is( dl() )->forcePack();
- BSONObj k = BSON( "" << bigNumString( 0x40 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$10:null,$20:null,$30:null,$50:null,a:null}}", id() );
- }
- };
-
- class BalanceSplitParent : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10$10:{$1:null,$2:null,$3:null,$4:null},$100:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null},$200:null,$300:null,$400:null,$500:null,$600:null,$700:null,$800:null,$900:null,_:{c:null}}", id() );
- ASSERT_EQUALS( 22, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x3 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 21, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$500:{$30:{$1:null,$2:null,$4:null,$10$10:null,$20:null},$100:{$40:null,$50:null,$60:null,$70:null,$80:null},$200:null,$300:null,$400:null},_:{$600:null,$700:null,$800:null,$900:null,_:{c:null}}}", id() );
- }
- };
-
- class RebalancedSeparatorBase : public Base {
- public:
- void run() {
- ArtificialTree::setTree( treeSpec(), id() );
- modTree();
- Tester::checkSeparator( id(), expectedSeparator() );
- }
- virtual string treeSpec() const = 0;
- virtual int expectedSeparator() const = 0;
- virtual void modTree() {}
- struct Tester : public ArtificialTree {
- static void checkSeparator( const IndexDetails& id, int expected ) {
- ASSERT_EQUALS( expected, static_cast< Tester * >( id.head.btreemod() )->rebalancedSeparatorPos( id.head, 0 ) );
- }
- };
- };
-
- class EvenRebalanceLeft : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$7:{$1:null,$2$31f:null,$3:null,$4$31f:null,$5:null,$6:null},_:{$8:null,$9:null,$10$31e:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class EvenRebalanceLeftCusp : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$6:{$1:null,$2$31f:null,$3:null,$4$31f:null,$5:null},_:{$7:null,$8:null,$9$31e:null,$10:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class EvenRebalanceRight : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$3:{$1:null,$2$31f:null},_:{$4$31f:null,$5:null,$6:null,$7:null,$8$31e:null,$9:null,$10:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class EvenRebalanceRightCusp : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$4$31f:{$1:null,$2$31f:null,$3:null},_:{$5:null,$6:null,$7$31e:null,$8:null,$9:null,$10:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class EvenRebalanceCenter : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$5:{$1:null,$2$31f:null,$3:null,$4$31f:null},_:{$6:null,$7$31e:null,$8:null,$9:null,$10:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class OddRebalanceLeft : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$6$31f:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$7:null,$8:null,$9:null,$10:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class OddRebalanceRight : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$4:{$1:null,$2:null,$3:null},_:{$5:null,$6:null,$7:null,$8$31f:null,$9:null,$10:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class OddRebalanceCenter : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$5:{$1:null,$2:null,$3:null,$4:null},_:{$6:null,$7:null,$8:null,$9:null,$10$31f:null}}"; }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class RebalanceEmptyRight : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$a:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null,$7:null,$8:null,$9:null},_:{$b:null}}"; }
- virtual void modTree() {
- BSONObj k = BSON( "" << bigNumString( 0xb ) );
- ASSERT( unindex( k ) );
- }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class RebalanceEmptyLeft : public RebalancedSeparatorBase {
- virtual string treeSpec() const { return "{$a:{$1:null},_:{$11:null,$12:null,$13:null,$14:null,$15:null,$16:null,$17:null,$18:null,$19:null}}"; }
- virtual void modTree() {
- BSONObj k = BSON( "" << bigNumString( 0x1 ) );
- ASSERT( unindex( k ) );
- }
- virtual int expectedSeparator() const { return 4; }
- };
-
- class NoMoveAtLowWaterMarkRight : public MergeSizeJustRightRight {
- virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() + 1; }
- virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
- virtual void validate() { ASSERT_EQUALS( _oldTop, bt()->keyNode( 0 ).key ); }
- virtual bool merge() const { return false; }
- protected:
- BSONObj _oldTop;
- };
-
- class MoveBelowLowWaterMarkRight : public NoMoveAtLowWaterMarkRight {
- virtual int rightSize() const { return MergeSizeJustRightRight::rightSize(); }
- virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
- // different top means we rebalanced
- virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
- };
-
- class NoMoveAtLowWaterMarkLeft : public MergeSizeJustRightLeft {
- virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() + 1; }
- virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
- virtual void validate() { ASSERT_EQUALS( _oldTop, bt()->keyNode( 0 ).key ); }
- virtual bool merge() const { return false; }
- protected:
- BSONObj _oldTop;
- };
-
- class MoveBelowLowWaterMarkLeft : public NoMoveAtLowWaterMarkLeft {
- virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize(); }
- virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
- // different top means we rebalanced
- virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
- };
-
- class PreferBalanceLeft : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},$20:{$11:null,$12:null,$13:null,$14:null},_:{$30:null}}", id() );
- ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x12 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$5:{$1:null,$2:null,$3:null,$4:null},$20:{$6:null,$10:null,$11:null,$13:null,$14:null},_:{$30:null}}", id() );
- }
- };
-
- class PreferBalanceRight : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:{$1:null},$20:{$11:null,$12:null,$13:null,$14:null},_:{$31:null,$32:null,$33:null,$34:null,$35:null,$36:null}}", id() );
- ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x12 ) );
- // dump();
- ASSERT( unindex( k ) );
- // dump();
- ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$10:{$1:null},$31:{$11:null,$13:null,$14:null,$20:null},_:{$32:null,$33:null,$34:null,$35:null,$36:null}}", id() );
- }
- };
-
- class RecursiveMergeThenBalance : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:{$5:{$1:null,$2:null},$8:{$6:null,$7:null}},_:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null,$90:null}}", id() );
- ASSERT_EQUALS( 15, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x7 ) );
- // dump();
- ASSERT( unindex( k ) );
- // dump();
- ASSERT_EQUALS( 14, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$40:{$8:{$1:null,$2:null,$5:null,$6:null},$10:null,$20:null,$30:null},_:{$50:null,$60:null,$70:null,$80:null,$90:null}}", id() );
- }
- };
-
- class MergeRightEmpty : public MergeSizeBase {
- protected:
- virtual int rightAdditional() const { return 1; }
- virtual int leftAdditional() const { return 1; }
- virtual const char * delKeys() const { return "lz"; }
- virtual int rightSize() const { return 0; }
- virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ); }
- };
-
- class MergeMinRightEmpty : public MergeSizeBase {
- protected:
- virtual int rightAdditional() const { return 1; }
- virtual int leftAdditional() const { return 0; }
- virtual const char * delKeys() const { return "z"; }
- virtual int rightSize() const { return 0; }
- virtual int leftSize() const { return bigSize() + sizeof( _KeyNode ); }
- };
-
- class MergeLeftEmpty : public MergeSizeBase {
- protected:
- virtual int rightAdditional() const { return 1; }
- virtual int leftAdditional() const { return 1; }
- virtual const char * delKeys() const { return "zl"; }
- virtual int leftSize() const { return 0; }
- virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ); }
- };
-
- class MergeMinLeftEmpty : public MergeSizeBase {
- protected:
- virtual int leftAdditional() const { return 1; }
- virtual int rightAdditional() const { return 0; }
- virtual const char * delKeys() const { return "l"; }
- virtual int leftSize() const { return 0; }
- virtual int rightSize() const { return bigSize() + sizeof( _KeyNode ); }
- };
-
- class BalanceRightEmpty : public MergeRightEmpty {
- protected:
- virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) + 1; }
- virtual bool merge() const { return false; }
- virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
- virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
- private:
- BSONObj _oldTop;
- };
-
- class BalanceLeftEmpty : public MergeLeftEmpty {
- protected:
- virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) + 1; }
- virtual bool merge() const { return false; }
- virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key; }
- virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key ) ); }
- private:
- BSONObj _oldTop;
- };
-
- class DelEmptyNoNeighbors : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{b:{a:null}}", id() );
- ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "a" );
- // dump();
- ASSERT( unindex( k ) );
- // dump();
- ASSERT_EQUALS( 1, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{b:null}", id() );
- }
- };
-
- class DelEmptyEmptyNeighbors : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
- ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "b" );
- // dump();
- ASSERT( unindex( k ) );
- // dump();
- ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), 0, true ) );
- ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{a:null,c:null,d:null}", id() );
- }
- };
-
- class DelInternal : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
- int unused = 0;
- ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "c" );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{a:null,b:null,d:null}", id() );
- }
- };
-
- class DelInternalReplaceWithUnused : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
- getDur().writingInt( const_cast< DiskLoc& >( bt()->keyNode( 1 ).prevChildBucket.btree()->keyNode( 0 ).recordLoc ).GETOFS() ) |= 1; // make unused
- int unused = 0;
- ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 1, unused );
- ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "c" );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- unused = 0;
- ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 1, unused );
- ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
- // doesn't discriminate between used and unused
- ArtificialTree::checkStructure( "{a:null,b:null,d:null}", id() );
- }
- };
-
- class DelInternalReplaceRight : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,_:{b:null}}", id() );
- int unused = 0;
- ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "a" );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- unused = 0;
- ASSERT_EQUALS( 1, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{b:null}", id() );
- }
- };
-
- class DelInternalPromoteKey : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,y:{d:{c:{b:null}},_:{e:null}},z:null}", id() );
- int unused = 0;
- ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "y" );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- unused = 0;
- ASSERT_EQUALS( 6, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{a:null,e:{c:{b:null},d:null},z:null}", id() );
- }
- };
-
- class DelInternalPromoteRightKey : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,_:{e:{c:null},_:{f:null}}}", id() );
- int unused = 0;
- ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "a" );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- unused = 0;
- ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{c:null,_:{e:null,f:null}}", id() );
- }
- };
-
- class DelInternalReplacementPrevNonNull : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,d:{c:{b:null}},e:null}", id() );
- int unused = 0;
- ASSERT_EQUALS( 5, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "d" );
- // dump();
- ASSERT( unindex( k ) );
- // dump();
- ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 1, unused );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{a:null,d:{c:{b:null}},e:null}", id() );
- ASSERT( bt()->keyNode( 1 ).recordLoc.getOfs() & 1 ); // check 'unused' key
- }
- };
-
- class DelInternalReplacementNextNonNull : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{a:null,_:{c:null,_:{d:null}}}", id() );
- int unused = 0;
- ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << "a" );
- // dump();
- ASSERT( unindex( k ) );
- // dump();
- ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 1, unused );
- ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{a:null,_:{c:null,_:{d:null}}}", id() );
- ASSERT( bt()->keyNode( 0 ).recordLoc.getOfs() & 1 ); // check 'unused' key
- }
- };
-
- class DelInternalSplitPromoteLeft : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:null,$20:null,$30$10:{$25:{$23:null},_:{$27:null}},$40:null,$50:null,$60:null,$70:null,$80:null,$90:null,$100:null}", id() );
- int unused = 0;
- ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x30, 0x10 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$60:{$10:null,$20:null,$27:{$23:null,$25:null},$40:null,$50:null},_:{$70:null,$80:null,$90:null,$100:null}}", id() );
- }
- };
-
- class DelInternalSplitPromoteRight : public Base {
- public:
- void run() {
- string ns = id().indexNamespace();
- ArtificialTree::setTree( "{$10:null,$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null,$90:null,$100$10:{$95:{$93:null},_:{$97:null}}}", id() );
- int unused = 0;
- ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- BSONObj k = BSON( "" << bigNumString( 0x100, 0x10 ) );
-// dump();
- ASSERT( unindex( k ) );
-// dump();
- ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), &unused, true ) );
- ASSERT_EQUALS( 0, unused );
- ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
- ArtificialTree::checkStructure( "{$80:{$10:null,$20:null,$30:null,$40:null,$50:null,$60:null,$70:null},_:{$90:null,$97:{$93:null,$95:null}}}", id() );
- }
- };
-
- class All : public Suite {
- public:
- All() : Suite( "btree" ) {
- }
+#define BtreeBucket BtreeBucket<V0>
+#define btree btree<V0>
+#define btreemod btreemod<V0>
+#define testName "btree"
+#define BTVERSION 0
+namespace BtreeTests0 {
+ #include "btreetests.inl"
+}
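+// btreetests.inl is textually included twice: above as BtreeTests0, compiled
+// against the V0 bucket/btree types, and again below as BtreeTests1 against
+// V1, so the same test bodies exercise both on-disk index formats.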
- void setupTests() {
- add< Create >();
- add< SimpleInsertDelete >();
- add< SplitRightHeavyBucket >();
- add< SplitLeftHeavyBucket >();
- add< MissingLocate >();
- add< MissingLocateMultiBucket >();
- add< SERVER983 >();
- add< DontReuseUnused >();
- add< PackUnused >();
- add< DontDropReferenceKey >();
- add< MergeBucketsLeft >();
- add< MergeBucketsRight >();
-// add< MergeBucketsHead >();
- add< MergeBucketsDontReplaceHead >();
- add< MergeBucketsDelInternal >();
- add< MergeBucketsRightNull >();
- add< DontMergeSingleBucket >();
- add< ParentMergeNonRightToLeft >();
- add< ParentMergeNonRightToRight >();
- add< CantMergeRightNoMerge >();
- add< CantMergeLeftNoMerge >();
- add< MergeOption >();
- add< ForceMergeLeft >();
- add< ForceMergeRight >();
- add< RecursiveMerge >();
- add< RecursiveMergeRightBucket >();
- add< RecursiveMergeDoubleRightBucket >();
- add< MergeSizeJustRightRight >();
- add< MergeSizeJustRightLeft >();
- add< MergeSizeRight >();
- add< MergeSizeLeft >();
- add< NoMergeBelowMarkRight >();
- add< NoMergeBelowMarkLeft >();
- add< MergeSizeRightTooBig >();
- add< MergeSizeLeftTooBig >();
- add< BalanceOneLeftToRight >();
- add< BalanceOneRightToLeft >();
- add< BalanceThreeLeftToRight >();
- add< BalanceThreeRightToLeft >();
- add< BalanceSingleParentKey >();
- add< PackEmpty >();
- add< PackedDataSizeEmpty >();
- add< BalanceSingleParentKeyPackParent >();
- add< BalanceSplitParent >();
- add< EvenRebalanceLeft >();
- add< EvenRebalanceLeftCusp >();
- add< EvenRebalanceRight >();
- add< EvenRebalanceRightCusp >();
- add< EvenRebalanceCenter >();
- add< OddRebalanceLeft >();
- add< OddRebalanceRight >();
- add< OddRebalanceCenter >();
- add< RebalanceEmptyRight >();
- add< RebalanceEmptyLeft >();
- add< NoMoveAtLowWaterMarkRight >();
- add< MoveBelowLowWaterMarkRight >();
- add< NoMoveAtLowWaterMarkLeft >();
- add< MoveBelowLowWaterMarkLeft >();
- add< PreferBalanceLeft >();
- add< PreferBalanceRight >();
- add< RecursiveMergeThenBalance >();
- add< MergeRightEmpty >();
- add< MergeMinRightEmpty >();
- add< MergeLeftEmpty >();
- add< MergeMinLeftEmpty >();
- add< BalanceRightEmpty >();
- add< BalanceLeftEmpty >();
- add< DelEmptyNoNeighbors >();
- add< DelEmptyEmptyNeighbors >();
- add< DelInternal >();
- add< DelInternalReplaceWithUnused >();
- add< DelInternalReplaceRight >();
- add< DelInternalPromoteKey >();
- add< DelInternalPromoteRightKey >();
- add< DelInternalReplacementPrevNonNull >();
- add< DelInternalReplacementNextNonNull >();
- add< DelInternalSplitPromoteLeft >();
- add< DelInternalSplitPromoteRight >();
- }
- } myall;
+#undef BtreeBucket
+#undef btree
+#undef btreemod
+#define BtreeBucket BtreeBucket<V1>
+#define btree btree<V1>
+#define btreemod btreemod<V1>
+#undef testName
+#define testName "btree1"
+#undef BTVERSION
+#define BTVERSION 1
+namespace BtreeTests1 {
+ #include "btreetests.inl"
}
diff --git a/dbtests/btreetests.inl b/dbtests/btreetests.inl
new file mode 100644
index 0000000..ed9f0ea
--- /dev/null
+++ b/dbtests/btreetests.inl
@@ -0,0 +1,1702 @@
+ typedef BtreeBucket::_KeyNode _KeyNode;
+
+ const char* ns() {
+ return "unittests.btreetests";
+ }
+
+ // dummy, valid record loc
+ const DiskLoc recordLoc() {
+ return DiskLoc( 0, 2 );
+ }
+
+ class Ensure {
+ public:
+ Ensure() {
+ _c.ensureIndex( ns(), BSON( "a" << 1 ), false, "testIndex",
+ false, // do not cache the index spec; with two btree versions it is unclear whether cache=true would cause problems
+ false, BTVERSION);
+ }
+ ~Ensure() {
+ _c.dropCollection( ns() );
+ //_c.dropIndexes( ns() );
+ }
+ private:
+ DBDirectClient _c;
+ };
+
+ class Base : public Ensure {
+ public:
+ Base() :
+ _context( ns() ) {
+ {
+ bool f = false;
+ assert( f = true );
+ massert( 10402 , "assert is misdefined", f);
+ }
+ }
+ virtual ~Base() {}
+ static string bigNumString( long long n, int len = 800 ) {
+ char sub[17];
+ sprintf( sub, "%.16llx", n );
+ string val( len, ' ' );
+ for( int i = 0; i < len; ++i ) {
+ val[ i ] = sub[ i % 16 ];
+ }
+ return val;
+ }
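+ // For example, bigNumString( 0x30, 4 ) yields "0000", and the default
+ // 800-character form repeats the 16-digit hex representation of n
+ // (e.g. "0000000000000030" for 0x30) end to end.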
+ protected:
+ const BtreeBucket* bt() {
+ return id().head.btree();
+ }
+ DiskLoc dl() {
+ return id().head;
+ }
+ IndexDetails& id() {
+ NamespaceDetails *nsd = nsdetails( ns() );
+ assert( nsd );
+ return nsd->idx( 1 );
+ }
+ void checkValid( int nKeys ) {
+ ASSERT( bt() );
+ ASSERT( bt()->isHead() );
+ bt()->assertValid( order(), true );
+ ASSERT_EQUALS( nKeys, bt()->fullValidate( dl(), order(), 0, true ) );
+ }
+ void dump() {
+ bt()->dumpTree( dl(), order() );
+ }
+ void insert( BSONObj &key ) {
+ const BtreeBucket *b = bt();
+ b->bt_insert( dl(), recordLoc(), key, Ordering::make(order()), true, id(), true );
+ getDur().commitIfNeeded();
+ }
+ bool unindex( BSONObj &key ) {
+ getDur().commitIfNeeded();
+ return bt()->unindex( dl(), id(), key, recordLoc() );
+ }
+ static BSONObj simpleKey( char c, int n = 1 ) {
+ BSONObjBuilder builder;
+ string val( n, c );
+ builder.append( "a", val );
+ return builder.obj();
+ }
+ void locate( BSONObj &key, int expectedPos,
+ bool expectedFound, const DiskLoc &expectedLocation,
+ int direction = 1 ) {
+ int pos;
+ bool found;
+ DiskLoc location =
+ bt()->locate( id(), dl(), key, Ordering::make(order()), pos, found, recordLoc(), direction );
+ ASSERT_EQUALS( expectedFound, found );
+ ASSERT( location == expectedLocation );
+ ASSERT_EQUALS( expectedPos, pos );
+ }
+ bool present( BSONObj &key, int direction ) {
+ int pos;
+ bool found;
+ bt()->locate( id(), dl(), key, Ordering::make(order()), pos, found, recordLoc(), direction );
+ return found;
+ }
+ BSONObj order() {
+ return id().keyPattern();
+ }
+ const BtreeBucket *child( const BtreeBucket *b, int i ) {
+ assert( i <= b->nKeys() );
+ DiskLoc d;
+ if ( i == b->nKeys() ) {
+ d = b->getNextChild();
+ }
+ else {
+ d = b->keyNode( i ).prevChildBucket;
+ }
+ assert( !d.isNull() );
+ return d.btree();
+ }
+ void checkKey( char i ) {
+ stringstream ss;
+ ss << i;
+ checkKey( ss.str() );
+ }
+ void checkKey( const string &k ) {
+ BSONObj key = BSON( "" << k );
+// log() << "key: " << key << endl;
+ ASSERT( present( key, 1 ) );
+ ASSERT( present( key, -1 ) );
+ }
+ private:
+ dblock lk_;
+ Client::Context _context;
+ };
+
+ class Create : public Base {
+ public:
+ void run() {
+ checkValid( 0 );
+ }
+ };
+
+ class SimpleInsertDelete : public Base {
+ public:
+ void run() {
+ BSONObj key = simpleKey( 'z' );
+ insert( key );
+
+ checkValid( 1 );
+ locate( key, 0, true, dl() );
+
+ unindex( key );
+
+ checkValid( 0 );
+ locate( key, 0, false, DiskLoc() );
+ }
+ };
+
+ class SplitUnevenBucketBase : public Base {
+ public:
+ virtual ~SplitUnevenBucketBase() {}
+ void run() {
+ for ( int i = 0; i < 10; ++i ) {
+ BSONObj shortKey = simpleKey( shortToken( i ), 1 );
+ insert( shortKey );
+ BSONObj longKey = simpleKey( longToken( i ), 800 );
+ insert( longKey );
+ }
+ checkValid( 20 );
+ ASSERT_EQUALS( 1, bt()->nKeys() );
+ checkSplit();
+ }
+ protected:
+ virtual char shortToken( int i ) const = 0;
+ virtual char longToken( int i ) const = 0;
+ static char leftToken( int i ) {
+ return 'a' + i;
+ }
+ static char rightToken( int i ) {
+ return 'z' - i;
+ }
+ virtual void checkSplit() = 0;
+ };
+
+ class SplitRightHeavyBucket : public SplitUnevenBucketBase {
+ private:
+ virtual char shortToken( int i ) const {
+ return leftToken( i );
+ }
+ virtual char longToken( int i ) const {
+ return rightToken( i );
+ }
+ virtual void checkSplit() {
+ ASSERT_EQUALS( 15, child( bt(), 0 )->nKeys() );
+ ASSERT_EQUALS( 4, child( bt(), 1 )->nKeys() );
+ }
+ };
+
+ class SplitLeftHeavyBucket : public SplitUnevenBucketBase {
+ private:
+ virtual char shortToken( int i ) const {
+ return rightToken( i );
+ }
+ virtual char longToken( int i ) const {
+ return leftToken( i );
+ }
+ virtual void checkSplit() {
+ ASSERT_EQUALS( 4, child( bt(), 0 )->nKeys() );
+ ASSERT_EQUALS( 15, child( bt(), 1 )->nKeys() );
+ }
+ };
+
+ class MissingLocate : public Base {
+ public:
+ void run() {
+ for ( int i = 0; i < 3; ++i ) {
+ BSONObj k = simpleKey( 'b' + 2 * i );
+ insert( k );
+ }
+
+ locate( 1, 'a', 'b', dl() );
+ locate( 1, 'c', 'd', dl() );
+ locate( 1, 'e', 'f', dl() );
+ locate( 1, 'g', 'g' + 1, DiskLoc() ); // of course, 'h' isn't in the index.
+
+ // old behavior
+ // locate( -1, 'a', 'b', dl() );
+ // locate( -1, 'c', 'd', dl() );
+ // locate( -1, 'e', 'f', dl() );
+ // locate( -1, 'g', 'f', dl() );
+
+ locate( -1, 'a', 'a' - 1, DiskLoc() ); // of course, 'a' - 1 isn't in the index
+ locate( -1, 'c', 'b', dl() );
+ locate( -1, 'e', 'd', dl() );
+ locate( -1, 'g', 'f', dl() );
+ }
+ private:
+ void locate( int direction, char token, char expectedMatch,
+ DiskLoc expectedLocation ) {
+ BSONObj k = simpleKey( token );
+ int expectedPos = ( expectedMatch - 'b' ) / 2;
+ Base::locate( k, expectedPos, false, expectedLocation, direction );
+ }
+ };
+
+ class MissingLocateMultiBucket : public Base {
+ public:
+ void run() {
+ for ( int i = 0; i < 8; ++i ) {
+ insert( i );
+ }
+ insert( 9 );
+ insert( 8 );
+// dump();
+ BSONObj straddle = key( 'i' );
+ locate( straddle, 0, false, dl(), 1 );
+ straddle = key( 'k' );
+ locate( straddle, 0, false, dl(), -1 );
+ }
+ private:
+ BSONObj key( char c ) {
+ return simpleKey( c, 800 );
+ }
+ void insert( int i ) {
+ BSONObj k = key( 'b' + 2 * i );
+ Base::insert( k );
+ }
+ };
+
+ class SERVER983 : public Base {
+ public:
+ void run() {
+ for ( int i = 0; i < 10; ++i ) {
+ insert( i );
+ }
+// dump();
+ BSONObj straddle = key( 'o' );
+ locate( straddle, 0, false, dl(), 1 );
+ straddle = key( 'q' );
+ locate( straddle, 0, false, dl(), -1 );
+ }
+ private:
+ BSONObj key( char c ) {
+ return simpleKey( c, 800 );
+ }
+ void insert( int i ) {
+ BSONObj k = key( 'b' + 2 * i );
+ Base::insert( k );
+ }
+ };
+
+ class DontReuseUnused : public Base {
+ public:
+ void run() {
+ for ( int i = 0; i < 10; ++i ) {
+ insert( i );
+ }
+// dump();
+ BSONObj root = key( 'p' );
+ unindex( root );
+ Base::insert( root );
+ locate( root, 0, true, bt()->getNextChild(), 1 );
+ }
+ private:
+ BSONObj key( char c ) {
+ return simpleKey( c, 800 );
+ }
+ void insert( int i ) {
+ BSONObj k = key( 'b' + 2 * i );
+ Base::insert( k );
+ }
+ };
+
+ class PackUnused : public Base {
+ public:
+ void run() {
+ for ( long long i = 0; i < 1000000; i += 1000 ) {
+ insert( i );
+ }
+ string orig, after;
+ {
+ stringstream ss;
+ bt()->shape( ss );
+ orig = ss.str();
+ }
+ vector< string > toDel;
+ vector< string > other;
+ BSONObjBuilder start;
+ start.appendMinKey( "a" );
+ BSONObjBuilder end;
+ end.appendMaxKey( "a" );
+ auto_ptr< BtreeCursor > c( BtreeCursor::make( nsdetails( ns() ), 1, id(), start.done(), end.done(), false, 1 ) );
+ while( c->ok() ) {
+ if ( c->curKeyHasChild() ) {
+ toDel.push_back( c->currKey().firstElement().valuestr() );
+ }
+ else {
+ other.push_back( c->currKey().firstElement().valuestr() );
+ }
+ c->advance();
+ }
+ ASSERT( toDel.size() > 0 );
+ for( vector< string >::const_iterator i = toDel.begin(); i != toDel.end(); ++i ) {
+ BSONObj o = BSON( "a" << *i );
+ unindex( o );
+ }
+ ASSERT( other.size() > 0 );
+ for( vector< string >::const_iterator i = other.begin(); i != other.end(); ++i ) {
+ BSONObj o = BSON( "a" << *i );
+ unindex( o );
+ }
+
+ long long unused = 0;
+ ASSERT_EQUALS( 0, bt()->fullValidate( dl(), order(), &unused, true ) );
+
+ for ( long long i = 50000; i < 50100; ++i ) {
+ insert( i );
+ }
+
+ long long unused2 = 0;
+ ASSERT_EQUALS( 100, bt()->fullValidate( dl(), order(), &unused2, true ) );
+
+// log() << "old unused: " << unused << ", new unused: " << unused2 << endl;
+//
+ ASSERT( unused2 <= unused );
+ }
+ protected:
+ void insert( long long n ) {
+ string val = bigNumString( n );
+ BSONObj k = BSON( "a" << val );
+ Base::insert( k );
+ }
+ };
+
+ class DontDropReferenceKey : public PackUnused {
+ public:
+ void run() {
+ // with 80 keys the root node is full
+ for ( long long i = 0; i < 80; i += 1 ) {
+ insert( i );
+ }
+
+ BSONObjBuilder start;
+ start.appendMinKey( "a" );
+ BSONObjBuilder end;
+ end.appendMaxKey( "a" );
+ BSONObj l = bt()->keyNode( 0 ).key.toBson();
+ string toInsert;
+ auto_ptr< BtreeCursor > c( BtreeCursor::make( nsdetails( ns() ), 1, id(), start.done(), end.done(), false, 1 ) );
+ while( c->ok() ) {
+ if ( c->currKey().woCompare( l ) > 0 ) {
+ toInsert = c->currKey().firstElement().valuestr();
+ break;
+ }
+ c->advance();
+ }
+ // too much work to try to make this happen through inserts and deletes
+ // we are intentionally manipulating the btree bucket directly here
+ BtreeBucket::Loc* L = const_cast< BtreeBucket::Loc* >( &bt()->keyNode( 1 ).prevChildBucket );
+ getDur().writing(L)->Null();
+ getDur().writingInt( const_cast< BtreeBucket::Loc& >( bt()->keyNode( 1 ).recordLoc ).GETOFS() ) |= 1; // make unused
+ BSONObj k = BSON( "a" << toInsert );
+ Base::insert( k );
+ }
+ };
+
+ class MergeBuckets : public Base {
+ public:
+ virtual ~MergeBuckets() {}
+ void run() {
+ for ( int i = 0; i < 10; ++i ) {
+ insert( i );
+ }
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ int expectedCount = 10 - unindexKeys();
+// dump();
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ long long unused = 0;
+ ASSERT_EQUALS( expectedCount, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ }
+ protected:
+ BSONObj key( char c ) {
+ return simpleKey( c, 800 );
+ }
+ void insert( int i ) {
+ BSONObj k = key( 'b' + 2 * i );
+ Base::insert( k );
+ }
+ virtual int unindexKeys() = 0;
+ };
+
+ class MergeBucketsLeft : public MergeBuckets {
+ virtual int unindexKeys() {
+ BSONObj k = key( 'b' );
+ unindex( k );
+ k = key( 'b' + 2 );
+ unindex( k );
+ k = key( 'b' + 4 );
+ unindex( k );
+ k = key( 'b' + 6 );
+ unindex( k );
+ return 4;
+ }
+ };
+
+ class MergeBucketsRight : public MergeBuckets {
+ virtual int unindexKeys() {
+ BSONObj k = key( 'b' + 2 * 9 );
+ unindex( k );
+ return 1;
+ }
+ };
+
+ // deleting from head won't coalesce yet
+// class MergeBucketsHead : public MergeBuckets {
+// virtual BSONObj unindexKey() { return key( 'p' ); }
+// };
+
+ class MergeBucketsDontReplaceHead : public Base {
+ public:
+ void run() {
+ for ( int i = 0; i < 18; ++i ) {
+ insert( i );
+ }
+ // dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = key( 'a' + 17 );
+ unindex( k );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ long long unused = 0;
+ ASSERT_EQUALS( 17, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ }
+ private:
+ BSONObj key( char c ) {
+ return simpleKey( c, 800 );
+ }
+ void insert( int i ) {
+ BSONObj k = key( 'a' + i );
+ Base::insert( k );
+ }
+ };
+
+ // Tool to construct custom trees for tests.
+ class ArtificialTree : public BtreeBucket {
+ public:
+ void push( const BSONObj &key, const DiskLoc &child ) {
+ KeyOwned k(key);
+ pushBack( dummyDiskLoc(), k, Ordering::make( BSON( "a" << 1 ) ), child );
+ }
+ void setNext( const DiskLoc &child ) {
+ nextChild = child;
+ }
+ static DiskLoc make( IndexDetails &id ) {
+ DiskLoc ret = addBucket( id );
+ is( ret )->init();
+ getDur().commitIfNeeded();
+ return ret;
+ }
+ static ArtificialTree *is( const DiskLoc &l ) {
+ return static_cast< ArtificialTree * >( l.btreemod() );
+ }
+ static DiskLoc makeTree( const string &spec, IndexDetails &id ) {
+ return makeTree( fromjson( spec ), id );
+ }
+ static DiskLoc makeTree( const BSONObj &spec, IndexDetails &id ) {
+ DiskLoc node = make( id );
+ ArtificialTree *n = ArtificialTree::is( node );
+ BSONObjIterator i( spec );
+ while( i.more() ) {
+ BSONElement e = i.next();
+ DiskLoc child;
+ if ( e.type() == Object ) {
+ child = makeTree( e.embeddedObject(), id );
+ }
+ if ( e.fieldName() == string( "_" ) ) {
+ n->setNext( child );
+ }
+ else {
+ n->push( BSON( "" << expectedKey( e.fieldName() ) ), child );
+ }
+ }
+ n->fixParentPtrs( node );
+ return node;
+ }
+ static void setTree( const string &spec, IndexDetails &id ) {
+ set( makeTree( spec, id ), id );
+ }
+ static void set( const DiskLoc &l, IndexDetails &id ) {
+ ArtificialTree::is( id.head )->deallocBucket( id.head, id );
+ getDur().writingDiskLoc(id.head) = l;
+ }
+ static string expectedKey( const char *spec ) {
+ if ( spec[ 0 ] != '$' ) {
+ return spec;
+ }
+ char *endPtr;
+ // parsing a long long is a pain, so just allow shorter keys for now
+ unsigned long long num = strtol( spec + 1, &endPtr, 16 );
+ int len = 800;
+ if( *endPtr == '$' ) {
+ len = strtol( endPtr + 1, 0, 16 );
+ }
+ return Base::bigNumString( num, len );
+ }
+ static void checkStructure( const BSONObj &spec, const IndexDetails &id, const DiskLoc node ) {
+ ArtificialTree *n = ArtificialTree::is( node );
+ BSONObjIterator j( spec );
+ for( int i = 0; i < n->n; ++i ) {
+ ASSERT( j.more() );
+ BSONElement e = j.next();
+ KeyNode kn = n->keyNode( i );
+ string expected = expectedKey( e.fieldName() );
+ ASSERT( present( id, BSON( "" << expected ), 1 ) );
+ ASSERT( present( id, BSON( "" << expected ), -1 ) );
+ ASSERT_EQUALS( expected, kn.key.toBson().firstElement().valuestr() );
+ if ( kn.prevChildBucket.isNull() ) {
+ ASSERT( e.type() == jstNULL );
+ }
+ else {
+ ASSERT( e.type() == Object );
+ checkStructure( e.embeddedObject(), id, kn.prevChildBucket );
+ }
+ }
+ if ( n->nextChild.isNull() ) {
+ // maybe should allow '_' field with null value?
+ ASSERT( !j.more() );
+ }
+ else {
+ BSONElement e = j.next();
+ ASSERT_EQUALS( string( "_" ), e.fieldName() );
+ ASSERT( e.type() == Object );
+ checkStructure( e.embeddedObject(), id, n->nextChild );
+ }
+ ASSERT( !j.more() );
+ }
+ static void checkStructure( const string &spec, const IndexDetails &id ) {
+ checkStructure( fromjson( spec ), id, id.head );
+ }
+ static bool present( const IndexDetails &id, const BSONObj &key, int direction ) {
+ int pos;
+ bool found;
+ id.head.btree()->locate( id, id.head, key, Ordering::make(id.keyPattern()), pos, found, recordLoc(), direction );
+ return found;
+ }
+ int headerSize() const { return BtreeBucket::headerSize(); }
+ int packedDataSize( int pos ) const { return BtreeBucket::packedDataSize( pos ); }
+ void fixParentPtrs( const DiskLoc &thisLoc ) { BtreeBucket::fixParentPtrs( thisLoc ); }
+ void forcePack() {
+ topSize += emptySize;
+ emptySize = 0;
+ setNotPacked();
+ }
+ private:
+ DiskLoc dummyDiskLoc() const { return DiskLoc( 0, 2 ); }
+ };
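+ // Illustrative sketch (keys chosen for this comment only) of the tree-spec
+ // notation consumed by ArtificialTree::setTree() and checkStructure():
+ // each field names a key in the bucket, an Object value is that key's
+ // prevChildBucket subtree, null means no child, and the "_" field sets the
+ // bucket's nextChild. Keys of the form $<hexnum> expand through
+ // expectedKey()/bigNumString() to long string keys, and $<hexnum>$<hexlen>
+ // overrides the generated key length.
+ //
+ //     ArtificialTree::setTree( "{d:{b:{a:null},_:{c:null}},_:{f:null}}", id() );
+ //     // root bucket holds "d"; "d"'s left child holds "b" (child {a}) with
+ //     // nextChild {c}; the root's nextChild is the bucket {f}
+ //     ArtificialTree::checkStructure( "{d:{b:{a:null},_:{c:null}},_:{f:null}}", id() );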
+
+ /**
+ * We could probably refactor the following tests, but it's easier to debug
+ * them in the present state.
+ */
+
+ class MergeBucketsDelInternal : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,_:{c:null}},_:{f:{e:null},_:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:{a:null},d:{c:null},f:{e:null},_:{g:null}}", id() );
+ }
+ };
+
+ class MergeBucketsRightNull : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},_:{f:{e:null},h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}", id() );
+ }
+ };
+
+ // not yet handling this case
+ class DontMergeSingleBucket : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},c:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{d:{b:{a:null}}}", id() );
+ }
+ };
+
+ class ParentMergeNonRightToLeft : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},i:{f:{e:null},h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ // child does not currently replace parent in this case
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{i:{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}}", id() );
+ }
+ };
+
+ class ParentMergeNonRightToRight : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},cc:{c:null}},i:{f:{e:null},ff:null,h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ff" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ // child does not currently replace parent in this case
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{i:{b:{a:null},cc:{c:null},d:null,f:{e:null},h:{g:null}}}", id() );
+ }
+ };
+
+ class CantMergeRightNoMerge : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{d:{b:{a:null},bb:null,cc:{c:null}},dd:null,_:{f:{e:null},h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "bb" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{d:{b:{a:null},cc:{c:null}},dd:null,_:{f:{e:null},h:{g:null}}}", id() );
+ }
+ };
+
+ class CantMergeLeftNoMerge : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},d:null,_:{f:{e:null},g:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "g" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 6, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:{b:{a:null}},d:null,_:{f:{e:null}}}", id() );
+ }
+ };
+
+ class MergeOption : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},f:{e:{d:null},ee:null},_:{h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ee" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:{b:{a:null}},_:{e:{d:null},f:null,h:{g:null}}}", id() );
+ }
+ };
+
+ class ForceMergeLeft : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},f:{e:{d:null},ee:null},ff:null,_:{h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ee" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{f:{b:{a:null},c:null,e:{d:null}},ff:null,_:{h:{g:null}}}", id() );
+ }
+ };
+
+ class ForceMergeRight : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{c:{b:{a:null}},cc:null,f:{e:{d:null},ee:null},_:{h:{g:null}}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 7, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "ee" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:{b:{a:null}},cc:null,_:{e:{d:null},f:null,h:{g:null}}}", id() );
+ }
+ };
+
+ class RecursiveMerge : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},g:{f:null}},j:{i:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 10, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ // height is not currently reduced in this case
+ ArtificialTree::checkStructure( "{j:{g:{b:{a:null},d:null,e:null,f:null},h:null,i:null}}", id() );
+ }
+ };
+
+ class RecursiveMergeRightBucket : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},g:{f:null}},_:{i:null}}", id() );
+// dump();
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 9, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{g:{b:{a:null},d:null,e:null,f:null},h:null,i:null}", id() );
+ }
+ };
+
+ class RecursiveMergeDoubleRightBucket : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( "{h:{e:{b:{a:null},c:null,d:null},_:{f:null}},_:{i:null}}", id() );
+ string ns = id().indexNamespace();
+ ASSERT_EQUALS( 8, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "c" );
+ assert( unindex( k ) );
+ long long keyCount = bt()->fullValidate( dl(), order(), 0, true );
+ ASSERT_EQUALS( 7, keyCount );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ // no recursion currently in this case
+ ArtificialTree::checkStructure( "{h:{b:{a:null},d:null,e:null,f:null},_:{i:null}}", id() );
+ }
+ };
+
+ class MergeSizeBase : public Base {
+ public:
+ MergeSizeBase() : _count() {}
+ virtual ~MergeSizeBase() {}
+ void run() {
+ typedef ArtificialTree A;
+ A::set( A::make( id() ), id() );
+ A* root = A::is( dl() );
+ DiskLoc left = A::make( id() );
+ root->push( biggestKey( 'm' ), left );
+ _count = 1;
+ A* l = A::is( left );
+ DiskLoc right = A::make( id() );
+ root->setNext( right );
+ A* r = A::is( right );
+ root->fixParentPtrs( dl() );
+
+ //ASSERT_EQUALS( bigSize(), bigSize() / 2 * 2 );
+ fillToExactSize( l, leftSize(), 'a' );
+ fillToExactSize( r, rightSize(), 'n' );
+ ASSERT( leftAdditional() <= 2 );
+ if ( leftAdditional() >= 2 ) {
+ l->push( bigKey( 'k' ), DiskLoc() );
+ }
+ if ( leftAdditional() >= 1 ) {
+ l->push( bigKey( 'l' ), DiskLoc() );
+ }
+ ASSERT( rightAdditional() <= 2 );
+ if ( rightAdditional() >= 2 ) {
+ r->push( bigKey( 'y' ), DiskLoc() );
+ }
+ if ( rightAdditional() >= 1 ) {
+ r->push( bigKey( 'z' ), DiskLoc() );
+ }
+ _count += leftAdditional() + rightAdditional();
+
+// dump();
+
+ initCheck();
+ string ns = id().indexNamespace();
+ const char *keys = delKeys();
+ for( const char *i = keys; *i; ++i ) {
+ long long unused = 0;
+ ASSERT_EQUALS( _count, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = bigKey( *i );
+ unindex( k );
+// dump();
+ --_count;
+ }
+
+// dump();
+
+ long long unused = 0;
+ ASSERT_EQUALS( _count, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ validate();
+ if ( !merge() ) {
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ }
+ else {
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ }
+ }
+ protected:
+ virtual int leftAdditional() const { return 2; }
+ virtual int rightAdditional() const { return 2; }
+ virtual void initCheck() {}
+ virtual void validate() {}
+ virtual int leftSize() const = 0;
+ virtual int rightSize() const = 0;
+ virtual const char * delKeys() const { return "klyz"; }
+ virtual bool merge() const { return true; }
+ void fillToExactSize( ArtificialTree *t, int targetSize, char startKey ) {
+ int size = 0;
+ while( size < targetSize ) {
+ int space = targetSize - size;
+ int nextSize = space - sizeof( _KeyNode );
+ assert( nextSize > 0 );
+ BSONObj newKey = key( startKey++, nextSize );
+ t->push( newKey, DiskLoc() );
+ size += BtreeBucket::KeyOwned(newKey).dataSize() + sizeof( _KeyNode );
+ _count += 1;
+ }
+ if( t->packedDataSize( 0 ) != targetSize ) {
+ ASSERT_EQUALS( t->packedDataSize( 0 ), targetSize );
+ }
+ }
+ static BSONObj key( char a, int size ) {
+ if ( size >= bigSize() ) {
+ return bigKey( a );
+ }
+ return simpleKey( a, size - ( bigSize() - 801 ) );
+ }
+ static BSONObj bigKey( char a ) {
+ return simpleKey( a, 801 );
+ }
+ static BSONObj biggestKey( char a ) {
+ int size = BtreeBucket::getKeyMax() - bigSize() + 801;
+ return simpleKey( a, size );
+ }
+ static int bigSize() {
+ return BtreeBucket::KeyOwned(bigKey( 'a' )).dataSize();
+ }
+ static int biggestSize() {
+ return BtreeBucket::KeyOwned(biggestKey( 'a' )).dataSize();
+ }
+ int _count;
+ };
+
+ class MergeSizeJustRightRight : public MergeSizeBase {
+ protected:
+ virtual int rightSize() const { return BtreeBucket::lowWaterMark() - 1; }
+ virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) - ( BtreeBucket::lowWaterMark() - 1 ); }
+ };
+
+ class MergeSizeJustRightLeft : public MergeSizeBase {
+ protected:
+ virtual int leftSize() const { return BtreeBucket::lowWaterMark() - 1; }
+ virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) - ( BtreeBucket::lowWaterMark() - 1 ); }
+ virtual const char * delKeys() const { return "yzkl"; }
+ };
+
+ class MergeSizeRight : public MergeSizeJustRightRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() - 1; }
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
+ };
+
+ class MergeSizeLeft : public MergeSizeJustRightLeft {
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() - 1; }
+ };
+
+ class NoMergeBelowMarkRight : public MergeSizeJustRightRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() + 1; }
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() - 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class NoMergeBelowMarkLeft : public MergeSizeJustRightLeft {
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() - 1; }
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() + 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class MergeSizeRightTooBig : public MergeSizeJustRightLeft {
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class MergeSizeLeftTooBig : public MergeSizeJustRightRight {
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
+ virtual bool merge() const { return false; }
+ };
+
+ class BalanceOneLeftToRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},b:{$20:null,$30:null,$40:null,$50:null,a:null},_:{c:null}}", id() );
+ ASSERT_EQUALS( 14, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x40 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},b:{$10:null,$20:null,$30:null,$50:null,a:null},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceOneRightToLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null},b:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null},_:{c:null}}", id() );
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x3 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$20:{$1:null,$2:null,$4:null,$10:null},b:{$30:null,$40:null,$50:null,$60:null,$70:null},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceThreeLeftToRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$20:{$1:{$0:null},$3:{$2:null},$5:{$4:null},$7:{$6:null},$9:{$8:null},$11:{$10:null},$13:{$12:null},_:{$14:null}},b:{$30:null,$40:{$35:null},$50:{$45:null}},_:{c:null}}", id() );
+ ASSERT_EQUALS( 23, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 14, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x30 ) );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 22, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 14, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$9:{$1:{$0:null},$3:{$2:null},$5:{$4:null},$7:{$6:null},_:{$8:null}},b:{$11:{$10:null},$13:{$12:null},$20:{$14:null},$40:{$35:null},$50:{$45:null}},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceThreeRightToLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$20:{$1:{$0:null},$3:{$2:null},$5:null,_:{$14:null}},b:{$30:{$25:null},$40:{$35:null},$50:{$45:null},$60:{$55:null},$70:{$65:null},$80:{$75:null},$90:{$85:null},$100:{$95:null}},_:{c:null}}", id() );
+ ASSERT_EQUALS( 25, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 15, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x5 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 24, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 15, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$50:{$1:{$0:null},$3:{$2:null},$20:{$14:null},$30:{$25:null},$40:{$35:null},_:{$45:null}},b:{$60:{$55:null},$70:{$65:null},$80:{$75:null},$90:{$85:null},$100:{$95:null}},_:{c:null}}", id() );
+ }
+ };
+
+ class BalanceSingleParentKey : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},_:{$20:null,$30:null,$40:null,$50:null,a:null}}", id() );
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x40 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$10:null,$20:null,$30:null,$50:null,a:null}}", id() );
+ }
+ };
+
+ class PackEmpty : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null}", id() );
+ BSONObj k = BSON( "" << "a" );
+ ASSERT( unindex( k ) );
+ ArtificialTree *t = ArtificialTree::is( dl() );
+ t->forcePack();
+ Tester::checkEmpty( t, id() );
+ }
+ class Tester : public ArtificialTree {
+ public:
+ static void checkEmpty( ArtificialTree *a, const IndexDetails &id ) {
+ Tester *t = static_cast< Tester * >( a );
+ ASSERT_EQUALS( 0, t->n );
+ ASSERT( !( t->flags & Packed ) );
+ Ordering o = Ordering::make( id.keyPattern() );
+ int zero = 0;
+ t->_packReadyForMod( o, zero );
+ ASSERT_EQUALS( 0, t->n );
+ ASSERT_EQUALS( 0, t->topSize );
+ ASSERT_EQUALS( BtreeBucket::bodySize(), t->emptySize );
+ ASSERT( t->flags & Packed );
+ }
+ };
+ };
+
+ class PackedDataSizeEmpty : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null}", id() );
+ BSONObj k = BSON( "" << "a" );
+ ASSERT( unindex( k ) );
+ ArtificialTree *t = ArtificialTree::is( dl() );
+ t->forcePack();
+ Tester::checkEmpty( t, id() );
+ }
+ class Tester : public ArtificialTree {
+ public:
+ static void checkEmpty( ArtificialTree *a, const IndexDetails &id ) {
+ Tester *t = static_cast< Tester * >( a );
+ ASSERT_EQUALS( 0, t->n );
+ ASSERT( !( t->flags & Packed ) );
+ int zero = 0;
+ ASSERT_EQUALS( 0, t->packedDataSize( zero ) );
+ ASSERT( !( t->flags & Packed ) );
+ }
+ };
+ };
+
+ class BalanceSingleParentKeyPackParent : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},_:{$20:null,$30:null,$40:null,$50:null,a:null}}", id() );
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ // force parent pack
+ ArtificialTree::is( dl() )->forcePack();
+ BSONObj k = BSON( "" << bigNumString( 0x40 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 11, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$6:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$10:null,$20:null,$30:null,$50:null,a:null}}", id() );
+ }
+ };
+
+ class BalanceSplitParent : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10$10:{$1:null,$2:null,$3:null,$4:null},$100:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null},$200:null,$300:null,$400:null,$500:null,$600:null,$700:null,$800:null,$900:null,_:{c:null}}", id() );
+ ASSERT_EQUALS( 22, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x3 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 21, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 6, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$500:{$30:{$1:null,$2:null,$4:null,$10$10:null,$20:null},$100:{$40:null,$50:null,$60:null,$70:null,$80:null},$200:null,$300:null,$400:null},_:{$600:null,$700:null,$800:null,$900:null,_:{c:null}}}", id() );
+ }
+ };
+
+ class RebalancedSeparatorBase : public Base {
+ public:
+ void run() {
+ ArtificialTree::setTree( treeSpec(), id() );
+ modTree();
+ Tester::checkSeparator( id(), expectedSeparator() );
+ }
+ virtual string treeSpec() const = 0;
+ virtual int expectedSeparator() const = 0;
+ virtual void modTree() {}
+ struct Tester : public ArtificialTree {
+ static void checkSeparator( const IndexDetails& id, int expected ) {
+ ASSERT_EQUALS( expected, static_cast< Tester * >( id.head.btreemod() )->rebalancedSeparatorPos( id.head, 0 ) );
+ }
+ };
+ };
+
+ class EvenRebalanceLeft : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$7:{$1:null,$2$31f:null,$3:null,$4$31f:null,$5:null,$6:null},_:{$8:null,$9:null,$10$31e:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceLeftCusp : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$6:{$1:null,$2$31f:null,$3:null,$4$31f:null,$5:null},_:{$7:null,$8:null,$9$31e:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceRight : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$3:{$1:null,$2$31f:null},_:{$4$31f:null,$5:null,$6:null,$7:null,$8$31e:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceRightCusp : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$4$31f:{$1:null,$2$31f:null,$3:null},_:{$5:null,$6:null,$7$31e:null,$8:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class EvenRebalanceCenter : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$5:{$1:null,$2$31f:null,$3:null,$4$31f:null},_:{$6:null,$7$31e:null,$8:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class OddRebalanceLeft : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$6$31f:{$1:null,$2:null,$3:null,$4:null,$5:null},_:{$7:null,$8:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class OddRebalanceRight : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$4:{$1:null,$2:null,$3:null},_:{$5:null,$6:null,$7:null,$8$31f:null,$9:null,$10:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class OddRebalanceCenter : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$5:{$1:null,$2:null,$3:null,$4:null},_:{$6:null,$7:null,$8:null,$9:null,$10$31f:null}}"; }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class RebalanceEmptyRight : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$a:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null,$7:null,$8:null,$9:null},_:{$b:null}}"; }
+ virtual void modTree() {
+ BSONObj k = BSON( "" << bigNumString( 0xb ) );
+ ASSERT( unindex( k ) );
+ }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class RebalanceEmptyLeft : public RebalancedSeparatorBase {
+ virtual string treeSpec() const { return "{$a:{$1:null},_:{$11:null,$12:null,$13:null,$14:null,$15:null,$16:null,$17:null,$18:null,$19:null}}"; }
+ virtual void modTree() {
+ BSONObj k = BSON( "" << bigNumString( 0x1 ) );
+ ASSERT( unindex( k ) );
+ }
+ virtual int expectedSeparator() const { return 4; }
+ };
+
+ class NoMoveAtLowWaterMarkRight : public MergeSizeJustRightRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize() + 1; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key.toBson(); }
+ virtual void validate() { ASSERT_EQUALS( _oldTop, bt()->keyNode( 0 ).key.toBson() ); }
+ virtual bool merge() const { return false; }
+ protected:
+ BSONObj _oldTop;
+ };
+
+ class MoveBelowLowWaterMarkRight : public NoMoveAtLowWaterMarkRight {
+ virtual int rightSize() const { return MergeSizeJustRightRight::rightSize(); }
+ virtual int leftSize() const { return MergeSizeJustRightRight::leftSize() + 1; }
+ // different top means we rebalanced
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key.toBson() ) ); }
+ };
+
+ class NoMoveAtLowWaterMarkLeft : public MergeSizeJustRightLeft {
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize() + 1; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key.toBson(); }
+ virtual void validate() { ASSERT_EQUALS( _oldTop, bt()->keyNode( 0 ).key.toBson() ); }
+ virtual bool merge() const { return false; }
+ protected:
+ BSONObj _oldTop;
+ };
+
+ class MoveBelowLowWaterMarkLeft : public NoMoveAtLowWaterMarkLeft {
+ virtual int leftSize() const { return MergeSizeJustRightLeft::leftSize(); }
+ virtual int rightSize() const { return MergeSizeJustRightLeft::rightSize() + 1; }
+ // different top means we rebalanced
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key.toBson() ) ); }
+ };
+
+ class PreferBalanceLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null,$2:null,$3:null,$4:null,$5:null,$6:null},$20:{$11:null,$12:null,$13:null,$14:null},_:{$30:null}}", id() );
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x12 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$5:{$1:null,$2:null,$3:null,$4:null},$20:{$6:null,$10:null,$11:null,$13:null,$14:null},_:{$30:null}}", id() );
+ }
+ };
+
+ class PreferBalanceRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$1:null},$20:{$11:null,$12:null,$13:null,$14:null},_:{$31:null,$32:null,$33:null,$34:null,$35:null,$36:null}}", id() );
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x12 ) );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$10:{$1:null},$31:{$11:null,$13:null,$14:null,$20:null},_:{$32:null,$33:null,$34:null,$35:null,$36:null}}", id() );
+ }
+ };
+
+ class RecursiveMergeThenBalance : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:{$5:{$1:null,$2:null},$8:{$6:null,$7:null}},_:{$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null,$90:null}}", id() );
+ ASSERT_EQUALS( 15, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x7 ) );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 14, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$40:{$8:{$1:null,$2:null,$5:null,$6:null},$10:null,$20:null,$30:null},_:{$50:null,$60:null,$70:null,$80:null,$90:null}}", id() );
+ }
+ };
+
+ class MergeRightEmpty : public MergeSizeBase {
+ protected:
+ virtual int rightAdditional() const { return 1; }
+ virtual int leftAdditional() const { return 1; }
+ virtual const char * delKeys() const { return "lz"; }
+ virtual int rightSize() const { return 0; }
+ virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ); }
+ };
+
+ class MergeMinRightEmpty : public MergeSizeBase {
+ protected:
+ virtual int rightAdditional() const { return 1; }
+ virtual int leftAdditional() const { return 0; }
+ virtual const char * delKeys() const { return "z"; }
+ virtual int rightSize() const { return 0; }
+ virtual int leftSize() const { return bigSize() + sizeof( _KeyNode ); }
+ };
+
+ class MergeLeftEmpty : public MergeSizeBase {
+ protected:
+ virtual int rightAdditional() const { return 1; }
+ virtual int leftAdditional() const { return 1; }
+ virtual const char * delKeys() const { return "zl"; }
+ virtual int leftSize() const { return 0; }
+ virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ); }
+ };
+
+ class MergeMinLeftEmpty : public MergeSizeBase {
+ protected:
+ virtual int leftAdditional() const { return 1; }
+ virtual int rightAdditional() const { return 0; }
+ virtual const char * delKeys() const { return "l"; }
+ virtual int leftSize() const { return 0; }
+ virtual int rightSize() const { return bigSize() + sizeof( _KeyNode ); }
+ };
+
+ class BalanceRightEmpty : public MergeRightEmpty {
+ protected:
+ virtual int leftSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) + 1; }
+ virtual bool merge() const { return false; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key.toBson(); }
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key.toBson() ) ); }
+ private:
+ BSONObj _oldTop;
+ };
+
+ class BalanceLeftEmpty : public MergeLeftEmpty {
+ protected:
+ virtual int rightSize() const { return BtreeBucket::bodySize() - biggestSize() - sizeof( _KeyNode ) + 1; }
+ virtual bool merge() const { return false; }
+ virtual void initCheck() { _oldTop = bt()->keyNode( 0 ).key.toBson(); }
+ virtual void validate() { ASSERT( !( _oldTop == bt()->keyNode( 0 ).key.toBson() ) ); }
+ private:
+ BSONObj _oldTop;
+ };
+
+ class DelEmptyNoNeighbors : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{b:{a:null}}", id() );
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 1, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:null}", id() );
+ }
+ };
+
+ class DelEmptyEmptyNeighbors : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "b" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), 0, true ) );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,c:null,d:null}", id() );
+ }
+ };
+
+ class DelInternal : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "c" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,b:null,d:null}", id() );
+ }
+ };
+
+ class DelInternalReplaceWithUnused : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,c:{b:null},d:null}", id() );
+ getDur().writingInt( const_cast< BtreeBucket::Loc& >( bt()->keyNode( 1 ).prevChildBucket.btree()->keyNode( 0 ).recordLoc ).GETOFS() ) |= 1; // make unused
+ long long unused = 0;
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "c" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ // checkStructure doesn't discriminate between used and unused keys
+ ArtificialTree::checkStructure( "{a:null,b:null,d:null}", id() );
+ }
+ };
+
+ class DelInternalReplaceRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,_:{b:null}}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 1, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 1, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{b:null}", id() );
+ }
+ };
+
+ class DelInternalPromoteKey : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,y:{d:{c:{b:null}},_:{e:null}},z:null}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 7, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 5, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "y" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 6, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,e:{c:{b:null},d:null},z:null}", id() );
+ }
+ };
+
+ class DelInternalPromoteRightKey : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,_:{e:{c:null},_:{f:null}}}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ unused = 0;
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 2, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{c:null,_:{e:null,f:null}}", id() );
+ }
+ };
+
+ class DelInternalReplacementPrevNonNull : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,d:{c:{b:null}},e:null}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 5, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "d" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 4, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,d:{c:{b:null}},e:null}", id() );
+ ASSERT( bt()->keyNode( 1 ).recordLoc.getOfs() & 1 ); // check 'unused' key
+ }
+ };
+
+ class DelInternalReplacementNextNonNull : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{a:null,_:{c:null,_:{d:null}}}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 3, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << "a" );
+ // dump();
+ ASSERT( unindex( k ) );
+ // dump();
+ ASSERT_EQUALS( 2, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 1, unused );
+ ASSERT_EQUALS( 3, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{a:null,_:{c:null,_:{d:null}}}", id() );
+ ASSERT( bt()->keyNode( 0 ).recordLoc.getOfs() & 1 ); // check 'unused' key
+ }
+ };
+
+ class DelInternalSplitPromoteLeft : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:null,$20:null,$30$10:{$25:{$23:null},_:{$27:null}},$40:null,$50:null,$60:null,$70:null,$80:null,$90:null,$100:null}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x30, 0x10 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$60:{$10:null,$20:null,$27:{$23:null,$25:null},$40:null,$50:null},_:{$70:null,$80:null,$90:null,$100:null}}", id() );
+ }
+ };
+
+ class DelInternalSplitPromoteRight : public Base {
+ public:
+ void run() {
+ string ns = id().indexNamespace();
+ ArtificialTree::setTree( "{$10:null,$20:null,$30:null,$40:null,$50:null,$60:null,$70:null,$80:null,$90:null,$100$10:{$95:{$93:null},_:{$97:null}}}", id() );
+ long long unused = 0;
+ ASSERT_EQUALS( 13, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ BSONObj k = BSON( "" << bigNumString( 0x100, 0x10 ) );
+// dump();
+ ASSERT( unindex( k ) );
+// dump();
+ ASSERT_EQUALS( 12, bt()->fullValidate( dl(), order(), &unused, true ) );
+ ASSERT_EQUALS( 0, unused );
+ ASSERT_EQUALS( 4, nsdetails( ns.c_str() )->stats.nrecords );
+ ArtificialTree::checkStructure( "{$80:{$10:null,$20:null,$30:null,$40:null,$50:null,$60:null,$70:null},_:{$90:null,$97:{$93:null,$95:null}}}", id() );
+ }
+ };
+
+ class SignedZeroDuplication : public Base {
+ public:
+ void run() {
+ ASSERT_EQUALS( 0.0, -0.0 );
+ DBDirectClient c;
+ c.ensureIndex( ns(), BSON( "b" << 1 ), true );
+ c.insert( ns(), BSON( "b" << 0.0 ) );
+ c.insert( ns(), BSON( "b" << 1.0 ) );
+ c.update( ns(), BSON( "b" << 1.0 ), BSON( "b" << -0.0 ) );
+ ASSERT_EQUALS( 1U, c.count( ns(), BSON( "b" << 0.0 ) ) );
+ }
+ };
+
+ class All : public Suite {
+ public:
+ All() : Suite( testName ) {
+ }
+
+ void setupTests() {
+ add< Create >();
+ add< SimpleInsertDelete >();
+ add< SplitRightHeavyBucket >();
+ add< SplitLeftHeavyBucket >();
+ add< MissingLocate >();
+ add< MissingLocateMultiBucket >();
+ add< SERVER983 >();
+ add< DontReuseUnused >();
+ add< PackUnused >();
+ add< DontDropReferenceKey >();
+ add< MergeBucketsLeft >();
+ add< MergeBucketsRight >();
+// add< MergeBucketsHead >();
+ add< MergeBucketsDontReplaceHead >();
+ add< MergeBucketsDelInternal >();
+ add< MergeBucketsRightNull >();
+ add< DontMergeSingleBucket >();
+ add< ParentMergeNonRightToLeft >();
+ add< ParentMergeNonRightToRight >();
+ add< CantMergeRightNoMerge >();
+ add< CantMergeLeftNoMerge >();
+ add< MergeOption >();
+ add< ForceMergeLeft >();
+ add< ForceMergeRight >();
+ add< RecursiveMerge >();
+ add< RecursiveMergeRightBucket >();
+ add< RecursiveMergeDoubleRightBucket >();
+ add< MergeSizeJustRightRight >();
+ add< MergeSizeJustRightLeft >();
+ add< MergeSizeRight >();
+ add< MergeSizeLeft >();
+ add< NoMergeBelowMarkRight >();
+ add< NoMergeBelowMarkLeft >();
+ add< MergeSizeRightTooBig >();
+ add< MergeSizeLeftTooBig >();
+ add< BalanceOneLeftToRight >();
+ add< BalanceOneRightToLeft >();
+ add< BalanceThreeLeftToRight >();
+ add< BalanceThreeRightToLeft >();
+ add< BalanceSingleParentKey >();
+ add< PackEmpty >();
+ add< PackedDataSizeEmpty >();
+ add< BalanceSingleParentKeyPackParent >();
+ add< BalanceSplitParent >();
+ add< EvenRebalanceLeft >();
+ add< EvenRebalanceLeftCusp >();
+ add< EvenRebalanceRight >();
+ add< EvenRebalanceRightCusp >();
+ add< EvenRebalanceCenter >();
+ add< OddRebalanceLeft >();
+ add< OddRebalanceRight >();
+ add< OddRebalanceCenter >();
+ add< RebalanceEmptyRight >();
+ add< RebalanceEmptyLeft >();
+ add< NoMoveAtLowWaterMarkRight >();
+ add< MoveBelowLowWaterMarkRight >();
+ add< NoMoveAtLowWaterMarkLeft >();
+ add< MoveBelowLowWaterMarkLeft >();
+ add< PreferBalanceLeft >();
+ add< PreferBalanceRight >();
+ add< RecursiveMergeThenBalance >();
+ add< MergeRightEmpty >();
+ add< MergeMinRightEmpty >();
+ add< MergeLeftEmpty >();
+ add< MergeMinLeftEmpty >();
+ add< BalanceRightEmpty >();
+ add< BalanceLeftEmpty >();
+ add< DelEmptyNoNeighbors >();
+ add< DelEmptyEmptyNeighbors >();
+ add< DelInternal >();
+ add< DelInternalReplaceWithUnused >();
+ add< DelInternalReplaceRight >();
+ add< DelInternalPromoteKey >();
+ add< DelInternalPromoteRightKey >();
+ add< DelInternalReplacementPrevNonNull >();
+ add< DelInternalReplacementNextNonNull >();
+ add< DelInternalSplitPromoteLeft >();
+ add< DelInternalSplitPromoteRight >();
+ add< SignedZeroDuplication >();
+ }
+ } myall;
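
The SignedZeroDuplication test above leans on IEEE 754 semantics: 0.0 and -0.0 compare equal, so updating an indexed value from 1.0 to -0.0 collides with the existing 0.0 under a unique index, leaving a single matching document. A minimal standalone sketch of that comparison behaviour, using only the standard library (no mongo types), might look like:

#include <cassert>
#include <cmath>
#include <cstring>

int main() {
    double pos = 0.0, neg = -0.0;

    // Positive and negative zero compare equal...
    assert(pos == neg);
    assert(!(pos < neg) && !(neg < pos));

    // ...even though their bit patterns differ (the sign bit is set on -0.0).
    assert(std::memcmp(&pos, &neg, sizeof(double)) != 0);
    assert(std::signbit(neg) && !std::signbit(pos));

    // Any unique index keyed on this comparison order therefore treats
    // 0.0 and -0.0 as duplicates.
    return 0;
}
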
diff --git a/dbtests/clienttests.cpp b/dbtests/clienttests.cpp
index f51b765..a64894b 100644
--- a/dbtests/clienttests.cpp
+++ b/dbtests/clienttests.cpp
@@ -156,7 +156,7 @@ namespace ClientTests {
public:
Create() : Base( "Create" ) {}
void run() {
- db.createCollection( "unittests.clienttests.create", 0, true );
+ db.createCollection( "unittests.clienttests.create", 4096, true );
BSONObj info;
ASSERT( db.runCommand( "unittests", BSON( "collstats" << "clienttests.create" ), info ) );
}
diff --git a/dbtests/cursortests.cpp b/dbtests/cursortests.cpp
index ddd7b03..cf66186 100644
--- a/dbtests/cursortests.cpp
+++ b/dbtests/cursortests.cpp
@@ -18,10 +18,10 @@
*/
#include "pch.h"
-#include "../db/db.h"
#include "../db/clientcursor.h"
#include "../db/instance.h"
#include "../db/btree.h"
+#include "../db/queryutil.h"
#include "dbtests.h"
namespace CursorTests {
@@ -33,11 +33,12 @@ namespace CursorTests {
class Base {
protected:
+ static const char *ns() { return "unittests.cursortests.Base"; }
FieldRangeVector *vec( int *vals, int len, int direction = 1 ) {
- FieldRangeSet s( "", BSON( "a" << 1 ) );
+ FieldRangeSet s( "", BSON( "a" << 1 ), true );
for( int i = 0; i < len; i += 2 ) {
_objs.push_back( BSON( "a" << BSON( "$gte" << vals[ i ] << "$lte" << vals[ i + 1 ] ) ) );
- FieldRangeSet s2( "", _objs.back() );
+ FieldRangeSet s2( "", _objs.back(), true );
if ( i == 0 ) {
s.range( "a" ) = s2.range( "a" );
}
@@ -45,8 +46,11 @@ namespace CursorTests {
s.range( "a" ) |= s2.range( "a" );
}
}
- return new FieldRangeVector( s, BSON( "a" << 1 ), direction );
+ // intentionally leak idxSpec for the duration of this test
+ IndexSpec *idxSpec = new IndexSpec( BSON( "a" << 1 ) );
+ return new FieldRangeVector( s, *idxSpec, direction );
}
+ DBDirectClient _c;
private:
vector< BSONObj > _objs;
};
@@ -65,7 +69,8 @@ namespace CursorTests {
int v[] = { 1, 2, 4, 6 };
boost::shared_ptr< FieldRangeVector > frv( vec( v, 4 ) );
Client::Context ctx( ns );
- BtreeCursor c( nsdetails( ns ), 1, nsdetails( ns )->idx(1), frv, 1 );
+ scoped_ptr<BtreeCursor> _c( BtreeCursor::make( nsdetails( ns ), 1, nsdetails( ns )->idx(1), frv, 1 ) );
+ BtreeCursor &c = *_c.get();
ASSERT_EQUALS( "BtreeCursor a_1 multi", c.toString() );
double expected[] = { 1, 2, 4, 5, 6 };
for( int i = 0; i < 5; ++i ) {
@@ -93,7 +98,8 @@ namespace CursorTests {
int v[] = { -50, 2, 40, 60, 109, 200 };
boost::shared_ptr< FieldRangeVector > frv( vec( v, 6 ) );
Client::Context ctx( ns );
- BtreeCursor c( nsdetails( ns ), 1, nsdetails( ns )->idx(1), frv, 1 );
+ scoped_ptr<BtreeCursor> _c( BtreeCursor::make(nsdetails( ns ), 1, nsdetails( ns )->idx(1), frv, 1 ) );
+ BtreeCursor &c = *_c.get();
ASSERT_EQUALS( "BtreeCursor a_1 multi", c.toString() );
double expected[] = { 0, 1, 2, 109 };
for( int i = 0; i < 4; ++i ) {
@@ -119,7 +125,8 @@ namespace CursorTests {
int v[] = { 1, 2, 4, 6 };
boost::shared_ptr< FieldRangeVector > frv( vec( v, 4, -1 ) );
Client::Context ctx( ns );
- BtreeCursor c( nsdetails( ns ), 1, nsdetails( ns )->idx(1), frv, -1 );
+ scoped_ptr<BtreeCursor> _c( BtreeCursor::make( nsdetails( ns ), 1, nsdetails( ns )->idx(1), frv, -1 ) );
+ BtreeCursor& c = *_c.get();
ASSERT_EQUALS( "BtreeCursor a_1 reverse multi", c.toString() );
double expected[] = { 6, 5, 4, 2, 1 };
for( int i = 0; i < 5; ++i ) {
@@ -144,16 +151,23 @@ namespace CursorTests {
_c.insert( ns(), o );
}
void check( const BSONObj &spec ) {
- _c.ensureIndex( ns(), idx() );
+ {
+ BSONObj keypat = idx();
+ //cout << keypat.toString() << endl;
+ _c.ensureIndex( ns(), idx() );
+ }
+
Client::Context ctx( ns() );
- FieldRangeSet frs( ns(), spec );
- boost::shared_ptr< FieldRangeVector > frv( new FieldRangeVector( frs, idx(), direction() ) );
- BtreeCursor c( nsdetails( ns() ), 1, nsdetails( ns() )->idx( 1 ), frv, direction() );
+ FieldRangeSet frs( ns(), spec, true );
+ // intentionally leak idxSpec for the duration of this test.
+ IndexSpec *idxSpec = new IndexSpec( idx() );
+ boost::shared_ptr< FieldRangeVector > frv( new FieldRangeVector( frs, *idxSpec, direction() ) );
+ scoped_ptr<BtreeCursor> c( BtreeCursor::make( nsdetails( ns() ), 1, nsdetails( ns() )->idx( 1 ), frv, direction() ) );
Matcher m( spec );
int count = 0;
- while( c.ok() ) {
- ASSERT( m.matches( c.current() ) );
- c.advance();
+ while( c->ok() ) {
+ ASSERT( m.matches( c->current() ) );
+ c->advance();
++count;
}
int expectedCount = 0;
@@ -246,6 +260,29 @@ namespace CursorTests {
}
virtual BSONObj idx() const { return BSON( "a" << 1 << "b" << 1 ); }
};
+
+ class AbortImplicitScan : public Base {
+ public:
+ void run() {
+ dblock lk;
+ IndexSpec idx( BSON( "a" << 1 << "b" << 1 ) );
+ _c.ensureIndex( ns(), idx.keyPattern );
+ for( int i = 0; i < 300; ++i ) {
+ _c.insert( ns(), BSON( "a" << i << "b" << 5 ) );
+ }
+ FieldRangeSet frs( ns(), BSON( "b" << 3 ), true );
+ boost::shared_ptr<FieldRangeVector> frv( new FieldRangeVector( frs, idx, 1 ) );
+ Client::Context ctx( ns() );
+ scoped_ptr<BtreeCursor> c( BtreeCursor::make( nsdetails( ns() ), 1, nsdetails( ns() )->idx(1), frv, 1 ) );
+ int initialNscanned = c->nscanned();
+ ASSERT( initialNscanned < 200 );
+ ASSERT( c->ok() );
+ c->advance();
+ ASSERT( c->nscanned() > initialNscanned );
+ ASSERT( c->nscanned() < 200 );
+ ASSERT( c->ok() );
+ }
+ };
} // namespace BtreeCursorTests
@@ -262,6 +299,7 @@ namespace CursorTests {
add< BtreeCursorTests::EqIn >();
add< BtreeCursorTests::RangeEq >();
add< BtreeCursorTests::RangeIn >();
+ add< BtreeCursorTests::AbortImplicitScan >();
}
} myall;
} // namespace CursorTests
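
Several hunks above replace stack construction of BtreeCursor with a BtreeCursor::make() factory whose result is held in a scoped_ptr. A rough standalone sketch of that factory-plus-owning-pointer shape, with illustrative names only (Cursor and Cursor::make are stand-ins, not the mongo API), might be:

#include <iostream>
#include <memory>
#include <string>
#include <utility>

class Cursor {
public:
    // The factory keeps construction in one place, so subclass selection or
    // post-construction initialization can happen before callers see the object.
    static std::unique_ptr<Cursor> make(std::string description) {
        return std::unique_ptr<Cursor>(new Cursor(std::move(description)));
    }
    std::string toString() const { return description_; }
private:
    explicit Cursor(std::string description) : description_(std::move(description)) {}
    std::string description_;
};

int main() {
    std::unique_ptr<Cursor> c = Cursor::make("BtreeCursor a_1 multi");
    std::cout << c->toString() << "\n";   // ownership is scoped, like scoped_ptr in the tests
    return 0;
}
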
diff --git a/dbtests/dbtests.cpp b/dbtests/dbtests.cpp
index 8ede08d..ac44edf 100644
--- a/dbtests/dbtests.cpp
+++ b/dbtests/dbtests.cpp
@@ -19,8 +19,11 @@
#include "pch.h"
#include "dbtests.h"
+#include "../util/unittest.h"
int main( int argc, char** argv ) {
static StaticObserver StaticObserver;
+ doPreServerStatupInits();
+ UnitTest::runTests();
return Suite::run(argc, argv, "/tmp/unittest");
}
diff --git a/dbtests/directclienttests.cpp b/dbtests/directclienttests.cpp
index 204bf92..860eb7e 100644
--- a/dbtests/directclienttests.cpp
+++ b/dbtests/directclienttests.cpp
@@ -18,12 +18,12 @@
*/
#include "pch.h"
-#include "../db/query.h"
+#include "../db/ops/query.h"
#include "../db/db.h"
#include "../db/instance.h"
#include "../db/json.h"
#include "../db/lasterror.h"
-#include "../db/update.h"
+#include "../db/ops/update.h"
#include "../util/timer.h"
#include "dbtests.h"
@@ -58,9 +58,9 @@ namespace DirectClientTests {
if( pass ) {
BSONObj info;
BSONObj cmd = BSON( "captrunc" << "b" << "n" << 1 << "inc" << true );
- cout << cmd.toString() << endl;
+ //cout << cmd.toString() << endl;
bool ok = client().runCommand("a", cmd, info);
- cout << info.toString() << endl;
+ //cout << info.toString() << endl;
assert(ok);
}
@@ -69,12 +69,35 @@ namespace DirectClientTests {
}
};
+ class InsertMany : ClientBase {
+ public:
+ virtual void run(){
+ vector<BSONObj> objs;
+ objs.push_back(BSON("_id" << 1));
+ objs.push_back(BSON("_id" << 1));
+ objs.push_back(BSON("_id" << 2));
+
+
+ client().dropCollection(ns);
+ client().insert(ns, objs);
+ ASSERT_EQUALS(client().getLastErrorDetailed()["code"].numberInt(), 11000);
+ ASSERT_EQUALS((int)client().count(ns), 1);
+
+ client().dropCollection(ns);
+ client().insert(ns, objs, InsertOption_ContinueOnError);
+ ASSERT_EQUALS(client().getLastErrorDetailed()["code"].numberInt(), 11000);
+ ASSERT_EQUALS((int)client().count(ns), 2);
+ }
+
+ };
+
class All : public Suite {
public:
All() : Suite( "directclient" ) {
}
void setupTests() {
add< Capped >();
+ add< InsertMany >();
}
} myall;
}
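
The new InsertMany test checks two bulk-insert modes: by default the batch stops at the first duplicate-key error (only one document lands), while InsertOption_ContinueOnError keeps going and inserts the remaining non-duplicate documents. A toy standalone sketch of that stop-versus-continue distinction, using a std::set as a stand-in for a unique index (none of these names are the driver API), could be:

#include <cassert>
#include <set>
#include <vector>

// Insert ids into a "unique index"; return how many were stored.
// With continueOnError == false, stop at the first duplicate, mirroring
// the default bulk-insert behaviour exercised by the test above.
static int insertBatch(std::set<int>& index, const std::vector<int>& ids, bool continueOnError) {
    int inserted = 0;
    for (int id : ids) {
        if (!index.insert(id).second) {   // duplicate key
            if (!continueOnError)
                break;
            continue;
        }
        ++inserted;
    }
    return inserted;
}

int main() {
    std::vector<int> ids = {1, 1, 2};

    std::set<int> a;
    assert(insertBatch(a, ids, false) == 1);   // stops after the duplicate "1"

    std::set<int> b;
    assert(insertBatch(b, ids, true) == 2);    // "1" and "2" both stored
    return 0;
}
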
diff --git a/dbtests/framework.cpp b/dbtests/framework.cpp
index c92c8d6..95ed8b3 100644
--- a/dbtests/framework.cpp
+++ b/dbtests/framework.cpp
@@ -26,6 +26,7 @@
#include "framework.h"
#include "../util/file_allocator.h"
#include "../db/dur.h"
+#include "../util/background.h"
#ifndef _WIN32
#include <cxxabi.h>
@@ -78,7 +79,12 @@ namespace mongo {
Result * Result::cur = 0;
+ int minutesRunning = 0; // reset to 0 each time a new test starts
+ mutex minutesRunningMutex("minutesRunningMutex");
+ string currentTestName;
+
Result * Suite::run( const string& filter ) {
+ // set tlogLevel to -1 to suppress tlog() output in a test program
tlogLevel = -1;
log(1) << "\t about to setupTests" << endl;
@@ -107,6 +113,12 @@ namespace mongo {
stringstream err;
err << tc->getName() << "\t";
+ {
+ scoped_lock lk(minutesRunningMutex);
+ minutesRunning = 0;
+ currentTestName = tc->getName();
+ }
+
try {
tc->run();
passes = true;
@@ -146,6 +158,30 @@ namespace mongo {
<< options << "suite: run the specified test suite(s) only" << endl;
}
+ class TestWatchDog : public BackgroundJob {
+ public:
+ virtual string name() const { return "TestWatchDog"; }
+ virtual void run(){
+
+ while (true) {
+ sleepsecs(60);
+
+ scoped_lock lk(minutesRunningMutex);
+ minutesRunning++; // incremented once per minute; reset to 0 when a new test starts
+
+ if (minutesRunning > 30){
+ log() << currentTestName << " has been running for more than 30 minutes. aborting." << endl;
+ ::abort();
+ }
+ else if (minutesRunning > 1){
+ warning() << currentTestName << " has been running for more than " << minutesRunning-1 << " minutes." << endl;
+ }
+ }
+ }
+ };
+
+ unsigned perfHist = 1;
+
int Suite::run( int argc , char** argv , string default_dbpath ) {
unsigned long long seed = time( 0 );
string dbpathSpec;
@@ -168,10 +204,12 @@ namespace mongo {
("dur", "enable journaling")
("nodur", "disable journaling (currently the default)")
("seed", po::value<unsigned long long>(&seed), "random number seed")
+ ("perfHist", po::value<unsigned>(&perfHist), "number of back runs of perf stats to display")
;
hidden_options.add_options()
("suites", po::value< vector<string> >(), "test suites to run")
+ ("nopreallocj", "disable journal prealloc")
;
positional_options.add("suites", -1);
@@ -201,13 +239,19 @@ namespace mongo {
return EXIT_CLEAN;
}
+ bool nodur = false;
if( params.count("nodur") ) {
+ nodur = true;
cmdLine.dur = false;
}
if( params.count("dur") || cmdLine.dur ) {
cmdLine.dur = true;
}
+ if( params.count("nopreallocj") ) {
+ cmdLine.preallocj = false;
+ }
+
if (params.count("debug") || params.count("verbose") ) {
logLevel = 1;
}
@@ -255,8 +299,18 @@ namespace mongo {
srand( (unsigned) seed );
printGitVersion();
printSysInfo();
+ DEV log() << "_DEBUG build" << endl;
+ if( sizeof(void*)==4 )
+ log() << "32bit" << endl;
log() << "random seed: " << seed << endl;
+ if( time(0) % 3 == 0 && !nodur ) {
+ cmdLine.dur = true;
+ log() << "****************" << endl;
+ log() << "running with journaling enabled to test that. dbtests will do this occasionally even if --dur is not specified." << endl;
+ log() << "****************" << endl;
+ }
+
FileAllocator::get()->start();
vector<string> suites;
@@ -272,10 +326,14 @@ namespace mongo {
dur::startup();
if( debug && cmdLine.dur ) {
- cout << "setting cmdLine.durOptions=8" << endl;
- cmdLine.durOptions = 8;
+ log() << "_DEBUG: automatically enabling cmdLine.durOptions=8 (DurParanoid)" << endl;
+ // this was previously commented out, possibly because it was too slow:
+ cmdLine.durOptions |= 8;
}
+ TestWatchDog twd;
+ twd.go();
+
int ret = run(suites,filter);
#if !defined(_WIN32) && !defined(__sunos__)
@@ -315,8 +373,6 @@ namespace mongo {
Logstream::get().flush();
cout << "**************************************************" << endl;
- cout << "**************************************************" << endl;
- cout << "**************************************************" << endl;
int rc = 0;
@@ -386,4 +442,5 @@ namespace mongo {
}
void setupSignals( bool inFork ) {}
+
}
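
The TestWatchDog added above wakes once a minute, warns when the current test has been running for more than a minute, and aborts the whole process after 30 minutes; the counter is reset under a mutex each time a new test starts. A condensed standalone sketch of the same pattern using std::thread (the names here are illustrative, not the BackgroundJob API) could be:

#include <atomic>
#include <chrono>
#include <cstdlib>
#include <iostream>
#include <thread>

std::atomic<int> minutesRunning{0};   // reset to 0 whenever a new test starts

void watchdog() {
    while (true) {
        std::this_thread::sleep_for(std::chrono::minutes(1));
        int m = ++minutesRunning;
        if (m > 30) {
            std::cerr << "test has been running for more than 30 minutes, aborting\n";
            std::abort();
        } else if (m > 1) {
            std::cerr << "test has been running for more than " << m - 1 << " minutes\n";
        }
    }
}

int main() {
    std::thread(watchdog).detach();   // fire-and-forget, like BackgroundJob::go()
    // ... run test suites, setting minutesRunning = 0 at the start of each test ...
    minutesRunning = 0;
    return 0;
}
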
diff --git a/dbtests/jsobjtests.cpp b/dbtests/jsobjtests.cpp
index 6804d71..82baaaa 100644
--- a/dbtests/jsobjtests.cpp
+++ b/dbtests/jsobjtests.cpp
@@ -18,23 +18,93 @@
*/
#include "pch.h"
+#include "../bson/util/builder.h"
#include "../db/jsobj.h"
#include "../db/jsobjmanipulator.h"
#include "../db/json.h"
#include "../db/repl.h"
#include "../db/extsort.h"
-
#include "dbtests.h"
#include "../util/mongoutils/checksum.h"
+#include "../db/key.h"
+#include "../db/btree.h"
namespace JsobjTests {
+
+ IndexInterface& indexInterfaceForTheseTests = (time(0)%2) ? *IndexDetails::iis[0] : *IndexDetails::iis[1];
+
+ void keyTest(const BSONObj& o, bool mustBeCompact = false) {
+ static KeyV1Owned *kLast;
+ static BSONObj last;
+
+ KeyV1Owned *key = new KeyV1Owned(o);
+ KeyV1Owned& k = *key;
+
+ ASSERT( !mustBeCompact || k.isCompactFormat() );
+
+ BSONObj x = k.toBson();
+ int res = o.woCompare(x, BSONObj(), /*considerfieldname*/false);
+ if( res ) {
+ cout << o.toString() << endl;
+ k.toBson();
+ cout << x.toString() << endl;
+ o.woCompare(x, BSONObj(), /*considerfieldname*/false);
+ ASSERT( res == 0 );
+ }
+ ASSERT( k.woEqual(k) );
+ ASSERT( !k.isCompactFormat() || k.dataSize() < o.objsize() );
+
+ {
+ // check BSONObj::equal; this part is not a KeyV1 test.
+ int res = o.woCompare(last);
+ ASSERT( (res==0) == o.equal(last) );
+ }
+
+ if( kLast ) {
+ int r1 = o.woCompare(last, BSONObj(), false);
+ int r2 = k.woCompare(*kLast, Ordering::make(BSONObj()));
+ bool ok = (r1<0 && r2<0) || (r1>0&&r2>0) || r1==r2;
+ if( !ok ) {
+ cout << "r1r2 " << r1 << ' ' << r2 << endl;
+ cout << "o:" << o.toString() << endl;
+ cout << "last:" << last.toString() << endl;
+ cout << "k:" << k.toString() << endl;
+ cout << "kLast:" << kLast->toString() << endl;
+ int r3 = k.woCompare(*kLast, Ordering::make(BSONObj()));
+ cout << r3 << endl;
+ }
+ ASSERT(ok);
+ if( k.isCompactFormat() && kLast->isCompactFormat() ) { // only check when both keys are compact, since bson woEqual was broken (as of May 2011)
+ if( k.woEqual(*kLast) != (r2 == 0) ) { // check woEqual matches
+ cout << r2 << endl;
+ cout << k.toString() << endl;
+ cout << kLast->toString() << endl;
+ k.woEqual(*kLast);
+ ASSERT(false);
+ }
+ }
+ }
+
+ delete kLast;
+ kLast = key;
+ last = o.getOwned();
+ }
+
class BufBuilderBasic {
public:
void run() {
- BufBuilder b( 0 );
- b.appendStr( "foo" );
- ASSERT_EQUALS( 4, b.len() );
- ASSERT( strcmp( "foo", b.buf() ) == 0 );
+ {
+ BufBuilder b( 0 );
+ b.appendStr( "foo" );
+ ASSERT_EQUALS( 4, b.len() );
+ ASSERT( strcmp( "foo", b.buf() ) == 0 );
+ }
+ {
+ mongo::StackBufBuilder b;
+ b.appendStr( "foo" );
+ ASSERT_EQUALS( 4, b.len() );
+ ASSERT( strcmp( "foo", b.buf() ) == 0 );
+ }
}
};
@@ -42,6 +112,9 @@ namespace JsobjTests {
public:
void run() {
ASSERT_EQUALS( 1, BSONElement().size() );
+
+ BSONObj x;
+ ASSERT_EQUALS( 1, x.firstElement().size() );
}
};
@@ -172,16 +245,36 @@ namespace JsobjTests {
ASSERT( BSON( "" << "b" << "" << "b" ).woCompare( BSON( "" << "c" ) , key ) < 0 );
{
+ // test a big key
+ string x(2000, 'z');
+ BSONObj o = BSON( "q" << x );
+ keyTest(o, false);
+ }
+ {
+ string y(200, 'w');
+ BSONObjBuilder b;
+ for( int i = 0; i < 10; i++ ) {
+ b.append("x", y);
+ }
+ keyTest(b.obj(), true);
+ }
+ {
+ double nan = numeric_limits<double>::quiet_NaN();
+ BSONObj o = BSON( "y" << nan );
+ keyTest(o);
+ }
+
+ {
BSONObjBuilder b;
b.append( "" , "c" );
b.appendNull( "" );
BSONObj o = b.obj();
+ keyTest(o);
ASSERT( o.woSortOrder( BSON( "" << "b" << "" << "h" ) , key ) > 0 );
ASSERT( BSON( "" << "b" << "" << "h" ).woSortOrder( o , key ) < 0 );
}
-
ASSERT( BSON( "" << "a" ).woCompare( BSON( "" << "a" << "" << "c" ) ) < 0 );
{
BSONObjBuilder b;
@@ -196,6 +289,13 @@ namespace JsobjTests {
class TimestampTest : public Base {
public:
void run() {
+ Client *c = currentClient.get();
+ if( c == 0 ) {
+ Client::initThread("pretouchN");
+ c = &cc();
+ }
+ writelock lk(""); // for initTimestamp
+
BSONObjBuilder b;
b.appendTimestamp( "a" );
BSONObj o = b.done();
@@ -236,21 +336,34 @@ namespace JsobjTests {
double inf = numeric_limits< double >::infinity();
double nan = numeric_limits< double >::quiet_NaN();
double nan2 = numeric_limits< double >::signaling_NaN();
+ ASSERT( isNaN(nan) );
+ ASSERT( isNaN(nan2) );
+ ASSERT( !isNaN(inf) );
ASSERT( BSON( "a" << inf ).woCompare( BSON( "a" << inf ) ) == 0 );
- ASSERT( BSON( "a" << inf ).woCompare( BSON( "a" << 1 ) ) < 0 );
- ASSERT( BSON( "a" << 1 ).woCompare( BSON( "a" << inf ) ) > 0 );
+ ASSERT( BSON( "a" << inf ).woCompare( BSON( "a" << 1 ) ) > 0 );
+ ASSERT( BSON( "a" << 1 ).woCompare( BSON( "a" << inf ) ) < 0 );
ASSERT( BSON( "a" << nan ).woCompare( BSON( "a" << nan ) ) == 0 );
ASSERT( BSON( "a" << nan ).woCompare( BSON( "a" << 1 ) ) < 0 );
+
+ ASSERT( BSON( "a" << nan ).woCompare( BSON( "a" << 5000000000LL ) ) < 0 );
+
+ {
+ KeyV1Owned a( BSON( "a" << nan ) );
+ KeyV1Owned b( BSON( "a" << 1 ) );
+ Ordering o = Ordering::make(BSON("a"<<1));
+ ASSERT( a.woCompare(b, o) < 0 );
+ }
+
ASSERT( BSON( "a" << 1 ).woCompare( BSON( "a" << nan ) ) > 0 );
ASSERT( BSON( "a" << nan2 ).woCompare( BSON( "a" << nan2 ) ) == 0 );
ASSERT( BSON( "a" << nan2 ).woCompare( BSON( "a" << 1 ) ) < 0 );
ASSERT( BSON( "a" << 1 ).woCompare( BSON( "a" << nan2 ) ) > 0 );
- ASSERT( BSON( "a" << inf ).woCompare( BSON( "a" << nan ) ) == 0 );
- ASSERT( BSON( "a" << inf ).woCompare( BSON( "a" << nan2 ) ) == 0 );
+ ASSERT( BSON( "a" << inf ).woCompare( BSON( "a" << nan ) ) > 0 );
+ ASSERT( BSON( "a" << inf ).woCompare( BSON( "a" << nan2 ) ) > 0 );
ASSERT( BSON( "a" << nan ).woCompare( BSON( "a" << nan2 ) ) == 0 );
}
};
@@ -322,6 +435,14 @@ namespace JsobjTests {
struct AppendIntOrLL {
void run() {
const long long billion = 1000*1000*1000;
+
+ {
+ BSONObjBuilder b;
+ b.appendIntOrLL("L4", -4*billion);
+ keyTest(b.obj());
+ keyTest( BSON("" << billion) );
+ }
+
BSONObjBuilder b;
b.appendIntOrLL("i1", 1);
b.appendIntOrLL("i2", -1);
@@ -336,6 +457,7 @@ namespace JsobjTests {
b.appendIntOrLL("L6", -16*billion);
BSONObj o = b.obj();
+ keyTest(o);
ASSERT(o["i1"].type() == NumberInt);
ASSERT(o["i1"].number() == 1);
@@ -371,6 +493,7 @@ namespace JsobjTests {
b.appendNumber( "e" , 1024LL*1024*1024*1024*1024*1024 );
BSONObj o = b.obj();
+ keyTest(o);
ASSERT( o["a"].type() == NumberInt );
ASSERT( o["b"].type() == NumberDouble );
@@ -386,7 +509,144 @@ namespace JsobjTests {
void run() {
string spec = "{ a: [ \"a\", \"b\" ] }";
ASSERT_EQUALS( spec, fromjson( spec ).toString() );
- }
+
+ BSONObj x = BSON( "a" << "astring" << "b" << "str" );
+ keyTest(x);
+ keyTest(x);
+ BSONObj y = BSON( "a" << "astring" << "b" << "stra" );
+ keyTest(y);
+ y = BSON( "a" << "" );
+ keyTest(y);
+
+ keyTest( BSON("abc" << true ) );
+ keyTest( BSON("abc" << false ) );
+ keyTest( BSON("abc" << false << "b" << true ) );
+
+ Date_t now = jsTime();
+ keyTest( BSON("" << now << "" << 3 << "" << jstNULL << "" << true) );
+ keyTest( BSON("" << now << "" << 3 << "" << BSONObj() << "" << true) );
+
+ {
+ {
+ // check signed dates with new key format
+ KeyV1Owned a( BSONObjBuilder().appendDate("", -50).obj() );
+ KeyV1Owned b( BSONObjBuilder().appendDate("", 50).obj() );
+ ASSERT( a.woCompare(b, Ordering::make(BSONObj())) < 0 );
+ }
+ {
+ // backward compatibility
+ KeyBson a( BSONObjBuilder().appendDate("", -50).obj() );
+ KeyBson b( BSONObjBuilder().appendDate("", 50).obj() );
+ ASSERT( a.woCompare(b, Ordering::make(BSONObj())) > 0 );
+ }
+ {
+ // this key cannot be stored in compact format:
+ BSONObj uc1 = BSONObjBuilder().appendDate("", -50).appendCode("", "abc").obj();
+ BSONObj uc2 = BSONObjBuilder().appendDate("", 55).appendCode("", "abc").obj();
+ ASSERT( uc1.woCompare(uc2, Ordering::make(BSONObj())) < 0 );
+ {
+ KeyV1Owned a(uc1);
+ KeyV1Owned b(uc2);
+ ASSERT( !a.isCompactFormat() );
+ ASSERT( a.woCompare(b, Ordering::make(BSONObj())) < 0 );
+ }
+ {
+ KeyBson a(uc1);
+ KeyBson b(uc2);
+ ASSERT( !a.isCompactFormat() );
+ ASSERT( a.woCompare(b, Ordering::make(BSONObj())) > 0 );
+ }
+ }
+ }
+
+ {
+ BSONObjBuilder b;
+ b.appendBinData("f", 8, (BinDataType) 1, "aaaabbbb");
+ b.appendBinData("e", 3, (BinDataType) 1, "aaa");
+ b.appendBinData("b", 1, (BinDataType) 1, "x");
+ BSONObj o = b.obj();
+ keyTest( o, true );
+ }
+
+ {
+ // check (non)equality
+ BSONObj a = BSONObjBuilder().appendBinData("", 8, (BinDataType) 1, "abcdefgh").obj();
+ BSONObj b = BSONObjBuilder().appendBinData("", 8, (BinDataType) 1, "abcdefgj").obj();
+ ASSERT( !a.equal(b) );
+ int res_ab = a.woCompare(b);
+ ASSERT( res_ab != 0 );
+ keyTest( a, true );
+ keyTest( b, true );
+
+ // check that different subtypes do not compare equal
+ BSONObj c = BSONObjBuilder().appendBinData("", 8, (BinDataType) 4, "abcdefgh").obj();
+ BSONObj d = BSONObjBuilder().appendBinData("", 8, (BinDataType) 0x81, "abcdefgh").obj();
+ ASSERT( !a.equal(c) );
+ int res_ac = a.woCompare(c);
+ ASSERT( res_ac != 0 );
+ keyTest( c, true );
+ ASSERT( !a.equal(d) );
+ int res_ad = a.woCompare(d);
+ ASSERT( res_ad != 0 );
+ keyTest( d, true );
+
+ KeyV1Owned A(a);
+ KeyV1Owned B(b);
+ KeyV1Owned C(c);
+ KeyV1Owned D(d);
+ ASSERT( !A.woEqual(B) );
+ ASSERT( A.woCompare(B, Ordering::make(BSONObj())) < 0 && res_ab < 0 );
+ ASSERT( !A.woEqual(C) );
+ ASSERT( A.woCompare(C, Ordering::make(BSONObj())) < 0 && res_ac < 0 );
+ ASSERT( !A.woEqual(D) );
+ ASSERT( A.woCompare(D, Ordering::make(BSONObj())) < 0 && res_ad < 0 );
+ }
+
+ {
+ BSONObjBuilder b;
+ b.appendBinData("f", 33, (BinDataType) 1, "123456789012345678901234567890123");
+ BSONObj o = b.obj();
+ keyTest( o, false );
+ }
+
+ {
+ for( int i = 1; i <= 3; i++ ) {
+ for( int j = 1; j <= 3; j++ ) {
+ BSONObjBuilder b;
+ b.appendBinData("f", i, (BinDataType) j, "abc");
+ BSONObj o = b.obj();
+ keyTest( o, j != ByteArrayDeprecated );
+ }
+ }
+ }
+
+ {
+ BSONObjBuilder b;
+ b.appendBinData("f", 1, (BinDataType) 133, "a");
+ BSONObj o = b.obj();
+ keyTest( o, true );
+ }
+
+ {
+ BSONObjBuilder b;
+ b.append("AA", 3);
+ b.appendBinData("f", 0, (BinDataType) 0, "");
+ b.appendBinData("e", 3, (BinDataType) 7, "aaa");
+ b.appendBinData("b", 1, (BinDataType) 128, "x");
+ b.append("z", 3);
+ b.appendBinData("bb", 0, (BinDataType) 129, "x");
+ BSONObj o = b.obj();
+ keyTest( o, true );
+ }
+
+ {
+ // a length of 9 is not supported in compact format, so test a non-compact case here.
+ BSONObjBuilder b;
+ b.appendBinData("f", 9, (BinDataType) 0, "aaaabbbbc");
+ BSONObj o = b.obj();
+ keyTest( o );
+ }
+ }
};
class ToStringNumber {
@@ -405,6 +665,8 @@ namespace JsobjTests {
b.append( "g" , -123.456 );
BSONObj x = b.obj();
+ keyTest(x);
+
ASSERT_EQUALS( "4", x["a"].toString( false , true ) );
ASSERT_EQUALS( "5.0", x["b"].toString( false , true ) );
ASSERT_EQUALS( "6", x["c"].toString( false , true ) );
@@ -421,11 +683,44 @@ namespace JsobjTests {
class NullString {
public:
void run() {
+ {
+ BSONObjBuilder b;
+ const char x[] = {'a', 0, 'b', 0};
+ b.append("field", x, 4);
+ b.append("z", true);
+ BSONObj B = b.obj();
+ //cout << B.toString() << endl;
+
+ BSONObjBuilder a;
+ const char xx[] = {'a', 0, 'c', 0};
+ a.append("field", xx, 4);
+ a.append("z", true);
+ BSONObj A = a.obj();
+
+ BSONObjBuilder c;
+ const char xxx[] = {'a', 0, 'c', 0, 0};
+ c.append("field", xxx, 5);
+ c.append("z", true);
+ BSONObj C = c.obj();
+
+ // test that nulls are ok within bson strings
+ ASSERT( !(A == B) );
+ ASSERT( A > B );
+
+ ASSERT( !(B == C) );
+ ASSERT( C > B );
+
+ // check iteration is ok
+ ASSERT( B["z"].Bool() && A["z"].Bool() && C["z"].Bool() );
+ }
+
BSONObjBuilder b;
b.append("a", "a\0b", 4);
- b.append("b", string("a\0b", 3));
+ string z("a\0b", 3);
+ b.append("b", z);
b.appendAs(b.asTempObj()["a"], "c");
BSONObj o = b.obj();
+ keyTest(o);
stringstream ss;
ss << 'a' << '\0' << 'b';
@@ -438,6 +733,7 @@ namespace JsobjTests {
ASSERT_EQUALS(o["c"].valuestrsize(), 3+1);
ASSERT_EQUALS(o["c"].str(), ss.str());
+
}
};
@@ -479,6 +775,7 @@ namespace JsobjTests {
ASSERT_EQUALS( 2 , o.getFieldDotted( "b.a" ).numberInt() );
ASSERT_EQUALS( 3 , o.getFieldDotted( "c.0.a" ).numberInt() );
ASSERT_EQUALS( 4 , o.getFieldDotted( "c.1.a" ).numberInt() );
+ keyTest(o);
}
};
@@ -796,6 +1093,7 @@ namespace JsobjTests {
b.appendOID( "b" , 0 , false );
b.appendOID( "c" , 0 , true );
BSONObj o = b.obj();
+ keyTest(o);
ASSERT( o["a"].__oid().str() == "000000000000000000000000" );
ASSERT( o["b"].__oid().str() == "000000000000000000000000" );
@@ -1074,11 +1372,10 @@ namespace JsobjTests {
stringstream ss;
ss << "type: " << t;
string s = ss.str();
- massert( 10403 , s , min( t ).woCompare( max( t ) ) < 0 );
- massert( 10404 , s , max( t ).woCompare( min( t ) ) > 0 );
- massert( 10405 , s , min( t ).woCompare( min( t ) ) == 0 );
- massert( 10406 , s , max( t ).woCompare( max( t ) ) == 0 );
- massert( 10407 , s , abs( min( t ).firstElement().canonicalType() - max( t ).firstElement().canonicalType() ) <= 10 );
+ ASSERT( min( t ).woCompare( max( t ) ) <= 0 );
+ ASSERT( max( t ).woCompare( min( t ) ) >= 0 );
+ ASSERT( min( t ).woCompare( min( t ) ) == 0 );
+ ASSERT( max( t ).woCompare( max( t ) ) == 0 );
}
}
};
@@ -1091,7 +1388,7 @@ namespace JsobjTests {
assert( BSON( "b" << 11 ).woCompare( x.extractFields( BSON( "b" << 1 ) ) ) == 0 );
assert( x.woCompare( x.extractFields( BSON( "a" << 1 << "b" << 1 ) ) ) == 0 );
- assert( (string)"a" == x.extractFields( BSON( "a" << 1 << "c" << 1 ) ).firstElement().fieldName() );
+ assert( (string)"a" == x.extractFields( BSON( "a" << 1 << "c" << 1 ) ).firstElementFieldName() );
}
};
@@ -1147,7 +1444,8 @@ namespace JsobjTests {
class Basic1 {
public:
void run() {
- BSONObjExternalSorter sorter;
+ BSONObjExternalSorter sorter(indexInterfaceForTheseTests);
+
sorter.add( BSON( "x" << 10 ) , 5 , 1);
sorter.add( BSON( "x" << 2 ) , 3 , 1 );
sorter.add( BSON( "x" << 5 ) , 6 , 1 );
@@ -1179,7 +1477,7 @@ namespace JsobjTests {
class Basic2 {
public:
void run() {
- BSONObjExternalSorter sorter( BSONObj() , 10 );
+ BSONObjExternalSorter sorter( indexInterfaceForTheseTests, BSONObj() , 10 );
sorter.add( BSON( "x" << 10 ) , 5 , 11 );
sorter.add( BSON( "x" << 2 ) , 3 , 1 );
sorter.add( BSON( "x" << 5 ) , 6 , 1 );
@@ -1212,7 +1510,7 @@ namespace JsobjTests {
class Basic3 {
public:
void run() {
- BSONObjExternalSorter sorter( BSONObj() , 10 );
+ BSONObjExternalSorter sorter( indexInterfaceForTheseTests, BSONObj() , 10 );
sorter.sort();
auto_ptr<BSONObjExternalSorter::Iterator> i = sorter.iterator();
@@ -1225,7 +1523,7 @@ namespace JsobjTests {
class ByDiskLock {
public:
void run() {
- BSONObjExternalSorter sorter;
+ BSONObjExternalSorter sorter(indexInterfaceForTheseTests);
sorter.add( BSON( "x" << 10 ) , 5 , 4);
sorter.add( BSON( "x" << 2 ) , 3 , 0 );
sorter.add( BSON( "x" << 5 ) , 6 , 2 );
@@ -1259,7 +1557,7 @@ namespace JsobjTests {
class Big1 {
public:
void run() {
- BSONObjExternalSorter sorter( BSONObj() , 2000 );
+ BSONObjExternalSorter sorter( indexInterfaceForTheseTests, BSONObj() , 2000 );
for ( int i=0; i<10000; i++ ) {
sorter.add( BSON( "x" << rand() % 10000 ) , 5 , i );
}
@@ -1284,7 +1582,7 @@ namespace JsobjTests {
public:
void run() {
const int total = 100000;
- BSONObjExternalSorter sorter( BSONObj() , total * 2 );
+ BSONObjExternalSorter sorter( indexInterfaceForTheseTests, BSONObj() , total * 2 );
for ( int i=0; i<total; i++ ) {
sorter.add( BSON( "a" << "b" ) , 5 , i );
}
@@ -1314,7 +1612,7 @@ namespace JsobjTests {
b.appendNull("");
BSONObj x = b.obj();
- BSONObjExternalSorter sorter;
+ BSONObjExternalSorter sorter(indexInterfaceForTheseTests);
sorter.add(x, DiskLoc(3,7));
sorter.add(x, DiskLoc(4,7));
sorter.add(x, DiskLoc(2,7));
@@ -1422,6 +1720,8 @@ namespace JsobjTests {
ASSERT_EQUALS(obj, arr);
BSONObj o = BSON( "obj" << obj << "arr" << arr << "arr2" << BSONArray(obj) );
+ keyTest(o);
+
ASSERT_EQUALS(o["obj"].type(), Object);
ASSERT_EQUALS(o["arr"].type(), Array);
ASSERT_EQUALS(o["arr2"].type(), Array);
@@ -1488,8 +1788,8 @@ namespace JsobjTests {
while ( j.more() )
l += strlen( j.next().fieldName() );
}
- unsigned long long tm = t.micros();
- cout << "time: " << tm << endl;
+ //unsigned long long tm = t.micros();
+ //cout << "time: " << tm << endl;
}
}
@@ -1528,7 +1828,7 @@ namespace JsobjTests {
{
char * crap = (char*)malloc( x.objsize() );
memcpy( crap , x.objdata() , x.objsize() );
- BSONObj y( crap , false );
+ BSONObj y( crap );
ASSERT_EQUALS( x , y );
free( crap );
}
@@ -1540,7 +1840,7 @@ namespace JsobjTests {
foo[0] = 123123123;
int state = 0;
try {
- BSONObj y( crap , false );
+ BSONObj y( crap );
state = 1;
}
catch ( std::exception& e ) {
@@ -1562,7 +1862,7 @@ namespace JsobjTests {
BSONElement a = x["a"];
BSONElement b = x["b"];
BSONElement c = x["c"];
- cout << "c: " << c << endl;
+ //cout << "c: " << c << endl;
ASSERT( a.woCompare( b ) != 0 );
ASSERT( a.woCompare( b , false ) == 0 );
@@ -1607,6 +1907,7 @@ namespace JsobjTests {
void run() {
BSONObj x = BSON( "a" << BSON( "b" << 1 ) );
BSONObj y = BSON( "a" << BSON( "b" << 1.0 ) );
+ keyTest(x); keyTest(y);
ASSERT_EQUALS( x , y );
ASSERT_EQUALS( 0 , x.woCompare( y ) );
}
@@ -1736,27 +2037,27 @@ namespace JsobjTests {
<< "asdasdasdas" << "asldkasldjasldjasldjlasjdlasjdlasdasdasdasdasdasdasd" );
{
- Timer t;
+ //Timer t;
for ( int i=0; i<N; i++ )
x.md5();
- int millis = t.millis();
- cout << "md5 : " << millis << endl;
+ //int millis = t.millis();
+ //cout << "md5 : " << millis << endl;
}
{
- Timer t;
+ //Timer t;
for ( int i=0; i<N; i++ )
x.toString();
- int millis = t.millis();
- cout << "toString : " << millis << endl;
+ //int millis = t.millis();
+ //cout << "toString : " << millis << endl;
}
{
- Timer t;
+ //Timer t;
for ( int i=0; i<N; i++ )
checksum( x.objdata() , x.objsize() );
- int millis = t.millis();
- cout << "checksum : " << millis << endl;
+ //int millis = t.millis();
+ //cout << "checksum : " << millis << endl;
}
}
@@ -1770,6 +2071,7 @@ namespace JsobjTests {
void setupTests() {
add< BufBuilderBasic >();
add< BSONElementBasic >();
+ add< BSONObjTests::NullString >();
add< BSONObjTests::Create >();
add< BSONObjTests::WoCompareBasic >();
add< BSONObjTests::NumericCompareBasic >();
@@ -1786,7 +2088,6 @@ namespace JsobjTests {
add< BSONObjTests::AppendNumber >();
add< BSONObjTests::ToStringArray >();
add< BSONObjTests::ToStringNumber >();
- add< BSONObjTests::NullString >();
add< BSONObjTests::AppendAs >();
add< BSONObjTests::ArrayAppendAs >();
add< BSONObjTests::GetField >();
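
The jsobjtests hunks above flip several NaN/infinity expectations: under the corrected woCompare order, NaN sorts below every number and infinity sorts above them, which differs from raw C++ comparisons where any comparison involving NaN is false. A small standalone comparator sketch showing that "NaN sorts lowest" total order on plain doubles (not BSON) might be:

#include <algorithm>
#include <cassert>
#include <cmath>
#include <limits>
#include <vector>

// Total order on doubles that puts NaN before every other value,
// mirroring the woCompare expectations asserted in the tests above.
static bool nanLowest(double a, double b) {
    bool an = std::isnan(a), bn = std::isnan(b);
    if (an || bn)
        return an && !bn;   // NaN < non-NaN; NaN compares equal to NaN
    return a < b;
}

int main() {
    double inf = std::numeric_limits<double>::infinity();
    double nan = std::numeric_limits<double>::quiet_NaN();

    assert(!(nan < 1.0));           // raw IEEE comparison: always false with NaN
    assert(nanLowest(nan, 1.0));    // but the index order places NaN first
    assert(nanLowest(1.0, inf));    // and infinity last
    assert(!nanLowest(nan, nan));   // NaN ties with NaN in this order

    std::vector<double> v = {inf, 1.0, nan};
    std::sort(v.begin(), v.end(), nanLowest);
    assert(std::isnan(v[0]) && v[2] == inf);
    return 0;
}
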
diff --git a/dbtests/jsontests.cpp b/dbtests/jsontests.cpp
index b630523..efaee44 100644
--- a/dbtests/jsontests.cpp
+++ b/dbtests/jsontests.cpp
@@ -386,7 +386,8 @@ namespace JsonTests {
b.appendMaxKey( "u" );
BSONObj o = b.obj();
- cout << o.jsonString() << endl;
+ o.jsonString();
+ //cout << o.jsonString() << endl;
}
};
@@ -499,16 +500,21 @@ namespace JsonTests {
}
};
- class FancyNumber {
- public:
- virtual ~FancyNumber() {}
- void run() {
- ASSERT_EQUALS( int( 1000000 * bson().firstElement().number() ),
- int( 1000000 * fromjson( json() ).firstElement().number() ) );
+ class RealNumber : public Base {
+ virtual BSONObj bson() const {
+ BSONObjBuilder b;
+ b.append( "a", strtod( "0.7", 0 ) );
+ return b.obj();
}
+ virtual string json() const {
+ return "{ \"a\" : 0.7 }";
+ }
+ };
+
+ class FancyNumber : public Base {
virtual BSONObj bson() const {
BSONObjBuilder b;
- b.append( "a", -4.4433e-2 );
+ b.append( "a", strtod( "-4.4433e-2", 0 ) );
return b.obj();
}
virtual string json() const {
@@ -1124,6 +1130,7 @@ namespace JsonTests {
add< FromJsonTests::ReservedFieldName >();
add< FromJsonTests::OkDollarFieldName >();
add< FromJsonTests::SingleNumber >();
+ add< FromJsonTests::RealNumber >();
add< FromJsonTests::FancyNumber >();
add< FromJsonTests::TwoElements >();
add< FromJsonTests::Subobject >();
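
The RealNumber/FancyNumber change above builds the expected BSON value with strtod() on the same literal text the JSON parser sees, rather than with a compiled-in double literal; the test then compares strtod output against strtod output, so it cannot be broken by literal-versus-runtime rounding differences. A tiny standalone illustration of the idea, using only the standard library, could be:

#include <cassert>
#include <cstdlib>

int main() {
    // Stand-in for the JSON parser's numeric conversion.
    double parsed = std::strtod("-4.4433e-2", 0);

    // Old approach: compare only ~6 significant digits, because a compiled-in
    // literal is not guaranteed to round identically to strtod's result.
    assert(int(1000000 * parsed) == int(1000000 * -4.4433e-2));

    // New approach: build the expected value with the same strtod call, so an
    // exact equality comparison is safe.
    double expected = std::strtod("-4.4433e-2", 0);
    assert(parsed == expected);
    return 0;
}
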
diff --git a/dbtests/jstests.cpp b/dbtests/jstests.cpp
index c33b200..f4be230 100644
--- a/dbtests/jstests.cpp
+++ b/dbtests/jstests.cpp
@@ -106,24 +106,25 @@ namespace JSTests {
void run() {
Scope * s = globalScriptEngine->newScope();
- s->invoke( "x=5;" , BSONObj() );
+ s->invoke( "x=5;" , 0, 0 );
ASSERT( 5 == s->getNumber( "x" ) );
- s->invoke( "return 17;" , BSONObj() );
+ s->invoke( "return 17;" , 0, 0 );
ASSERT( 17 == s->getNumber( "return" ) );
- s->invoke( "function(){ return 17; }" , BSONObj() );
+ s->invoke( "function(){ return 17; }" , 0, 0 );
ASSERT( 17 == s->getNumber( "return" ) );
s->setNumber( "x" , 1.76 );
- s->invoke( "return x == 1.76; " , BSONObj() );
+ s->invoke( "return x == 1.76; " , 0, 0 );
ASSERT( s->getBoolean( "return" ) );
s->setNumber( "x" , 1.76 );
- s->invoke( "return x == 1.79; " , BSONObj() );
+ s->invoke( "return x == 1.79; " , 0, 0 );
ASSERT( ! s->getBoolean( "return" ) );
- s->invoke( "function( z ){ return 5 + z; }" , BSON( "" << 11 ) );
+ BSONObj obj = BSON( "" << 11.0 );
+ s->invoke( "function( z ){ return 5 + z; }" , &obj, 0 );
ASSERT_EQUALS( 16 , s->getNumber( "return" ) );
delete s;
@@ -135,52 +136,51 @@ namespace JSTests {
void run() {
Scope * s = globalScriptEngine->newScope();
- BSONObj o = BSON( "x" << 17 << "y" << "eliot" << "z" << "sara" );
+ BSONObj o = BSON( "x" << 17.0 << "y" << "eliot" << "z" << "sara" );
s->setObject( "blah" , o );
- s->invoke( "return blah.x;" , BSONObj() );
+ s->invoke( "return blah.x;" , 0, 0 );
ASSERT_EQUALS( 17 , s->getNumber( "return" ) );
- s->invoke( "return blah.y;" , BSONObj() );
+ s->invoke( "return blah.y;" , 0, 0 );
ASSERT_EQUALS( "eliot" , s->getString( "return" ) );
- s->setThis( & o );
- s->invoke( "return this.z;" , BSONObj() );
+ s->invoke( "return this.z;" , 0, &o );
ASSERT_EQUALS( "sara" , s->getString( "return" ) );
- s->invoke( "return this.z == 'sara';" , BSONObj() );
+ s->invoke( "return this.z == 'sara';" , 0, &o );
ASSERT_EQUALS( true , s->getBoolean( "return" ) );
- s->invoke( "this.z == 'sara';" , BSONObj() );
+ s->invoke( "this.z == 'sara';" , 0, &o );
ASSERT_EQUALS( true , s->getBoolean( "return" ) );
- s->invoke( "this.z == 'asara';" , BSONObj() );
+ s->invoke( "this.z == 'asara';" , 0, &o );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
- s->invoke( "return this.x == 17;" , BSONObj() );
+ s->invoke( "return this.x == 17;" , 0, &o );
ASSERT_EQUALS( true , s->getBoolean( "return" ) );
- s->invoke( "return this.x == 18;" , BSONObj() );
+ s->invoke( "return this.x == 18;" , 0, &o );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
- s->invoke( "function(){ return this.x == 17; }" , BSONObj() );
+ s->invoke( "function(){ return this.x == 17; }" , 0, &o );
ASSERT_EQUALS( true , s->getBoolean( "return" ) );
- s->invoke( "function(){ return this.x == 18; }" , BSONObj() );
+ s->invoke( "function(){ return this.x == 18; }" , 0, &o );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
- s->invoke( "function (){ return this.x == 17; }" , BSONObj() );
+ s->invoke( "function (){ return this.x == 17; }" , 0, &o );
ASSERT_EQUALS( true , s->getBoolean( "return" ) );
- s->invoke( "function z(){ return this.x == 18; }" , BSONObj() );
+ s->invoke( "function z(){ return this.x == 18; }" , 0, &o );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
- s->invoke( "function (){ this.x == 17; }" , BSONObj() );
+ s->invoke( "function (){ this.x == 17; }" , 0, &o );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
- s->invoke( "function z(){ this.x == 18; }" , BSONObj() );
+ s->invoke( "function z(){ this.x == 18; }" , 0, &o );
ASSERT_EQUALS( false , s->getBoolean( "return" ) );
- s->invoke( "x = 5; for( ; x <10; x++){ a = 1; }" , BSONObj() );
+ s->invoke( "x = 5; for( ; x <10; x++){ a = 1; }" , 0, &o );
ASSERT_EQUALS( 10 , s->getNumber( "x" ) );
delete s;
@@ -192,12 +192,12 @@ namespace JSTests {
void run() {
Scope * s = globalScriptEngine->newScope();
- s->invoke( "z = { num : 1 };" , BSONObj() );
+ s->invoke( "z = { num : 1 };" , 0, 0 );
BSONObj out = s->getObject( "z" );
ASSERT_EQUALS( 1 , out["num"].number() );
ASSERT_EQUALS( 1 , out.nFields() );
- s->invoke( "z = { x : 'eliot' };" , BSONObj() );
+ s->invoke( "z = { x : 'eliot' };" , 0, 0 );
out = s->getObject( "z" );
ASSERT_EQUALS( (string)"eliot" , out["x"].valuestr() );
ASSERT_EQUALS( 1 , out.nFields() );
@@ -219,7 +219,7 @@ namespace JSTests {
s->localConnect( "blah" );
- s->invoke( "z = { _id : new ObjectId() , a : 123 };" , BSONObj() );
+ s->invoke( "z = { _id : new ObjectId() , a : 123 };" , 0, 0 );
BSONObj out = s->getObject( "z" );
ASSERT_EQUALS( 123 , out["a"].number() );
ASSERT_EQUALS( jstOID , out["_id"].type() );
@@ -228,13 +228,13 @@ namespace JSTests {
s->setObject( "a" , out );
- s->invoke( "y = { _id : a._id , a : 124 };" , BSONObj() );
+ s->invoke( "y = { _id : a._id , a : 124 };" , 0, 0 );
out = s->getObject( "y" );
ASSERT_EQUALS( 124 , out["a"].number() );
ASSERT_EQUALS( jstOID , out["_id"].type() );
ASSERT_EQUALS( out["_id"].__oid().str() , save.str() );
- s->invoke( "y = { _id : new ObjectId( a._id ) , a : 125 };" , BSONObj() );
+ s->invoke( "y = { _id : new ObjectId( a._id ) , a : 125 };" , 0, 0 );
out = s->getObject( "y" );
ASSERT_EQUALS( 125 , out["a"].number() );
ASSERT_EQUALS( jstOID , out["_id"].type() );
@@ -274,15 +274,15 @@ namespace JSTests {
BSONObj o = BSON( "x" << 17 << "y" << "eliot" << "z" << "sara" << "zz" << BSONObj() );
s->setObject( "blah" , o , true );
- s->invoke( "blah.y = 'e'", BSONObj() );
+ s->invoke( "blah.y = 'e'", 0, 0 );
BSONObj out = s->getObject( "blah" );
ASSERT( strlen( out["y"].valuestr() ) > 1 );
- s->invoke( "blah.a = 19;" , BSONObj() );
+ s->invoke( "blah.a = 19;" , 0, 0 );
out = s->getObject( "blah" );
ASSERT( out["a"].eoo() );
- s->invoke( "blah.zz.a = 19;" , BSONObj() );
+ s->invoke( "blah.zz.a = 19;" , 0, 0 );
out = s->getObject( "blah" );
ASSERT( out["zz"].embeddedObject()["a"].eoo() );
@@ -290,12 +290,12 @@ namespace JSTests {
out = s->getObject( "blah" );
ASSERT( out["zz"].embeddedObject()["a"].eoo() );
- s->invoke( "delete blah['x']" , BSONObj() );
+ s->invoke( "delete blah['x']" , 0, 0 );
out = s->getObject( "blah" );
ASSERT( !out["x"].eoo() );
// read-only object itself can be overwritten
- s->invoke( "blah = {}", BSONObj() );
+ s->invoke( "blah = {}", 0, 0 );
out = s->getObject( "blah" );
ASSERT( out.isEmpty() );
@@ -328,13 +328,13 @@ namespace JSTests {
}
s->setObject( "x" , o );
- s->invoke( "return x.d.getTime() != 12;" , BSONObj() );
+ s->invoke( "return x.d.getTime() != 12;" , 0, 0 );
ASSERT_EQUALS( true, s->getBoolean( "return" ) );
- s->invoke( "z = x.d.getTime();" , BSONObj() );
+ s->invoke( "z = x.d.getTime();" , 0, 0 );
ASSERT_EQUALS( 123456789 , s->getNumber( "z" ) );
- s->invoke( "z = { z : x.d }" , BSONObj() );
+ s->invoke( "z = { z : x.d }" , 0, 0 );
BSONObj out = s->getObject( "z" );
ASSERT( out["z"].type() == Date );
}
@@ -349,16 +349,16 @@ namespace JSTests {
}
s->setObject( "x" , o );
- s->invoke( "z = x.r.test( 'b' );" , BSONObj() );
+ s->invoke( "z = x.r.test( 'b' );" , 0, 0 );
ASSERT_EQUALS( false , s->getBoolean( "z" ) );
- s->invoke( "z = x.r.test( 'a' );" , BSONObj() );
+ s->invoke( "z = x.r.test( 'a' );" , 0, 0 );
ASSERT_EQUALS( true , s->getBoolean( "z" ) );
- s->invoke( "z = x.r.test( 'ba' );" , BSONObj() );
+ s->invoke( "z = x.r.test( 'ba' );" , 0, 0 );
ASSERT_EQUALS( false , s->getBoolean( "z" ) );
- s->invoke( "z = { a : x.r };" , BSONObj() );
+ s->invoke( "z = { a : x.r };" , 0, 0 );
BSONObj out = s->getObject("z");
ASSERT_EQUALS( (string)"^a" , out["a"].regex() );
@@ -402,7 +402,7 @@ namespace JSTests {
s->setObject( "z" , b.obj() );
- ASSERT( s->invoke( "y = { a : z.a , b : z.b , c : z.c , d: z.d }" , BSONObj() ) == 0 );
+ ASSERT( s->invoke( "y = { a : z.a , b : z.b , c : z.c , d: z.d }" , 0, 0 ) == 0 );
BSONObj out = s->getObject( "y" );
ASSERT_EQUALS( Timestamp , out["a"].type() );
@@ -436,7 +436,7 @@ namespace JSTests {
ASSERT_EQUALS( NumberDouble , o["b"].type() );
s->setObject( "z" , o );
- s->invoke( "return z" , BSONObj() );
+ s->invoke( "return z" , 0, 0 );
BSONObj out = s->getObject( "return" );
ASSERT_EQUALS( 5 , out["a"].number() );
ASSERT_EQUALS( 5.6 , out["b"].number() );
@@ -454,7 +454,7 @@ namespace JSTests {
}
s->setObject( "z" , o , false );
- s->invoke( "return z" , BSONObj() );
+ s->invoke( "return z" , 0, 0 );
out = s->getObject( "return" );
ASSERT_EQUALS( 5 , out["a"].number() );
ASSERT_EQUALS( 5.6 , out["b"].number() );
@@ -487,7 +487,7 @@ namespace JSTests {
ASSERT_EQUALS( NumberDouble , out["a"].embeddedObjectUserCheck()["0"].type() );
ASSERT_EQUALS( NumberInt , out["a"].embeddedObjectUserCheck()["1"].type() );
- s->invokeSafe( "z.z = 5;" , BSONObj() );
+ s->invokeSafe( "z.z = 5;" , 0, 0 );
out = s->getObject( "z" );
ASSERT_EQUALS( 5 , out["z"].number() );
ASSERT_EQUALS( NumberDouble , out["a"].embeddedObjectUserCheck()["0"].type() );
@@ -625,10 +625,10 @@ namespace JSTests {
for ( int i=5; i<100 ; i += 10 ) {
s->setObject( "a" , build(i) , false );
- s->invokeSafe( "tojson( a )" , BSONObj() );
+ s->invokeSafe( "tojson( a )" , 0, 0 );
s->setObject( "a" , build(5) , true );
- s->invokeSafe( "tojson( a )" , BSONObj() );
+ s->invokeSafe( "tojson( a )" , 0, 0 );
}
delete s;
@@ -715,9 +715,8 @@ namespace JSTests {
}
//cout << "ELIOT: " << b.jsonString() << endl;
- s->setThis( &b );
// its ok if this is handled by js, just can't create a c++ exception
- s->invoke( "x=this.x.length;" , BSONObj() );
+ s->invoke( "x=this.x.length;" , 0, &b );
}
};
@@ -735,7 +734,7 @@ namespace JSTests {
s->setObject( "foo" , b.obj() );
}
- s->invokeSafe( "out = {}; out.a = foo.a; foo.b(); foo.c();" , BSONObj() );
+ s->invokeSafe( "out = {}; out.a = foo.a; foo.b(); foo.c();" , 0, 0 );
BSONObj out = s->getObject( "out" );
ASSERT_EQUALS( 1 , out["a"].number() );
@@ -845,8 +844,8 @@ namespace JSTests {
s->setObject( "x" , in );
}
- s->invokeSafe( "myb = x.b; print( myb ); printjson( myb );" , BSONObj() );
- s->invokeSafe( "y = { c : myb };" , BSONObj() );
+ s->invokeSafe( "myb = x.b; print( myb ); printjson( myb );" , 0, 0 );
+ s->invokeSafe( "y = { c : myb };" , 0, 0 );
BSONObj out = s->getObject( "y" );
ASSERT_EQUALS( BinData , out["c"].type() );
@@ -855,7 +854,7 @@ namespace JSTests {
ASSERT_EQUALS( 0 , in["b"].woCompare( out["c"] , false ) );
// check that BinData js class is utilized
- s->invokeSafe( "q = x.b.toString();", BSONObj() );
+ s->invokeSafe( "q = x.b.toString();", 0, 0 );
stringstream expected;
expected << "BinData(" << BinDataGeneral << ",\"" << base64 << "\")";
ASSERT_EQUALS( expected.str(), s->getString( "q" ) );
@@ -863,12 +862,12 @@ namespace JSTests {
stringstream scriptBuilder;
scriptBuilder << "z = { c : new BinData( " << BinDataGeneral << ", \"" << base64 << "\" ) };";
string script = scriptBuilder.str();
- s->invokeSafe( script.c_str(), BSONObj() );
+ s->invokeSafe( script.c_str(), 0, 0 );
out = s->getObject( "z" );
// pp( "out" , out["c"] );
ASSERT_EQUALS( 0 , in["b"].woCompare( out["c"] , false ) );
- s->invokeSafe( "a = { f: new BinData( 128, \"\" ) };", BSONObj() );
+ s->invokeSafe( "a = { f: new BinData( 128, \"\" ) };", 0, 0 );
out = s->getObject( "a" );
int len = -1;
out[ "f" ].binData( len );
@@ -896,19 +895,18 @@ namespace JSTests {
class Speed1 {
public:
void run() {
- BSONObj start = BSON( "x" << 5 );
+ BSONObj start = BSON( "x" << 5.0 );
BSONObj empty;
auto_ptr<Scope> s;
s.reset( globalScriptEngine->newScope() );
ScriptingFunction f = s->createFunction( "return this.x + 6;" );
- s->setThis( &start );
Timer t;
double n = 0;
for ( ; n < 100000; n++ ) {
- s->invoke( f , empty );
+ s->invoke( f , &empty, &start );
ASSERT_EQUALS( 11 , s->getNumber( "return" ) );
}
//cout << "speed1: " << ( n / t.millis() ) << " ops/ms" << endl;
@@ -921,23 +919,22 @@ namespace JSTests {
auto_ptr<Scope> s;
s.reset( globalScriptEngine->newScope() );
- s->invokeSafe( "x = 5;" , BSONObj() );
+ s->invokeSafe( "x = 5;" , 0, 0 );
{
BSONObjBuilder b;
s->append( b , "z" , "x" );
ASSERT_EQUALS( BSON( "z" << 5 ) , b.obj() );
}
- s->invokeSafe( "x = function(){ return 17; }" , BSONObj() );
+ s->invokeSafe( "x = function(){ return 17; }" , 0, 0 );
BSONObj temp;
{
BSONObjBuilder b;
s->append( b , "z" , "x" );
temp = b.obj();
- s->setThis( &temp );
}
- s->invokeSafe( "foo = this.z();" , BSONObj() );
+ s->invokeSafe( "foo = this.z();" , 0, &temp );
ASSERT_EQUALS( 17 , s->getNumber( "foo" ) );
}
};
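
The jstests changes above track a Scope::invoke() signature change: the arguments object and the "this" object are now passed as BSONObj pointers (either may be 0) instead of calling setThis() separately and passing the arguments by value. A toy standalone sketch of that calling convention, with stand-in types (nothing here is the mongo scripting API), might be:

#include <cassert>
#include <map>
#include <string>

struct Obj { std::map<std::string, double> fields; };

// Stand-in for Scope::invoke(code, args, thisObj): either pointer may be null,
// which avoids building empty objects just to say "no args" / "no this".
double invoke(double (*code)(const Obj*, const Obj*), const Obj* args, const Obj* thisObj) {
    return code(args, thisObj);
}

int main() {
    Obj thisObj;  thisObj.fields["x"] = 17;
    Obj args;     args.fields[""] = 11;

    double r1 = invoke([](const Obj* a, const Obj*) { return 5 + a->fields.at(""); }, &args, 0);
    assert(r1 == 16);   // like "function( z ){ return 5 + z; }" with args = { "": 11.0 }

    double r2 = invoke([](const Obj*, const Obj* t) { return t->fields.at("x") + 6; }, 0, &thisObj);
    assert(r2 == 23);   // like "return this.x + 6;" with an explicit this object
    return 0;
}
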
diff --git a/dbtests/mockdbclient.h b/dbtests/mockdbclient.h
deleted file mode 100644
index fda0963..0000000
--- a/dbtests/mockdbclient.h
+++ /dev/null
@@ -1,97 +0,0 @@
-// mockdbclient.h - mocked out client for testing.
-
-/**
- * Copyright (C) 2008 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#pragma once
-
-#include "../client/dbclient.h"
-#include "../db/commands.h"
-#include "../db/replpair.h"
-
-class MockDBClientConnection : public DBClientConnection {
-public:
- MockDBClientConnection() : connect_() {}
- virtual
- BSONObj findOne(const string &ns, const Query& query, const BSONObj *fieldsToReturn = 0, int queryOptions = 0) {
- return one_;
- }
- virtual
- bool connect(const char * serverHostname, string& errmsg) {
- return connect_;
- }
- virtual
- bool connect(const HostAndPort& , string& errmsg) {
- return connect_;
- }
- virtual
- bool isMaster(bool& isMaster, BSONObj *info=0) {
- return isMaster_;
- }
- void one( const BSONObj &one ) {
- one_ = one;
- }
- void connect( bool val ) {
- connect_ = val;
- }
- void setIsMaster( bool val ) {
- isMaster_ = val;
- }
-private:
- BSONObj one_;
- bool connect_;
- bool isMaster_;
-};
-
-class DirectDBClientConnection : public DBClientConnection {
-public:
- struct ConnectionCallback {
- virtual ~ConnectionCallback() {}
- virtual void beforeCommand() {}
- virtual void afterCommand() {}
- };
- DirectDBClientConnection( ReplPair *rp, ConnectionCallback *cc = 0 ) :
- rp_( rp ),
- cc_( cc ) {
- }
- virtual BSONObj findOne(const string &ns, const Query& query, const BSONObj *fieldsToReturn = 0, int queryOptions = 0) {
- BSONObj c = query.obj.copy();
- if ( cc_ ) cc_->beforeCommand();
- SetGlobalReplPair s( rp_ );
- BSONObjBuilder result;
- result.append( "ok", Command::runAgainstRegistered( "admin.$cmd", c, result ) ? 1.0 : 0.0 );
- if ( cc_ ) cc_->afterCommand();
- return result.obj();
- }
- virtual bool connect( const string &serverHostname, string& errmsg ) {
- return true;
- }
-private:
- ReplPair *rp_;
- ConnectionCallback *cc_;
- class SetGlobalReplPair {
- public:
- SetGlobalReplPair( ReplPair *rp ) {
- backup_ = replPair;
- replPair = rp;
- }
- ~SetGlobalReplPair() {
- replPair = backup_;
- }
- private:
- ReplPair *backup_;
- };
-};
diff --git a/dbtests/namespacetests.cpp b/dbtests/namespacetests.cpp
index c2be0b0..bbb8f5e 100644
--- a/dbtests/namespacetests.cpp
+++ b/dbtests/namespacetests.cpp
@@ -27,6 +27,9 @@
#include "dbtests.h"
namespace NamespaceTests {
+
+ const int MinExtentSize = 4096;
+
namespace IndexDetailsTests {
class Base {
dblock lk;
@@ -41,12 +44,13 @@ namespace NamespaceTests {
ASSERT( theDataFileMgr.findAll( ns() )->eof() );
}
protected:
- void create() {
+ void create( bool sparse = false ) {
NamespaceDetailsTransient::get_w( ns() ).deletedIndex();
BSONObjBuilder builder;
builder.append( "ns", ns() );
builder.append( "name", "testIndex" );
builder.append( "key", key() );
+ builder.append( "sparse", sparse );
BSONObj bobj = builder.done();
id_.info = theDataFileMgr.insert( ns(), bobj.objdata(), bobj.objsize() );
// head not needed for current tests
@@ -87,8 +91,8 @@ namespace NamespaceTests {
b.append( "c", 4 );
return b.obj();
}
- static void checkSize( int expected, const BSONObjSetDefaultOrder &objs ) {
- ASSERT_EQUALS( BSONObjSetDefaultOrder::size_type( expected ), objs.size() );
+ static void checkSize( int expected, const BSONObjSet &objs ) {
+ ASSERT_EQUALS( BSONObjSet::size_type( expected ), objs.size() );
}
static void assertEquals( const BSONObj &a, const BSONObj &b ) {
if ( a.woCompare( b ) != 0 ) {
@@ -125,7 +129,7 @@ namespace NamespaceTests {
b.append( "b", 4 );
b.append( "a", 5 );
e.append( "", 5 );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 1, keys );
assertEquals( e.obj(), *keys.begin() );
@@ -141,7 +145,7 @@ namespace NamespaceTests {
a.append( "a", b.done() );
a.append( "c", "foo" );
e.append( "", 4 );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( a.done(), keys );
checkSize( 1, keys );
ASSERT_EQUALS( e.obj(), *keys.begin() );
@@ -159,11 +163,11 @@ namespace NamespaceTests {
BSONObjBuilder b;
b.append( "a", shortArray()) ;
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 3, keys );
int j = 1;
- for ( BSONObjSetDefaultOrder::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
+ for ( BSONObjSet::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
BSONObjBuilder b;
b.append( "", j );
assertEquals( b.obj(), *i );
@@ -179,11 +183,11 @@ namespace NamespaceTests {
b.append( "a", shortArray() );
b.append( "b", 2 );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 3, keys );
int j = 1;
- for ( BSONObjSetDefaultOrder::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
+ for ( BSONObjSet::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
BSONObjBuilder b;
b.append( "", j );
b.append( "", 2 );
@@ -204,11 +208,11 @@ namespace NamespaceTests {
b.append( "first", 5 );
b.append( "a", shortArray()) ;
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 3, keys );
int j = 1;
- for ( BSONObjSetDefaultOrder::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
+ for ( BSONObjSet::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
BSONObjBuilder b;
b.append( "", 5 );
b.append( "", j );
@@ -233,11 +237,11 @@ namespace NamespaceTests {
BSONObjBuilder a;
a.append( "a", b.done() );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( a.done(), keys );
checkSize( 3, keys );
int j = 1;
- for ( BSONObjSetDefaultOrder::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
+ for ( BSONObjSet::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
BSONObjBuilder b;
b.append( "", j );
assertEquals( b.obj(), *i );
@@ -257,7 +261,7 @@ namespace NamespaceTests {
b.append( "a", shortArray() );
b.append( "b", shortArray() );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
ASSERT_EXCEPTION( id().getKeysFromObject( b.done(), keys ),
UserException );
}
@@ -277,11 +281,11 @@ namespace NamespaceTests {
BSONObjBuilder b;
b.append( "a", elts );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 3, keys );
int j = 1;
- for ( BSONObjSetDefaultOrder::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
+ for ( BSONObjSet::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
BSONObjBuilder b;
b.append( "", j );
assertEquals( b.obj(), *i );
@@ -304,11 +308,11 @@ namespace NamespaceTests {
b.append( "a", elts );
b.append( "d", 99 );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 3, keys );
int j = 1;
- for ( BSONObjSetDefaultOrder::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
+ for ( BSONObjSet::iterator i = keys.begin(); i != keys.end(); ++i, ++j ) {
BSONObjBuilder c;
c.append( "", j );
c.append( "", 99 );
@@ -336,12 +340,13 @@ namespace NamespaceTests {
elts.push_back( simpleBC( i ) );
BSONObjBuilder b;
b.append( "a", elts );
-
- BSONObjSetDefaultOrder keys;
- id().getKeysFromObject( b.done(), keys );
+ BSONObj obj = b.obj();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( obj, keys );
checkSize( 4, keys );
- BSONObjSetDefaultOrder::iterator i = keys.begin();
- assertEquals( nullObj(), *i++ );
+ BSONObjSet::iterator i = keys.begin();
+ assertEquals( nullObj(), *i++ ); // see SERVER-3377
for ( int j = 1; j < 4; ++i, ++j ) {
BSONObjBuilder b;
b.append( "", j );
@@ -366,7 +371,7 @@ namespace NamespaceTests {
BSONObjBuilder b;
b.append( "a", elts );
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( b.done(), keys );
checkSize( 1, keys );
assertEquals( nullObj(), *keys.begin() );
@@ -381,7 +386,7 @@ namespace NamespaceTests {
public:
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( BSON( "b" << 1 ), keys );
checkSize( 1, keys );
assertEquals( nullObj(), *keys.begin() );
@@ -396,7 +401,7 @@ namespace NamespaceTests {
public:
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{a:[1,2]}" ), keys );
checkSize( 1, keys );
assertEquals( nullObj(), *keys.begin() );
@@ -413,14 +418,14 @@ namespace NamespaceTests {
create();
{
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{x:'a',y:'b'}" ) , keys );
checkSize( 1 , keys );
assertEquals( BSON( "" << "a" << "" << "b" ) , *keys.begin() );
}
{
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{x:'a'}" ) , keys );
checkSize( 1 , keys );
BSONObjBuilder b;
@@ -442,7 +447,7 @@ namespace NamespaceTests {
public:
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{a:[{b:[2]}]}" ), keys );
checkSize( 1, keys );
assertEquals( BSON( "" << 2 ), *keys.begin() );
@@ -457,7 +462,7 @@ namespace NamespaceTests {
public:
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
ASSERT_EXCEPTION( id().getKeysFromObject( fromjson( "{a:[{b:[1],c:[2]}]}" ), keys ),
UserException );
}
@@ -471,10 +476,10 @@ namespace NamespaceTests {
public:
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{a:[{b:1},{c:2}]}" ), keys );
checkSize( 2, keys );
- BSONObjSetDefaultOrder::iterator i = keys.begin();
+ BSONObjSet::iterator i = keys.begin();
{
BSONObjBuilder e;
e.appendNull( "" );
@@ -499,7 +504,7 @@ namespace NamespaceTests {
public:
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{a:[{b:1},{b:[1,2,3]}]}" ), keys );
checkSize( 3, keys );
}
@@ -514,7 +519,7 @@ namespace NamespaceTests {
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{a:[1,2]}" ), keys );
checkSize(2, keys );
keys.clear();
@@ -529,16 +534,56 @@ namespace NamespaceTests {
id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
checkSize(1, keys );
+ ASSERT_EQUALS( Undefined, keys.begin()->firstElement().type() );
keys.clear();
}
};
+
+ class DoubleArray : Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[1,2]}" ), keys );
+ checkSize(2, keys );
+ BSONObjSet::const_iterator i = keys.begin();
+ ASSERT_EQUALS( BSON( "" << 1 << "" << 1 ), *i );
+ ++i;
+ ASSERT_EQUALS( BSON( "" << 2 << "" << 2 ), *i );
+ keys.clear();
+ }
+
+ protected:
+ BSONObj key() const {
+ return BSON( "a" << 1 << "a" << 1 );
+ }
+ };
+
+ class DoubleEmptyArray : Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize(1, keys );
+ ASSERT_EQUALS( fromjson( "{'':undefined,'':undefined}" ), *keys.begin() );
+ keys.clear();
+ }
+
+ protected:
+ BSONObj key() const {
+ return BSON( "a" << 1 << "a" << 1 );
+ }
+ };
class MultiEmptyArray : Base {
public:
void run() {
create();
- BSONObjSetDefaultOrder keys;
+ BSONObjSet keys;
id().getKeysFromObject( fromjson( "{a:1,b:[1,2]}" ), keys );
checkSize(2, keys );
keys.clear();
@@ -555,7 +600,9 @@ namespace NamespaceTests {
id().getKeysFromObject( fromjson( "{a:1,b:[]}" ), keys );
checkSize(1, keys );
//cout << "YO : " << *(keys.begin()) << endl;
- ASSERT_EQUALS( NumberInt , keys.begin()->firstElement().type() );
+ BSONObjIterator i( *keys.begin() );
+ ASSERT_EQUALS( NumberInt , i.next().type() );
+ ASSERT_EQUALS( Undefined , i.next().type() );
keys.clear();
}
@@ -564,8 +611,313 @@ namespace NamespaceTests {
return aAndB();
}
};
+
+ class NestedEmptyArray : Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.b" << 1 ); }
+ };
+
+ class MultiNestedEmptyArray : Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null,'':null}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.b" << 1 << "a.c" << 1 ); }
+ };
+
+ class UnevenNestedEmptyArray : public Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':undefined,'':null}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[{b:1}]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':{b:1},'':1}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[{b:[]}]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':{b:[]},'':undefined}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a" << 1 << "a.b" << 1 ); }
+ };
+
+ class ReverseUnevenNestedEmptyArray : public Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null,'':undefined}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.b" << 1 << "a" << 1 ); }
+ };
+
+ class SparseReverseUnevenNestedEmptyArray : public Base {
+ public:
+ void run() {
+ create( true );
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null,'':undefined}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.b" << 1 << "a" << 1 ); }
+ };
+
+ class SparseEmptyArray : public Base {
+ public:
+ void run() {
+ create( true );
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:1}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[{c:1}]}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.b" << 1 ); }
+ };
+
+ class SparseEmptyArraySecond : public Base {
+ public:
+ void run() {
+ create( true );
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:1}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[{c:1}]}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "z" << 1 << "a.b" << 1 ); }
+ };
+
+ class NonObjectMissingNestedField : public Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[1]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[1,{b:1}]}" ), keys );
+ checkSize( 2, keys );
+ BSONObjSet::const_iterator c = keys.begin();
+ ASSERT_EQUALS( fromjson( "{'':null}" ), *c );
+ ++c;
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *c );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.b" << 1 ); }
+ };
+
+ class SparseNonObjectMissingNestedField : public Base {
+ public:
+ void run() {
+ create( true );
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[1]}" ), keys );
+ checkSize( 0, keys );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[1,{b:1}]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.b" << 1 ); }
+ };
+
+ class IndexedArrayIndex : public Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[1]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( BSON( "" << 1 ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[[1]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':[1]}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[[]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':undefined}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:{'0':1}}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( BSON( "" << 1 ), *keys.begin() );
+ keys.clear();
+
+ ASSERT_EXCEPTION( id().getKeysFromObject( fromjson( "{a:[{'0':1}]}" ), keys ), UserException );
+
+ ASSERT_EXCEPTION( id().getKeysFromObject( fromjson( "{a:[1,{'0':2}]}" ), keys ), UserException );
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.0" << 1 ); }
+ };
+
+ class DoubleIndexedArrayIndex : public Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[[1]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[[]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':null}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[[[]]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':undefined}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.0.0" << 1 ); }
+ };
+
+ class ObjectWithinArray : public Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[{b:1}]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *keys.begin() );
+ keys.clear();
+ id().getKeysFromObject( fromjson( "{a:[{b:[1]}]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[{b:[[1]]}]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':[1]}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[[{b:1}]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[[{b:[1]}]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *keys.begin() );
+ keys.clear();
+ id().getKeysFromObject( fromjson( "{a:[[{b:[[1]]}]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':[1]}" ), *keys.begin() );
+ keys.clear();
+
+ id().getKeysFromObject( fromjson( "{a:[[{b:[]}]]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':undefined}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.0.b" << 1 ); }
+ };
+
+ class ArrayWithinObjectWithinArray : public Base {
+ public:
+ void run() {
+ create();
+
+ BSONObjSet keys;
+ id().getKeysFromObject( fromjson( "{a:[{b:[1]}]}" ), keys );
+ checkSize( 1, keys );
+ ASSERT_EQUALS( fromjson( "{'':1}" ), *keys.begin() );
+ keys.clear();
+ }
+ protected:
+ BSONObj key() const { return BSON( "a.0.b.0" << 1 ); }
+ };
+
+ // also test numeric string field names
+
} // namespace IndexDetailsTests
namespace NamespaceDetailsTests {
@@ -622,9 +974,11 @@ namespace NamespaceTests {
NamespaceDetails *nsd() const {
return nsdetails( ns() )->writingWithExtra();
}
- static BSONObj bigObj() {
- string as( 187, 'a' );
+ static BSONObj bigObj(bool bGenID=false) {
BSONObjBuilder b;
+ if (bGenID)
+ b.appendOID("_id", 0, true);
+ string as( 187, 'a' );
b.append( "a", as );
return b.obj();
}
@@ -657,15 +1011,18 @@ namespace NamespaceTests {
public:
void run() {
create();
- BSONObj b = bigObj();
- DiskLoc l[ 6 ];
- for ( int i = 0; i < 6; ++i ) {
+ const int N = 20;
+ const int Q = 16; // these constants depend on the size of the bson object and on the extent size the system allocates
+ DiskLoc l[ N ];
+ for ( int i = 0; i < N; ++i ) {
+ BSONObj b = bigObj(true);
l[ i ] = theDataFileMgr.insert( ns(), b.objdata(), b.objsize() );
ASSERT( !l[ i ].isNull() );
- ASSERT_EQUALS( 1 + i % 2, nRecords() );
- if ( i > 1 )
- ASSERT( l[ i ] == l[ i - 2 ] );
+ ASSERT( nRecords() <= Q );
+ //ASSERT_EQUALS( 1 + i % 2, nRecords() );
+ if ( i >= 16 )
+ ASSERT( l[ i ] == l[ i - Q] );
}
}
};
@@ -682,14 +1039,15 @@ namespace NamespaceTests {
for ( int i = 0; i < 8; ++i ) {
l[ i ] = theDataFileMgr.insert( ns(), b.objdata(), b.objsize() );
ASSERT( !l[ i ].isNull() );
- ASSERT_EQUALS( i < 2 ? i + 1 : 3 + i % 2, nRecords() );
- if ( i > 3 )
- ASSERT( l[ i ] == l[ i - 4 ] );
+ //ASSERT_EQUALS( i < 2 ? i + 1 : 3 + i % 2, nRecords() );
+ //if ( i > 3 )
+ // ASSERT( l[ i ] == l[ i - 4 ] );
}
+ ASSERT( nRecords() == 8 );
// Too big
BSONObjBuilder bob;
- bob.append( "a", string( 787, 'a' ) );
+ bob.append( "a", string( MinExtentSize + 500, 'a' ) ); // min extent size is now 4096
BSONObj bigger = bob.done();
ASSERT( theDataFileMgr.insert( ns(), bigger.objdata(), bigger.objsize() ).isNull() );
ASSERT_EQUALS( 0, nRecords() );
@@ -710,16 +1068,24 @@ namespace NamespaceTests {
create();
ASSERT_EQUALS( 2, nExtents() );
- BSONObj b = bigObj();
+ BSONObj b = bigObj(true);
- DiskLoc l[ 8 ];
- for ( int i = 0; i < 8; ++i ) {
- l[ i ] = theDataFileMgr.insert( ns(), b.objdata(), b.objsize() );
- ASSERT( !l[ i ].isNull() );
- ASSERT_EQUALS( i < 2 ? i + 1 : 3 + i % 2, nRecords() );
+ int N = MinExtentSize / b.objsize() * nExtents() + 5; // enough inserts to wrap around the capped extents
+ int T = N - 4; // insert index at which the truncation point is captured
+
+ DiskLoc truncAt;
+ //DiskLoc l[ 8 ];
+ for ( int i = 0; i < N; ++i ) {
+ BSONObj bb = bigObj(true);
+ DiskLoc a = theDataFileMgr.insert( ns(), bb.objdata(), bb.objsize() );
+ if( T == i )
+ truncAt = a;
+ ASSERT( !a.isNull() );
+ /*ASSERT_EQUALS( i < 2 ? i + 1 : 3 + i % 2, nRecords() );
if ( i > 3 )
- ASSERT( l[ i ] == l[ i - 4 ] );
+ ASSERT( l[ i ] == l[ i - 4 ] );*/
}
+ ASSERT( nRecords() < N );
NamespaceDetails *nsd = nsdetails(ns());
@@ -736,10 +1102,8 @@ namespace NamespaceTests {
ASSERT( first != last ) ;
}
- DiskLoc d = l[6];
- long long n = nsd->stats.nrecords;
- nsd->cappedTruncateAfter(ns(), d, false);
- ASSERT_EQUALS( nsd->stats.nrecords , n-1 );
+ nsd->cappedTruncateAfter(ns(), truncAt, false);
+ ASSERT_EQUALS( nsd->stats.nrecords , 28 );
{
ForwardCappedCursor c(nsd);
@@ -753,7 +1117,8 @@ namespace NamespaceTests {
// Too big
BSONObjBuilder bob;
- bob.append( "a", string( 787, 'a' ) );
+ bob.appendOID("_id", 0, true);
+ bob.append( "a", string( MinExtentSize + 300, 'a' ) );
BSONObj bigger = bob.done();
ASSERT( theDataFileMgr.insert( ns(), bigger.objdata(), bigger.objsize() ).isNull() );
ASSERT_EQUALS( 0, nRecords() );
@@ -846,7 +1211,22 @@ namespace NamespaceTests {
add< IndexDetailsTests::AlternateMissing >();
add< IndexDetailsTests::MultiComplex >();
add< IndexDetailsTests::EmptyArray >();
+ add< IndexDetailsTests::DoubleArray >();
+ add< IndexDetailsTests::DoubleEmptyArray >();
add< IndexDetailsTests::MultiEmptyArray >();
+ add< IndexDetailsTests::NestedEmptyArray >();
+ add< IndexDetailsTests::MultiNestedEmptyArray >();
+ add< IndexDetailsTests::UnevenNestedEmptyArray >();
+ add< IndexDetailsTests::ReverseUnevenNestedEmptyArray >();
+ add< IndexDetailsTests::SparseReverseUnevenNestedEmptyArray >();
+ add< IndexDetailsTests::SparseEmptyArray >();
+ add< IndexDetailsTests::SparseEmptyArraySecond >();
+ add< IndexDetailsTests::NonObjectMissingNestedField >();
+ add< IndexDetailsTests::SparseNonObjectMissingNestedField >();
+ add< IndexDetailsTests::IndexedArrayIndex >();
+ add< IndexDetailsTests::DoubleIndexedArrayIndex >();
+ add< IndexDetailsTests::ObjectWithinArray >();
+ add< IndexDetailsTests::ArrayWithinObjectWithinArray >();
add< IndexDetailsTests::MissingField >();
add< IndexDetailsTests::SubobjectMissing >();
add< IndexDetailsTests::CompoundMissing >();
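The new IndexDetailsTests above all follow one pattern: create() builds an index over key(), getKeysFromObject() is fed a document, and the resulting BSONObjSet is checked against the expected placeholder keys (null for a missing nested field, undefined for an empty array). For readers new to this harness, the sketch below restates that pattern in isolation; it assumes the helpers defined earlier in this file (Base, checkSize, id(), fromjson) and mirrors the NestedEmptyArray case added above rather than introducing new behavior.

    // illustrative sketch only - same shape as the tests added in this change
    class NestedEmptyArraySketch : public Base {
    public:
        void run() {
            create();                                       // index on { "a.b": 1 }
            BSONObjSet keys;
            id().getKeysFromObject( fromjson( "{a:[]}" ), keys );
            checkSize( 1, keys );                           // one placeholder key
            ASSERT_EQUALS( fromjson( "{'':null}" ), *keys.begin() );
        }
    protected:
        BSONObj key() const { return BSON( "a.b" << 1 ); }
    };
    // a test like this is picked up by adding, in All::setupTests():
    //     add< IndexDetailsTests::NestedEmptyArraySketch >();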
diff --git a/dbtests/pairingtests.cpp b/dbtests/pairingtests.cpp
deleted file mode 100644
index 9cca548..0000000
--- a/dbtests/pairingtests.cpp
+++ /dev/null
@@ -1,344 +0,0 @@
-// pairingtests.cpp : Pairing unit tests.
-//
-
-/**
- * Copyright (C) 2008 10gen Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License, version 3,
- * as published by the Free Software Foundation.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "pch.h"
-#include "../db/replpair.h"
-#include "dbtests.h"
-#include "mockdbclient.h"
-#include "../db/cmdline.h"
-
-namespace mongo {
- extern PairSync *pairSync;
-} // namespace mongo
-
-namespace PairingTests {
- class Base {
- protected:
- Base() {
- backup = pairSync;
- setSynced();
- }
- ~Base() {
- pairSync = backup;
- dblock lk;
- Helpers::emptyCollection( "local.pair.sync" );
- if ( pairSync->initialSyncCompleted() ) {
- // save to db
- pairSync->setInitialSyncCompleted();
- }
- }
- static void setSynced() {
- init();
- pairSync = synced;
- pairSync->setInitialSyncCompletedLocking();
- ASSERT( pairSync->initialSyncCompleted() );
- }
- static void setNotSynced() {
- init();
- pairSync = notSynced;
- ASSERT( !pairSync->initialSyncCompleted() );
- }
- static void flipSync() {
- if ( pairSync->initialSyncCompleted() )
- setNotSynced();
- else
- setSynced();
- }
- private:
- static void init() {
- dblock lk;
- Helpers::emptyCollection( "local.pair.sync" );
- if ( synced != 0 && notSynced != 0 )
- return;
- notSynced = new PairSync();
- notSynced->init();
- synced = new PairSync();
- synced->init();
- synced->setInitialSyncCompleted();
- Helpers::emptyCollection( "local.pair.sync" );
- }
- PairSync *backup;
- static PairSync *synced;
- static PairSync *notSynced;
- };
- PairSync *Base::synced = 0;
- PairSync *Base::notSynced = 0;
-
- namespace ReplPairTests {
- class Create : public Base {
- public:
- void run() {
- ReplPair rp1( "foo", "bar" );
- checkFields( rp1, "foo", "foo", CmdLine::DefaultDBPort, "bar" );
-
- ReplPair rp2( "foo:1", "bar" );
- checkFields( rp2, "foo:1", "foo", 1, "bar" );
-
- // FIXME Should we accept this input?
- ReplPair rp3( "", "bar" );
- checkFields( rp3, "", "", CmdLine::DefaultDBPort, "bar" );
-
- ASSERT_EXCEPTION( ReplPair( "foo:", "bar" ),
- UserException );
-
- ASSERT_EXCEPTION( ReplPair( "foo:0", "bar" ),
- UserException );
-
- ASSERT_EXCEPTION( ReplPair( "foo:10000000", "bar" ),
- UserException );
-
- ASSERT_EXCEPTION( ReplPair( "foo", "" ),
- UserException );
- }
- private:
- void checkFields( const ReplPair &rp,
- const string &remote,
- const string &remoteHost,
- int remotePort,
- const string &arbHost ) {
- ASSERT( rp.state == ReplPair::State_Negotiating );
- ASSERT_EQUALS( remote, rp.remote );
- ASSERT_EQUALS( remoteHost, rp.remoteHost );
- ASSERT_EQUALS( remotePort, rp.remotePort );
- ASSERT_EQUALS( arbHost, rp.arbHost );
- }
- };
-
- class Dominant : public Base {
- public:
- Dominant() : oldPort_( cmdLine.port ) {
- cmdLine.port = 10;
- }
- ~Dominant() {
- cmdLine.port = oldPort_;
- }
- void run() {
- ASSERT( ReplPair( "b:9", "-" ).dominant( "b" ) );
- ASSERT( !ReplPair( "b:10", "-" ).dominant( "b" ) );
- ASSERT( ReplPair( "b", "-" ).dominant( "c" ) );
- ASSERT( !ReplPair( "b", "-" ).dominant( "a" ) );
- }
- private:
- int oldPort_;
- };
-
- class SetMaster {
- public:
- void run() {
- ReplPair rp( "a", "b" );
- rp.setMaster( ReplPair::State_CantArb, "foo" );
- ASSERT( rp.state == ReplPair::State_CantArb );
- ASSERT_EQUALS( rp.info , "foo" );
- rp.setMaster( ReplPair::State_Confused, "foo" );
- ASSERT( rp.state == ReplPair::State_Confused );
- }
- };
-
- class Negotiate : public Base {
- public:
- void run() {
- ReplPair rp( "a", "b" );
- MockDBClientConnection cc;
-
- cc.one( res( 0, 0 ) );
- rp.negotiate( &cc, "dummy" );
- ASSERT( rp.state == ReplPair::State_Confused );
-
- rp.state = ReplPair::State_Negotiating;
- cc.one( res( 1, 2 ) );
- rp.negotiate( &cc, "dummy" );
- ASSERT( rp.state == ReplPair::State_Negotiating );
-
- cc.one( res( 1, ReplPair::State_Slave ) );
- rp.negotiate( &cc, "dummy" );
- ASSERT( rp.state == ReplPair::State_Slave );
-
- cc.one( res( 1, ReplPair::State_Master ) );
- rp.negotiate( &cc, "dummy" );
- ASSERT( rp.state == ReplPair::State_Master );
- }
- private:
- BSONObj res( int ok, int youAre ) {
- BSONObjBuilder b;
- b.append( "ok", ok );
- b.append( "you_are", youAre );
- return b.obj();
- }
- };
-
- class Arbitrate : public Base {
- public:
- void run() {
- ReplPair rp1( "a", "-" );
- rp1.arbitrate();
- ASSERT( rp1.state == ReplPair::State_Master );
-
- TestableReplPair rp2( false, BSONObj() );
- rp2.arbitrate();
- ASSERT( rp2.state == ReplPair::State_CantArb );
-
- TestableReplPair rp3( true, fromjson( "{ok:0}" ) );
- rp3.arbitrate();
- ASSERT_EQUALS( rp3.state , ReplPair::State_Confused );
-
- TestableReplPair rp4( true, fromjson( "{ok:1,you_are:1}" ) );
- rp4.arbitrate();
- ASSERT( rp4.state == ReplPair::State_Master );
-
- TestableReplPair rp5( true, fromjson( "{ok:1,you_are:0}" ) );
- rp5.arbitrate();
- ASSERT( rp5.state == ReplPair::State_Slave );
-
- TestableReplPair rp6( true, fromjson( "{ok:1,you_are:-1}" ) );
- rp6.arbitrate();
- // unchanged from initial value
- ASSERT( rp6.state == ReplPair::State_Negotiating );
- }
- private:
- class TestableReplPair : public ReplPair {
- public:
- TestableReplPair( bool connect, const BSONObj &one ) :
- ReplPair( "a", "z" ),
- connect_( connect ),
- one_( one ) {
- }
- virtual
- DBClientConnection *newClientConnection() const {
- MockDBClientConnection * c = new MockDBClientConnection();
- c->connect( connect_ );
- c->one( one_ );
- return c;
- }
- private:
- bool connect_;
- BSONObj one_;
- };
- };
- } // namespace ReplPairTests
-
- class DirectConnectBase : public Base {
- public:
- virtual ~DirectConnectBase() {}
- protected:
- void negotiate( ReplPair &a, ReplPair &b ) {
- auto_ptr< DBClientConnection > c( new DirectDBClientConnection( &b, cc() ) );
- a.negotiate( c.get(), "dummy" );
- }
- virtual DirectDBClientConnection::ConnectionCallback *cc() {
- return 0;
- }
- void checkNegotiation( const char *host1, const char *arb1, int state1, int newState1,
- const char *host2, const char *arb2, int state2, int newState2 ) {
- ReplPair one( host1, arb1 );
- one.state = state1;
- ReplPair two( host2, arb2 );
- two.state = state2;
- negotiate( one, two );
- ASSERT( one.state == newState1 );
- ASSERT( two.state == newState2 );
- }
- };
-
- class Negotiate : public DirectConnectBase {
- public:
- void run() {
- checkNegotiation( "a", "-", ReplPair::State_Negotiating, ReplPair::State_Negotiating,
- "b", "-", ReplPair::State_Negotiating, ReplPair::State_Negotiating );
- checkNegotiation( "b", "-", ReplPair::State_Negotiating, ReplPair::State_Slave,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Master );
-
- checkNegotiation( "b", "-", ReplPair::State_Master, ReplPair::State_Master,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Slave );
-
- // No change when negotiate() called on a.
- checkNegotiation( "a", "-", ReplPair::State_Master, ReplPair::State_Master,
- "b", "-", ReplPair::State_Master, ReplPair::State_Master );
- // Resolve Master - Master.
- checkNegotiation( "b", "-", ReplPair::State_Master, ReplPair::State_Slave,
- "a", "-", ReplPair::State_Master, ReplPair::State_Master );
-
- // FIXME Move from negotiating to master?
- checkNegotiation( "b", "-", ReplPair::State_Slave, ReplPair::State_Slave,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Master );
- }
- };
-
- class NegotiateWithCatchup : public DirectConnectBase {
- public:
- void run() {
- // a caught up, b not
- setNotSynced();
- checkNegotiation( "b", "-", ReplPair::State_Negotiating, ReplPair::State_Slave,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Master );
- // b caught up, a not
- setSynced();
- checkNegotiation( "b", "-", ReplPair::State_Negotiating, ReplPair::State_Master,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Slave );
-
- // a caught up, b not
- setNotSynced();
- checkNegotiation( "b", "-", ReplPair::State_Slave, ReplPair::State_Slave,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Master );
- // b caught up, a not
- setSynced();
- checkNegotiation( "b", "-", ReplPair::State_Slave, ReplPair::State_Master,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Slave );
- }
- private:
- class NegateCatchup : public DirectDBClientConnection::ConnectionCallback {
- virtual void beforeCommand() {
- Base::flipSync();
- }
- virtual void afterCommand() {
- Base::flipSync();
- }
- };
- virtual DirectDBClientConnection::ConnectionCallback *cc() {
- return &cc_;
- }
- NegateCatchup cc_;
- };
-
- class NobodyCaughtUp : public DirectConnectBase {
- public:
- void run() {
- setNotSynced();
- checkNegotiation( "b", "-", ReplPair::State_Negotiating, ReplPair::State_Negotiating,
- "a", "-", ReplPair::State_Negotiating, ReplPair::State_Slave );
- }
- };
-
- class All : public Suite {
- public:
- All() : Suite( "pairing" ) {
- }
-
- void setupTests() {
- add< ReplPairTests::Create >();
- add< ReplPairTests::Dominant >();
- add< ReplPairTests::SetMaster >();
- add< ReplPairTests::Negotiate >();
- add< ReplPairTests::Arbitrate >();
- add< Negotiate >();
- add< NegotiateWithCatchup >();
- add< NobodyCaughtUp >();
- }
- } myall;
-} // namespace PairingTests
-
diff --git a/dbtests/pdfiletests.cpp b/dbtests/pdfiletests.cpp
index 2844fc4..26c837c 100644
--- a/dbtests/pdfiletests.cpp
+++ b/dbtests/pdfiletests.cpp
@@ -360,7 +360,7 @@ namespace PdfileTests {
if( n == 5 && sizeof(void*)==4 )
break;
MongoDataFile * f = d->addAFile( big , false );
- cout << f->length() << ' ' << n << endl;
+ //cout << f->length() << ' ' << n << endl;
if ( f->length() == l )
break;
l = f->length();
@@ -368,7 +368,7 @@ namespace PdfileTests {
int start = d->numFiles();
for ( int i=0; i<start; i++ )
- d->allocExtent( c1.c_str() , d->getFile( i )->getHeader()->unusedLength , false );
+ d->allocExtent( c1.c_str() , d->getFile( i )->getHeader()->unusedLength , false, false );
ASSERT_EQUALS( start , d->numFiles() );
{
diff --git a/dbtests/perf/perftest.cpp b/dbtests/perf/perftest.cpp
index ef03551..b6219f7 100644
--- a/dbtests/perf/perftest.cpp
+++ b/dbtests/perf/perftest.cpp
@@ -21,7 +21,7 @@
#include "../../client/dbclient.h"
#include "../../db/instance.h"
-#include "../../db/query.h"
+#include "../../db/ops/query.h"
#include "../../db/queryoptimizer.h"
#include "../../util/file_allocator.h"
@@ -330,6 +330,37 @@ namespace BSON {
BSONObj o_;
};
+ template <int LEN>
+ class Copy {
+ public:
+ Copy(){
+ // putting it in a subobject to force copy on getOwned
+ BSONObjBuilder outer;
+ BSONObjBuilder b (outer.subobjStart("inner"));
+ while (b.len() < LEN)
+ b.append(BSONObjBuilder::numStr(b.len()), b.len());
+ b.done();
+ _base = outer.obj();
+ }
+
+ void run() {
+ int iterations = 1000*1000;
+ while (iterations--){
+ BSONObj temp = copy(_base.firstElement().embeddedObject().getOwned());
+ }
+ }
+
+ private:
+ // NOINLINE_DECL should force a real copy even in optimized builds
+ NOINLINE_DECL BSONObj copy(BSONObj x){
+ return x;
+ }
+
+ BSONObj _base;
+ };
+
+
+
class All : public RunnerSuite {
public:
All() : RunnerSuite( "bson" ) {}
@@ -338,6 +369,10 @@ namespace BSON {
add< ShopwikiParse >();
add< Json >();
add< ShopwikiJson >();
+ add< Copy<10> >();
+ add< Copy<100> >();
+ add< Copy<1000> >();
+ add< Copy<10*1000> >();
}
} all;
@@ -684,12 +719,43 @@ namespace Plan {
add< Query >();
}
} all;
-
} // namespace Plan
+namespace Misc {
+ class TimeMicros64 {
+ public:
+ void run() {
+ int iterations = 1000*1000;
+ while(iterations--){
+ curTimeMicros64();
+ }
+ }
+ };
+
+ class JSTime {
+ public:
+ void run() {
+ int iterations = 1000*1000;
+ while(iterations--){
+ jsTime();
+ }
+ }
+ };
+
+ class All : public RunnerSuite {
+ public:
+ All() : RunnerSuite("misc") {}
+ void setupTests() {
+ add< TimeMicros64 >();
+ add< JSTime >();
+ }
+ } all;
+}
+
int main( int argc, char **argv ) {
logLevel = -1;
client_ = new DBDirectClient();
return Suite::run(argc, argv, "/data/db/perftest");
}
+
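The Copy<LEN> benchmark above routes the BSONObj through a NOINLINE_DECL helper so the copy being measured cannot be inlined and elided. A standalone analog of that trick, outside this tree, looks roughly like the following (assumptions: a GCC/Clang noinline attribute in place of mongo's NOINLINE_DECL macro, and a std::string standing in for the owned BSONObj):

    #include <string>
    #include <iostream>

    // out-of-line on purpose, so the by-value copy at the call site survives optimization
    __attribute__((noinline)) static std::string copyIt(std::string s) {
        return s;
    }

    int main() {
        std::string base(1000, 'x');            // stands in for the LEN-byte object
        unsigned long long sink = 0;
        for (int i = 0; i < 1000 * 1000; ++i)   // same iteration count as Copy<LEN>::run()
            sink += copyIt(base).size();
        std::cout << sink << std::endl;         // keep the result observable
        return 0;
    }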
diff --git a/dbtests/perftests.cpp b/dbtests/perftests.cpp
index 182595c..3d9b6ee 100644
--- a/dbtests/perftests.cpp
+++ b/dbtests/perftests.cpp
@@ -23,18 +23,33 @@
*/
#include "pch.h"
-#include "../db/query.h"
+#include "../db/ops/query.h"
#include "../db/db.h"
#include "../db/instance.h"
#include "../db/json.h"
#include "../db/lasterror.h"
-#include "../db/update.h"
+#include "../db/ops/update.h"
#include "../db/taskqueue.h"
#include "../util/timer.h"
#include "dbtests.h"
#include "../db/dur_stats.h"
+#include "../util/checksum.h"
+#include "../util/version.h"
+#include "../db/key.h"
+#include "../util/compress.h"
+
+using namespace bson;
+
+namespace mongo {
+ namespace regression {
+ extern unsigned perfHist;
+ }
+}
namespace PerfTests {
+
+ const bool profiling = false;
+
typedef DBDirectClient DBClientType;
//typedef DBClientConnection DBClientType;
@@ -92,29 +107,173 @@ namespace PerfTests {
};
int TaskQueueTest::tot;
- class CappedTest : public ClientBase {
- };
-
class B : public ClientBase {
string _ns;
protected:
const char *ns() { return _ns.c_str(); }
- virtual void prep() = 0;
+
+ // anything you want to do before being timed
+ virtual void prep() { }
virtual void timed() = 0;
// optional 2nd test phase to be timed separately
// return name of it
- virtual const char * timed2() { return 0; }
+ virtual string timed2() { return ""; }
virtual void post() { }
+
virtual string name() = 0;
- virtual unsigned long long expectation() = 0;
- virtual int howLongMillis() { return 5000; }
+ virtual unsigned long long expectation() { return 0; }
+ virtual int expectationTimeMillis() { return -1; }
+
+ // how long to run test. 0 is a sentinel which means just run the timed() method once and time it.
+ virtual int howLongMillis() { return profiling ? 60000 : 5000; }
+
+ /* override if your test's output doesn't need durability stats */
+ virtual bool showDurStats() { return true; }
+
+ static DBClientConnection *conn;
+ static unsigned once;
+
public:
+ /* if you want recording of the timings, place the password for the perf database
+ in ./../settings.py:
+ pstatspassword="<pwd>"
+ */
+ void connect() {
+ if( once )
+ return;
+ ++once;
+
+ // no writing to perf db if _DEBUG
+ DEV return;
+
+ const char *fn = "../../settings.py";
+ if( !exists(fn) ) {
+ if( exists("settings.py") )
+ fn = "settings.py";
+ else {
+ cout << "no ../../settings.py or ./settings.py file found. will not write perf stats to pstats db." << endl;
+ cout << "it is recommended this be enabled even on dev boxes" << endl;
+ return;
+ }
+ }
+
+ try {
+ if( conn == 0 ) {
+ MemoryMappedFile f;
+ const char *p = (const char *) f.mapWithOptions(fn, MongoFile::READONLY);
+ string pwd;
+
+ {
+ const char *q = str::after(p, "pstatspassword=\"");
+ if( *q == 0 ) {
+ cout << "info perftests.cpp: no pstatspassword= in settings.py" << endl;
+ return;
+ }
+ else {
+ pwd = str::before(q, '\"');
+ }
+ }
+
+ DBClientConnection *c = new DBClientConnection(false, 0, 10);
+ string err;
+ if( c->connect("perfdb.10gen.cc", err) ) {
+ if( !c->auth("perf", "perf", pwd, err) ) {
+ cout << "info: authentication with stats db failed: " << err << endl;
+ assert(false);
+ }
+ conn = c;
+ }
+ else {
+ cout << err << " (to log perfstats)" << endl;
+ }
+ }
+ }
+ catch(...) { }
+ }
+
+ virtual unsigned batchSize() { return 50; }
+
void say(unsigned long long n, int ms, string s) {
- cout << setw(36) << left << s << ' ' << right << setw(7) << n*1000/ms << "/sec " << setw(4) << ms << "ms" << endl;
- cout << dur::stats.curr->_asObj().toString() << endl;
+ unsigned long long rps = n*1000/ms;
+ cout << "stats " << setw(33) << left << s << ' ' << right << setw(9) << rps << ' ' << right << setw(5) << ms << "ms ";
+ if( showDurStats() )
+ cout << dur::stats.curr->_asCSV();
+ cout << endl;
+
+ connect();
+
+ if( conn && !conn->isFailed() ) {
+ const char *ns = "perf.pstats";
+ if( perfHist ) {
+ static bool needver = true;
+ try {
+ // try to report rps from last time
+ Query q;
+ {
+ BSONObjBuilder b;
+ b.append("host",getHostName()).append("test",s).append("dur",cmdLine.dur);
+ DEV { b.append("info.DEBUG",true); }
+ else b.appendNull("info.DEBUG");
+ if( sizeof(int*) == 4 )
+ b.append("info.bits", 32);
+ else
+ b.appendNull("info.bits");
+ q = Query(b.obj()).sort("when",-1);
+ }
+ BSONObj fields = BSON( "rps" << 1 << "info" << 1 );
+ vector<BSONObj> v;
+ conn->findN(v, ns, q, perfHist, 0, &fields);
+ for( vector<BSONObj>::iterator i = v.begin(); i != v.end(); i++ ) {
+ BSONObj o = *i;
+ double lastrps = o["rps"].Number();
+ if( lastrps ) {
+ cout << "stats " << setw(33) << right << "new/old:" << ' ' << setw(9);
+ cout << fixed << setprecision(2) << rps / lastrps;
+ if( needver ) {
+ cout << " " << o.getFieldDotted("info.git").toString();
+ }
+ cout << '\n';
+ }
+ }
+ } catch(...) { }
+ cout.flush();
+ needver = false;
+ }
+ {
+ bob b;
+ b.append("host", getHostName());
+ b.appendTimeT("when", time(0));
+ b.append("test", s);
+ b.append("rps", (int) rps);
+ b.append("millis", ms);
+ b.appendBool("dur", cmdLine.dur);
+ if( showDurStats() && cmdLine.dur )
+ b.append("durStats", dur::stats.curr->_asObj());
+ {
+ bob inf;
+ inf.append("version", versionString);
+ if( sizeof(int*) == 4 ) inf.append("bits", 32);
+ DEV inf.append("DEBUG", true);
+#if defined(_WIN32)
+ inf.append("os", "win");
+#endif
+ inf.append("git", gitVersion());
+ inf.append("boost", BOOST_VERSION);
+ b.append("info", inf.obj());
+ }
+ BSONObj o = b.obj();
+ //cout << "inserting " << o.toString() << endl;
+ try {
+ conn->insert(ns, o);
+ }
+ catch ( std::exception& e ) {
+ warning() << "couldn't save perf results: " << e.what() << endl;
+ }
+ }
+ }
}
void run() {
_ns = string("perftest.") + name();
@@ -123,33 +282,54 @@ namespace PerfTests {
prep();
int hlm = howLongMillis();
+ DEV {
+ // don't run very long with _DEBUG - not very meaningful anyway on that build
+ hlm = min(hlm, 500);
+ }
dur::stats._intervalMicros = 0; // no auto rotate
dur::stats.curr->reset();
- Timer t;
+ mongo::Timer t;
unsigned long long n = 0;
- const unsigned Batch = 50;
- do {
- unsigned i;
- for( i = 0; i < Batch; i++ )
- timed();
- n += i;
+ const unsigned Batch = batchSize();
+
+ if( hlm == 0 ) {
+ // means just do once
+ timed();
+ }
+ else {
+ do {
+ unsigned i;
+ for( i = 0; i < Batch; i++ )
+ timed();
+ n += i;
+ } while( t.millis() < hlm );
}
- while( t.millis() < hlm );
+
client().getLastError(); // block until all ops are finished
int ms = t.millis();
+
say(n, ms, name());
- if( n < expectation() ) {
- cout << "\ntest " << name() << " seems slow n:" << n << " ops/sec but expect greater than:" << expectation() << endl;
- cout << endl;
+ int etm = expectationTimeMillis();
+ DEV {
+ }
+ else if( etm > 0 ) {
+ if( ms > etm*2 ) {
+ cout << "test " << name() << " seems slow expected ~" << etm << "ms" << endl;
+ }
+ }
+ else if( n < expectation() ) {
+ cout << "test " << name() << " seems slow n:" << n << " ops/sec but expect greater than:" << expectation() << endl;
}
+ post();
+
{
- const char *test2name = timed2();
- if( test2name ) {
+ string test2name = timed2();
+ if( test2name.size() != 0 ) {
dur::stats.curr->reset();
- Timer t;
+ mongo::Timer t;
unsigned long long n = 0;
while( 1 ) {
unsigned i;
@@ -166,12 +346,374 @@ namespace PerfTests {
}
};
+ DBClientConnection *B::conn;
+ unsigned B::once;
+
+ unsigned dontOptimizeOutHopefully;
+
+ class NonDurTest : public B {
+ public:
+ virtual int howLongMillis() { return 3000; }
+ virtual bool showDurStats() { return false; }
+ };
+
+ class BSONIter : public NonDurTest {
+ public:
+ int n;
+ bo b, sub;
+ string name() { return "BSONIter"; }
+ BSONIter() {
+ n = 0;
+ bo sub = bob().appendTimeT("t", time(0)).appendBool("abool", true).appendBinData("somebin", 3, BinDataGeneral, "abc").appendNull("anullone").obj();
+ b = BSON( "_id" << OID() << "x" << 3 << "yaaaaaa" << 3.00009 << "zz" << 1 << "q" << false << "obj" << sub << "zzzzzzz" << "a string a string" );
+ }
+ void timed() {
+ for( bo::iterator i = b.begin(); i.more(); )
+ if( i.next().fieldName() )
+ n++;
+ for( bo::iterator i = sub.begin(); i.more(); )
+ if( i.next().fieldName() )
+ n++;
+ }
+ };
+
+ class BSONGetFields1 : public NonDurTest {
+ public:
+ int n;
+ bo b, sub;
+ string name() { return "BSONGetFields1By1"; }
+ BSONGetFields1() {
+ n = 0;
+ bo sub = bob().appendTimeT("t", time(0)).appendBool("abool", true).appendBinData("somebin", 3, BinDataGeneral, "abc").appendNull("anullone").obj();
+ b = BSON( "_id" << OID() << "x" << 3 << "yaaaaaa" << 3.00009 << "zz" << 1 << "q" << false << "obj" << sub << "zzzzzzz" << "a string a string" );
+ }
+ void timed() {
+ if( b["x"].eoo() )
+ n++;
+ if( b["q"].eoo() )
+ n++;
+ if( b["zzz"].eoo() )
+ n++;
+ }
+ };
+
+ class BSONGetFields2 : public BSONGetFields1 {
+ public:
+ string name() { return "BSONGetFields"; }
+ void timed() {
+ static const char *names[] = { "x", "q", "zzz" };
+ BSONElement elements[3];
+ b.getFields(3, names, elements);
+ if( elements[0].eoo() )
+ n++;
+ if( elements[1].eoo() )
+ n++;
+ if( elements[2].eoo() )
+ n++;
+ }
+ };
+
+ class KeyTest : public B {
+ public:
+ KeyV1Owned a,b,c;
+ string name() { return "Key-woequal"; }
+ virtual int howLongMillis() { return 3000; }
+ KeyTest() :
+ a(BSON("a"<<1<<"b"<<3.0<<"c"<<"qqq")),
+ b(BSON("a"<<1<<"b"<<3.0<<"c"<<"qqq")),
+ c(BSON("a"<<1<<"b"<<3.0<<"c"<<"qqqb"))
+ {}
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ assert( a.woEqual(b) );
+ assert( !a.woEqual(c) );
+ }
+ };
+
+ unsigned long long aaa;
+
+ class Timer : public B {
+ public:
+ string name() { return "Timer"; }
+ virtual int howLongMillis() { return 1000; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ mongo::Timer t;
+ aaa += t.millis();
+ }
+ };
+
+ RWLock lk("testrw");
+ SimpleMutex m("simptst");
+ mongo::mutex mtest("mtest");
+ SpinLock s;
+
+ class mutexspeed : public B {
+ public:
+ string name() { return "mutex"; }
+ virtual int howLongMillis() { return 500; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ mongo::mutex::scoped_lock lk(mtest);
+ }
+ };
+ class simplemutexspeed : public B {
+ public:
+ string name() { return "simplemutex"; }
+ virtual int howLongMillis() { return 500; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ SimpleMutex::scoped_lock lk(m);
+ }
+ };
+ class spinlockspeed : public B {
+ public:
+ string name() { return "spinlock"; }
+ virtual int howLongMillis() { return 500; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ mongo::scoped_spinlock lk(s);
+ }
+ };
+
+ class rlock : public B {
+ public:
+ string name() { return "rlock"; }
+ virtual int howLongMillis() { return 500; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ lk.lock_shared();
+ lk.unlock_shared();
+ }
+ };
+ class wlock : public B {
+ public:
+ string name() { return "wlock"; }
+ virtual int howLongMillis() { return 500; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ lk.lock();
+ lk.unlock();
+ }
+ };
+
+#if 0
+ class ulock : public B {
+ public:
+ string name() { return "ulock"; }
+ virtual int howLongMillis() { return 500; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ lk.lockAsUpgradable();
+ lk.unlockFromUpgradable();
+ }
+ };
+#endif
+
+ class CTM : public B {
+ public:
+ CTM() : last(0), delts(0), n(0) { }
+ string name() { return "curTimeMillis64"; }
+ virtual int howLongMillis() { return 500; }
+ virtual bool showDurStats() { return false; }
+ unsigned long long last;
+ unsigned long long delts;
+ unsigned n;
+ void timed() {
+ unsigned long long x = curTimeMillis64();
+ aaa += x;
+ if( last ) {
+ unsigned long long delt = x-last;
+ if( delt ) {
+ delts += delt;
+ n++;
+ }
+ }
+ last = x;
+ }
+ void post() {
+ // we need to know if the timer granularity is coarse - that could be relevant in some places
+ if( n )
+ cout << " avg timer granularity: " << ((double)delts)/n << "ms " << endl;
+ }
+ };
+
+ class Bldr : public B {
+ public:
+ int n;
+ string name() { return "BufBuilder"; }
+ Bldr() {
+ }
+ virtual int howLongMillis() { return 3000; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ BufBuilder b;
+ b.appendNum(3);
+ b.appendUChar(' ');
+ b.appendStr("abcd");
+ n += b.len();
+ }
+ };
+
+ class StkBldr : public B {
+ public:
+ virtual int howLongMillis() { return 3000; }
+ int n;
+ string name() { return "StackBufBuilder"; }
+ virtual bool showDurStats() { return false; }
+ void timed() {
+ StackBufBuilder b;
+ b.appendNum(3);
+ b.appendUChar(' ');
+ b.appendStr("abcd");
+ n += b.len();
+ }
+ };
+
+ // baseline: if a test runs about this fast, its body was probably optimized out
+ class Dummy : public B {
+ public:
+ Dummy() { }
+ virtual int howLongMillis() { return 3000; }
+ string name() { return "dummy"; }
+ void timed() {
+ dontOptimizeOutHopefully++;
+ }
+ unsigned long long expectation() { return 1000000; }
+ virtual bool showDurStats() { return false; }
+ };
+
+ // test thread local speed
+ class TLS : public B {
+ public:
+ TLS() { }
+ virtual int howLongMillis() { return 3000; }
+ string name() { return "thread-local-storage"; }
+ void timed() {
+ if( &cc() )
+ dontOptimizeOutHopefully++;
+ }
+ unsigned long long expectation() { return 1000000; }
+ virtual bool showDurStats() { return false; }
+ };
+
+ class Malloc : public B {
+ public:
+ Malloc() { }
+ virtual int howLongMillis() { return 4000; }
+ string name() { return "malloc"; }
+ void timed() {
+ char *p = new char[128];
+ if( dontOptimizeOutHopefully++ > 0 )
+ delete p;
+ }
+ unsigned long long expectation() { return 1000000; }
+ virtual bool showDurStats() { return false; }
+ };
+
+ class Compress : public B {
+ public:
+ const unsigned sz;
+ void *p;
+ Compress() : sz(1024*1024*100+3) { }
+ virtual unsigned batchSize() { return 1; }
+ string name() { return "compress"; }
+ virtual bool showDurStats() { return false; }
+ virtual int howLongMillis() { return 4000; }
+ unsigned long long expectation() { return 1000000; }
+ void prep() {
+ p = malloc(sz);
+ // not an entirely fair test since the data is mostly random, but we just want a rough perf check
+ static int last;
+ for (unsigned i = 0; i<sz; i++) {
+ int r = rand();
+ if( (r & 0x300) == 0x300 )
+ r = last;
+ ((char*)p)[i] = r;
+ last = r;
+ }
+ }
+ size_t last;
+ string res;
+ void timed() {
+ mongo::Timer t;
+ string out;
+ size_t len = compress((const char *) p, sz, &out);
+ bool ok = uncompress(out.c_str(), out.size(), &res);
+ ASSERT(ok);
+ static unsigned once;
+ if( once++ == 0 )
+ cout << "compress round trip " << sz/(1024.0*1024) / (t.millis()/1000.0) << "MB/sec\n";
+ //cout << len / (1024.0*1024) << " MB compressed" << endl;
+ (void)len; // avoid an unused-variable warning while the line above is commented out
+ }
+ void post() {
+ ASSERT( memcmp(res.c_str(), p, sz) == 0 );
+ free(p);
+ }
+ };
+
+ // test speed of checksum method
+ class ChecksumTest : public B {
+ public:
+ const unsigned sz;
+ ChecksumTest() : sz(1024*1024*100+3) { }
+ string name() { return "checksum"; }
+ virtual int howLongMillis() { return 2000; }
+ int expectationTimeMillis() { return 5000; }
+ virtual bool showDurStats() { return false; }
+ virtual unsigned batchSize() { return 1; }
+
+ void *p;
+
+ void prep() {
+ {
+ // the checksum code assumes 'standard' rollover on addition overflows. let's check that:
+ unsigned long long x = 0xffffffffffffffffULL;
+ ASSERT( x+2 == 1 );
+ }
+
+ p = malloc(sz);
+ for (unsigned i = 0; i<sz; i++)
+ ((char*)p)[i] = rand();
+ }
+
+ Checksum last;
+
+ void timed() {
+ static int i;
+ Checksum c;
+ c.gen(p, sz);
+ if( i == 0 )
+ last = c;
+ else if( i == 1 ) {
+ ASSERT( c == last );
+ }
+ }
+ void post() {
+ {
+ mongo::Checksum c;
+ c.gen(p, sz-1);
+ ASSERT( c != last );
+ ((char *&)p)[0]++; // check same data, different order, doesn't give same checksum
+ ((char *&)p)[1]--;
+ c.gen(p, sz);
+ ASSERT( c != last );
+ ((char *&)p)[1]++; // check same data, different order, doesn't give same checksum (different longwords case)
+ ((char *&)p)[8]--;
+ c.gen(p, sz);
+ ASSERT( c != last );
+ }
+ free(p);
+ }
+ };
+
class InsertDup : public B {
const BSONObj o;
public:
InsertDup() : o( BSON("_id" << 1) ) { } // dup keys
string name() {
- return "insert duplicate _ids";
+ return "insert-duplicate-_ids";
}
void prep() {
client().insert( ns(), o );
@@ -185,21 +727,32 @@ namespace PerfTests {
unsigned long long expectation() { return 1000; }
};
- class Insert1 : public InsertDup {
+ class Insert1 : public B {
const BSONObj x;
+ OID oid;
+ BSONObj query;
public:
- Insert1() : x( BSON("x" << 99) ) { }
- string name() { return "insert simple"; }
+ Insert1() : x( BSON("x" << 99) ) {
+ oid.init();
+ query = BSON("_id" << oid);
+ }
+ string name() { return "insert-simple"; }
void timed() {
client().insert( ns(), x );
}
+ string timed2() {
+ client().findOne(ns(), query);
+ return "findOne_by_id";
+ }
void post() {
- assert( client().count(ns()) > 100 );
+#if !defined(_DEBUG)
+ assert( client().count(ns()) > 50 );
+#endif
}
unsigned long long expectation() { return 1000; }
};
- class InsertBig : public InsertDup {
+ class InsertBig : public B {
BSONObj x;
virtual int howLongMillis() {
if( sizeof(void*) == 4 )
@@ -214,7 +767,7 @@ namespace PerfTests {
b.appendBinData("bin", 200000, (BinDataType) 129, buf);
x = b.obj();
}
- string name() { return "insert big"; }
+ string name() { return "insert-big"; }
void timed() {
client().insert( ns(), x );
}
@@ -223,7 +776,7 @@ namespace PerfTests {
class InsertRandom : public B {
public:
- string name() { return "random inserts"; }
+ string name() { return "random-inserts"; }
void prep() {
client().insert( ns(), BSONObj() );
client().ensureIndex(ns(), BSON("x"<<1));
@@ -233,8 +786,6 @@ namespace PerfTests {
BSONObj y = BSON("x" << x << "y" << rand() << "z" << 33);
client().insert(ns(), y);
}
- void post() {
- }
unsigned long long expectation() { return 1000; }
};
@@ -246,7 +797,7 @@ namespace PerfTests {
static int rand() {
return std::rand() & 0x7fff;
}
- string name() { return "random upserts"; }
+ virtual string name() { return "random-upserts"; }
void prep() {
client().insert( ns(), BSONObj() );
client().ensureIndex(ns(), BSON("x"<<1));
@@ -258,7 +809,7 @@ namespace PerfTests {
client().update(ns(), q, y, /*upsert*/true);
}
- const char * timed2() {
+ virtual string timed2() {
static BSONObj I = BSON( "$inc" << BSON( "y" << 1 ) );
// test some $inc's
@@ -267,23 +818,31 @@ namespace PerfTests {
BSONObj q = BSON("x" << x);
client().update(ns(), q, I);
- return "inc";
+ return name()+"-inc";
}
- void post() {
- }
unsigned long long expectation() { return 1000; }
};
template <typename T>
class MoreIndexes : public T {
public:
- string name() { return T::name() + " with more indexes"; }
+ string name() { return T::name() + "-with-more-indexes"; }
void prep() {
T::prep();
this->client().ensureIndex(this->ns(), BSON("y"<<1));
this->client().ensureIndex(this->ns(), BSON("z"<<1));
}
+
+ /*
+ virtual string timed2() {
+ string x = T::timed2();
+ if ( x.size() == 0 )
+ return x;
+
+ return x + "-with-more-indexes";
+ }
+ */
};
void t() {
@@ -310,11 +869,8 @@ namespace PerfTests {
class All : public Suite {
public:
- All() : Suite( "perf" )
- {
- }
- ~All() {
- }
+ All() : Suite( "perf" ) { }
+
Result * run( const string& filter ) {
boost::thread a(t);
Result * res = Suite::run(filter);
@@ -323,14 +879,41 @@ namespace PerfTests {
}
void setupTests() {
- add< TaskQueueTest >();
- add< InsertDup >();
- add< Insert1 >();
- add< InsertRandom >();
- add< MoreIndexes<InsertRandom> >();
- add< Update1 >();
- add< MoreIndexes<Update1> >();
- add< InsertBig >();
+ cout
+ << "stats test rps------ time-- "
+ << dur::stats.curr->_CSVHeader() << endl;
+ if( profiling ) {
+ add< Update1 >();
+ }
+ else {
+ add< Dummy >();
+ add< ChecksumTest >();
+ add< Compress >();
+ add< TLS >();
+ add< Malloc >();
+ add< Timer >();
+ add< rlock >();
+ add< wlock >();
+ //add< ulock >();
+ add< mutexspeed >();
+ add< simplemutexspeed >();
+ add< spinlockspeed >();
+ add< CTM >();
+ add< KeyTest >();
+ add< Bldr >();
+ add< StkBldr >();
+ add< BSONIter >();
+ add< BSONGetFields1 >();
+ add< BSONGetFields2 >();
+ add< TaskQueueTest >();
+ add< InsertDup >();
+ add< Insert1 >();
+ add< InsertRandom >();
+ add< MoreIndexes<InsertRandom> >();
+ add< Update1 >();
+ add< MoreIndexes<Update1> >();
+ add< InsertBig >();
+ }
}
} myall;
}
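The reworked B::run() above drives every perf test the same way: call timed() in batches of batchSize() until howLongMillis() of wall-clock time has passed, then report n*1000/ms as an ops/sec figure via say(). Stripped of the durability stats and the pstats upload, that loop reduces to something like the standalone sketch below (names and the lambda-based harness are illustrative, not part of this change):

    #include <chrono>
    #include <iostream>

    // minimal analog of B::run(): batch the operation, stop after a time budget,
    // and report operations per second
    template <class Op>
    void runTimed(const char* name, Op timed, unsigned batch = 50, int howLongMillis = 5000) {
        using namespace std::chrono;
        steady_clock::time_point start = steady_clock::now();
        unsigned long long n = 0;
        int ms = 0;
        do {
            for (unsigned i = 0; i < batch; ++i)
                timed();
            n += batch;
            ms = (int)duration_cast<milliseconds>(steady_clock::now() - start).count();
        } while (ms < howLongMillis);
        std::cout << name << " " << (ms ? n * 1000 / ms : 0) << "/sec " << ms << "ms" << std::endl;
    }

    int main() {
        volatile unsigned long long sink = 0;
        runTimed("dummy", [&] { sink = sink + 1; });   // plays the role of the Dummy test above
        return 0;
    }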
diff --git a/dbtests/queryoptimizertests.cpp b/dbtests/queryoptimizertests.cpp
index 2d6f752..38d631e 100644
--- a/dbtests/queryoptimizertests.cpp
+++ b/dbtests/queryoptimizertests.cpp
@@ -19,12 +19,13 @@
#include "pch.h"
#include "../db/queryoptimizer.h"
-#include "../db/db.h"
-#include "../db/dbhelpers.h"
+#include "../db/querypattern.h"
#include "../db/instance.h"
-#include "../db/query.h"
+#include "../db/ops/query.h"
+#include "../db/ops/delete.h"
#include "dbtests.h"
+
namespace mongo {
extern BSONObj id_obj;
void runQuery(Message& m, QueryMessage& q, Message &response ) {
@@ -36,704 +37,22 @@ namespace mongo {
Message response;
runQuery( m, q, response );
}
+ void __forceLinkGeoPlugin();
+ shared_ptr<Cursor> newQueryOptimizerCursor( const char *ns, const BSONObj &query, const BSONObj &order = BSONObj() );
} // namespace mongo
namespace QueryOptimizerTests {
- namespace FieldRangeTests {
- class Base {
- public:
- virtual ~Base() {}
- void run() {
- const FieldRangeSet s( "ns", query() );
- checkElt( lower(), s.range( "a" ).min() );
- checkElt( upper(), s.range( "a" ).max() );
- ASSERT_EQUALS( lowerInclusive(), s.range( "a" ).minInclusive() );
- ASSERT_EQUALS( upperInclusive(), s.range( "a" ).maxInclusive() );
- }
- protected:
- virtual BSONObj query() = 0;
- virtual BSONElement lower() { return minKey.firstElement(); }
- virtual bool lowerInclusive() { return true; }
- virtual BSONElement upper() { return maxKey.firstElement(); }
- virtual bool upperInclusive() { return true; }
- static void checkElt( BSONElement expected, BSONElement actual ) {
- if ( expected.woCompare( actual, false ) ) {
- log() << "expected: " << expected << ", got: " << actual;
- ASSERT( false );
- }
- }
- };
-
-
- class NumericBase : public Base {
- public:
- NumericBase() {
- o = BSON( "min" << -numeric_limits<double>::max() << "max" << numeric_limits<double>::max() );
- }
-
- virtual BSONElement lower() { return o["min"]; }
- virtual BSONElement upper() { return o["max"]; }
- private:
- BSONObj o;
- };
-
- class Empty : public Base {
- virtual BSONObj query() { return BSONObj(); }
- };
-
- class Eq : public Base {
- public:
- Eq() : o_( BSON( "a" << 1 ) ) {}
- virtual BSONObj query() { return o_; }
- virtual BSONElement lower() { return o_.firstElement(); }
- virtual BSONElement upper() { return o_.firstElement(); }
- BSONObj o_;
- };
-
- class DupEq : public Eq {
- public:
- virtual BSONObj query() { return BSON( "a" << 1 << "b" << 2 << "a" << 1 ); }
- };
-
- class Lt : public NumericBase {
- public:
- Lt() : o_( BSON( "-" << 1 ) ) {}
- virtual BSONObj query() { return BSON( "a" << LT << 1 ); }
- virtual BSONElement upper() { return o_.firstElement(); }
- virtual bool upperInclusive() { return false; }
- BSONObj o_;
- };
-
- class Lte : public Lt {
- virtual BSONObj query() { return BSON( "a" << LTE << 1 ); }
- virtual bool upperInclusive() { return true; }
- };
-
- class Gt : public NumericBase {
- public:
- Gt() : o_( BSON( "-" << 1 ) ) {}
- virtual BSONObj query() { return BSON( "a" << GT << 1 ); }
- virtual BSONElement lower() { return o_.firstElement(); }
- virtual bool lowerInclusive() { return false; }
- BSONObj o_;
- };
-
- class Gte : public Gt {
- virtual BSONObj query() { return BSON( "a" << GTE << 1 ); }
- virtual bool lowerInclusive() { return true; }
- };
-
- class TwoLt : public Lt {
- virtual BSONObj query() { return BSON( "a" << LT << 1 << LT << 5 ); }
- };
-
- class TwoGt : public Gt {
- virtual BSONObj query() { return BSON( "a" << GT << 0 << GT << 1 ); }
- };
-
- class EqGte : public Eq {
- virtual BSONObj query() { return BSON( "a" << 1 << "a" << GTE << 1 ); }
- };
-
- class EqGteInvalid {
- public:
- void run() {
- FieldRangeSet fbs( "ns", BSON( "a" << 1 << "a" << GTE << 2 ) );
- ASSERT( !fbs.matchPossible() );
- }
- };
-
- struct RegexBase : Base {
- void run() { //need to only look at first interval
- FieldRangeSet s( "ns", query() );
- checkElt( lower(), s.range( "a" ).intervals()[0]._lower._bound );
- checkElt( upper(), s.range( "a" ).intervals()[0]._upper._bound );
- ASSERT_EQUALS( lowerInclusive(), s.range( "a" ).intervals()[0]._lower._inclusive );
- ASSERT_EQUALS( upperInclusive(), s.range( "a" ).intervals()[0]._upper._inclusive );
- }
- };
-
- class Regex : public RegexBase {
- public:
- Regex() : o1_( BSON( "" << "abc" ) ), o2_( BSON( "" << "abd" ) ) {}
- virtual BSONObj query() {
- BSONObjBuilder b;
- b.appendRegex( "a", "^abc" );
- return b.obj();
- }
- virtual BSONElement lower() { return o1_.firstElement(); }
- virtual BSONElement upper() { return o2_.firstElement(); }
- virtual bool upperInclusive() { return false; }
- BSONObj o1_, o2_;
- };
-
- class RegexObj : public RegexBase {
- public:
- RegexObj() : o1_( BSON( "" << "abc" ) ), o2_( BSON( "" << "abd" ) ) {}
- virtual BSONObj query() { return BSON("a" << BSON("$regex" << "^abc")); }
- virtual BSONElement lower() { return o1_.firstElement(); }
- virtual BSONElement upper() { return o2_.firstElement(); }
- virtual bool upperInclusive() { return false; }
- BSONObj o1_, o2_;
- };
-
- class UnhelpfulRegex : public RegexBase {
- public:
- UnhelpfulRegex() {
- BSONObjBuilder b;
- b.appendMinForType("lower", String);
- b.appendMaxForType("upper", String);
- limits = b.obj();
- }
-
- virtual BSONObj query() {
- BSONObjBuilder b;
- b.appendRegex( "a", "abc" );
- return b.obj();
- }
- virtual BSONElement lower() { return limits["lower"]; }
- virtual BSONElement upper() { return limits["upper"]; }
- virtual bool upperInclusive() { return false; }
- BSONObj limits;
- };
-
- class In : public Base {
- public:
- In() : o1_( BSON( "-" << -3 ) ), o2_( BSON( "-" << 44 ) ) {}
- virtual BSONObj query() {
- vector< int > vals;
- vals.push_back( 4 );
- vals.push_back( 8 );
- vals.push_back( 44 );
- vals.push_back( -1 );
- vals.push_back( -3 );
- vals.push_back( 0 );
- BSONObjBuilder bb;
- bb.append( "$in", vals );
- BSONObjBuilder b;
- b.append( "a", bb.done() );
- return b.obj();
- }
- virtual BSONElement lower() { return o1_.firstElement(); }
- virtual BSONElement upper() { return o2_.firstElement(); }
- BSONObj o1_, o2_;
- };
-
- class Equality {
- public:
- void run() {
- FieldRangeSet s( "ns", BSON( "a" << 1 ) );
- ASSERT( s.range( "a" ).equality() );
- FieldRangeSet s2( "ns", BSON( "a" << GTE << 1 << LTE << 1 ) );
- ASSERT( s2.range( "a" ).equality() );
- FieldRangeSet s3( "ns", BSON( "a" << GT << 1 << LTE << 1 ) );
- ASSERT( !s3.range( "a" ).equality() );
- FieldRangeSet s4( "ns", BSON( "a" << GTE << 1 << LT << 1 ) );
- ASSERT( !s4.range( "a" ).equality() );
- FieldRangeSet s5( "ns", BSON( "a" << GTE << 1 << LTE << 1 << GT << 1 ) );
- ASSERT( !s5.range( "a" ).equality() );
- FieldRangeSet s6( "ns", BSON( "a" << GTE << 1 << LTE << 1 << LT << 1 ) );
- ASSERT( !s6.range( "a" ).equality() );
- }
- };
-
- class SimplifiedQuery {
- public:
- void run() {
- FieldRangeSet fbs( "ns", BSON( "a" << GT << 1 << GT << 5 << LT << 10 << "b" << 4 << "c" << LT << 4 << LT << 6 << "d" << GTE << 0 << GT << 0 << "e" << GTE << 0 << LTE << 10 ) );
- BSONObj simple = fbs.simplifiedQuery();
- cout << "simple: " << simple << endl;
- ASSERT( !simple.getObjectField( "a" ).woCompare( fromjson( "{$gt:5,$lt:10}" ) ) );
- ASSERT_EQUALS( 4, simple.getIntField( "b" ) );
- ASSERT( !simple.getObjectField( "c" ).woCompare( BSON("$gte" << -numeric_limits<double>::max() << "$lt" << 4 ) ) );
- ASSERT( !simple.getObjectField( "d" ).woCompare( BSON("$gt" << 0 << "$lte" << numeric_limits<double>::max() ) ) );
- ASSERT( !simple.getObjectField( "e" ).woCompare( fromjson( "{$gte:0,$lte:10}" ) ) );
- }
- };
-
- class QueryPatternTest {
- public:
- void run() {
- ASSERT( p( BSON( "a" << 1 ) ) == p( BSON( "a" << 1 ) ) );
- ASSERT( p( BSON( "a" << 1 ) ) == p( BSON( "a" << 5 ) ) );
- ASSERT( p( BSON( "a" << 1 ) ) != p( BSON( "b" << 1 ) ) );
- ASSERT( p( BSON( "a" << 1 ) ) != p( BSON( "a" << LTE << 1 ) ) );
- ASSERT( p( BSON( "a" << 1 ) ) != p( BSON( "a" << 1 << "b" << 2 ) ) );
- ASSERT( p( BSON( "a" << 1 << "b" << 3 ) ) != p( BSON( "a" << 1 ) ) );
- ASSERT( p( BSON( "a" << LT << 1 ) ) == p( BSON( "a" << LTE << 5 ) ) );
- ASSERT( p( BSON( "a" << LT << 1 << GTE << 0 ) ) == p( BSON( "a" << LTE << 5 << GTE << 0 ) ) );
- ASSERT( p( BSON( "a" << 1 ) ) < p( BSON( "a" << 1 << "b" << 1 ) ) );
- ASSERT( !( p( BSON( "a" << 1 << "b" << 1 ) ) < p( BSON( "a" << 1 ) ) ) );
- ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) == p( BSON( "a" << 4 ), BSON( "b" << "a" ) ) );
- ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) == p( BSON( "a" << 4 ), BSON( "b" << -1 ) ) );
- ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) != p( BSON( "a" << 4 ), BSON( "c" << 1 ) ) );
- ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 << "c" << -1 ) ) == p( BSON( "a" << 4 ), BSON( "b" << -1 << "c" << 1 ) ) );
- ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 << "c" << 1 ) ) != p( BSON( "a" << 4 ), BSON( "b" << 1 ) ) );
- ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) != p( BSON( "a" << 4 ), BSON( "b" << 1 << "c" << 1 ) ) );
- }
- private:
- static QueryPattern p( const BSONObj &query, const BSONObj &sort = BSONObj() ) {
- return FieldRangeSet( "", query ).pattern( sort );
- }
- };
-
- class NoWhere {
- public:
- void run() {
- ASSERT_EQUALS( 0, FieldRangeSet( "ns", BSON( "$where" << 1 ) ).nNontrivialRanges() );
- }
- };
-
- class Numeric {
- public:
- void run() {
- FieldRangeSet f( "", BSON( "a" << 1 ) );
- ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 2.0 ).firstElement() ) < 0 );
- ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 0.0 ).firstElement() ) > 0 );
- }
- };
-
- class InLowerBound {
- public:
- void run() {
- FieldRangeSet f( "", fromjson( "{a:{$gt:4,$in:[1,2,3,4,5,6]}}" ) );
- ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 5.0 ).firstElement(), false ) == 0 );
- ASSERT( f.range( "a" ).max().woCompare( BSON( "a" << 6.0 ).firstElement(), false ) == 0 );
- }
- };
-
- class InUpperBound {
- public:
- void run() {
- FieldRangeSet f( "", fromjson( "{a:{$lt:4,$in:[1,2,3,4,5,6]}}" ) );
- ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 1.0 ).firstElement(), false ) == 0 );
- ASSERT( f.range( "a" ).max().woCompare( BSON( "a" << 3.0 ).firstElement(), false ) == 0 );
- }
- };
-
- class UnionBound {
- public:
- void run() {
- FieldRangeSet frs( "", fromjson( "{a:{$gt:1,$lt:9},b:{$gt:9,$lt:12}}" ) );
- FieldRange ret = frs.range( "a" );
- ret |= frs.range( "b" );
- ASSERT_EQUALS( 2U, ret.intervals().size() );
- }
- };
-
- class MultiBound {
- public:
- void run() {
- FieldRangeSet frs1( "", fromjson( "{a:{$in:[1,3,5,7,9]}}" ) );
- FieldRangeSet frs2( "", fromjson( "{a:{$in:[2,3,5,8,9]}}" ) );
- FieldRange fr1 = frs1.range( "a" );
- FieldRange fr2 = frs2.range( "a" );
- fr1 &= fr2;
- ASSERT( fr1.min().woCompare( BSON( "a" << 3.0 ).firstElement(), false ) == 0 );
- ASSERT( fr1.max().woCompare( BSON( "a" << 9.0 ).firstElement(), false ) == 0 );
- vector< FieldInterval > intervals = fr1.intervals();
- vector< FieldInterval >::const_iterator j = intervals.begin();
- double expected[] = { 3, 5, 9 };
- for( int i = 0; i < 3; ++i, ++j ) {
- ASSERT_EQUALS( expected[ i ], j->_lower._bound.number() );
- ASSERT( j->_lower._inclusive );
- ASSERT( j->_lower == j->_upper );
- }
- ASSERT( j == intervals.end() );
- }
- };
-
- class DiffBase {
- public:
- virtual ~DiffBase() {}
- void run() {
- FieldRangeSet frs( "", fromjson( obj().toString() ) );
- FieldRange ret = frs.range( "a" );
- ret -= frs.range( "b" );
- check( ret );
- }
- protected:
- void check( const FieldRange &fr ) {
- vector< FieldInterval > fi = fr.intervals();
- ASSERT_EQUALS( len(), fi.size() );
- int i = 0;
- for( vector< FieldInterval >::const_iterator j = fi.begin(); j != fi.end(); ++j ) {
- ASSERT_EQUALS( nums()[ i ], j->_lower._bound.numberInt() );
- ASSERT_EQUALS( incs()[ i ], j->_lower._inclusive );
- ++i;
- ASSERT_EQUALS( nums()[ i ], j->_upper._bound.numberInt() );
- ASSERT_EQUALS( incs()[ i ], j->_upper._inclusive );
- ++i;
- }
- }
- virtual unsigned len() const = 0;
- virtual const int *nums() const = 0;
- virtual const bool *incs() const = 0;
- virtual BSONObj obj() const = 0;
- };
-
- class TwoRangeBase : public DiffBase {
- public:
- TwoRangeBase( string obj, int low, int high, bool lowI, bool highI )
- : _obj( obj ) {
- _n[ 0 ] = low;
- _n[ 1 ] = high;
- _b[ 0 ] = lowI;
- _b[ 1 ] = highI;
- }
- private:
- virtual unsigned len() const { return 1; }
- virtual const int *nums() const { return _n; }
- virtual const bool *incs() const { return _b; }
- virtual BSONObj obj() const { return fromjson( _obj ); }
- string _obj;
- int _n[ 2 ];
- bool _b[ 2 ];
- };
-
- struct Diff1 : public TwoRangeBase {
- Diff1() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gt:3,$lt:4}}", 1, 2, false, false ) {}
- };
-
- struct Diff2 : public TwoRangeBase {
- Diff2() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gt:2,$lt:4}}", 1, 2, false, false ) {}
- };
-
- struct Diff3 : public TwoRangeBase {
- Diff3() : TwoRangeBase( "{a:{$gt:1,$lte:2},b:{$gt:2,$lt:4}}", 1, 2, false, true ) {}
- };
-
- struct Diff4 : public TwoRangeBase {
- Diff4() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
- };
-
- struct Diff5 : public TwoRangeBase {
- Diff5() : TwoRangeBase( "{a:{$gt:1,$lte:2},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
- };
-
- struct Diff6 : public TwoRangeBase {
- Diff6() : TwoRangeBase( "{a:{$gt:1,$lte:3},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
- };
-
- struct Diff7 : public TwoRangeBase {
- Diff7() : TwoRangeBase( "{a:{$gt:1,$lte:3},b:{$gt:2,$lt:4}}", 1, 2, false, true) {}
- };
-
- struct Diff8 : public TwoRangeBase {
- Diff8() : TwoRangeBase( "{a:{$gt:1,$lt:4},b:{$gt:2,$lt:4}}", 1, 2, false, true) {}
- };
-
- struct Diff9 : public TwoRangeBase {
- Diff9() : TwoRangeBase( "{a:{$gt:1,$lt:4},b:{$gt:2,$lte:4}}", 1, 2, false, true) {}
- };
-
- struct Diff10 : public TwoRangeBase {
- Diff10() : TwoRangeBase( "{a:{$gt:1,$lte:4},b:{$gt:2,$lte:4}}", 1, 2, false, true) {}
- };
-
- class SplitRangeBase : public DiffBase {
- public:
- SplitRangeBase( string obj, int low1, bool low1I, int high1, bool high1I, int low2, bool low2I, int high2, bool high2I )
- : _obj( obj ) {
- _n[ 0 ] = low1;
- _n[ 1 ] = high1;
- _n[ 2 ] = low2;
- _n[ 3 ] = high2;
- _b[ 0 ] = low1I;
- _b[ 1 ] = high1I;
- _b[ 2 ] = low2I;
- _b[ 3 ] = high2I;
- }
- private:
- virtual unsigned len() const { return 2; }
- virtual const int *nums() const { return _n; }
- virtual const bool *incs() const { return _b; }
- virtual BSONObj obj() const { return fromjson( _obj ); }
- string _obj;
- int _n[ 4 ];
- bool _b[ 4 ];
- };
-
- struct Diff11 : public SplitRangeBase {
- Diff11() : SplitRangeBase( "{a:{$gt:1,$lte:4},b:{$gt:2,$lt:4}}", 1, false, 2, true, 4, true, 4, true) {}
- };
-
- struct Diff12 : public SplitRangeBase {
- Diff12() : SplitRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:2,$lt:4}}", 1, false, 2, true, 4, true, 5, false) {}
- };
-
- struct Diff13 : public TwoRangeBase {
- Diff13() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:1,$lt:4}}", 4, 5, true, false) {}
- };
-
- struct Diff14 : public SplitRangeBase {
- Diff14() : SplitRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lt:4}}", 1, true, 1, true, 4, true, 5, false) {}
- };
-
- struct Diff15 : public TwoRangeBase {
- Diff15() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gte:1,$lt:4}}", 4, 5, true, false) {}
- };
-
- struct Diff16 : public TwoRangeBase {
- Diff16() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:{$gte:1,$lt:4}}", 4, 5, true, false) {}
- };
-
- struct Diff17 : public TwoRangeBase {
- Diff17() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lt:4}}", 4, 5, true, false) {}
- };
-
- struct Diff18 : public TwoRangeBase {
- Diff18() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lte:4}}", 4, 5, false, false) {}
- };
-
- struct Diff19 : public TwoRangeBase {
- Diff19() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gte:0,$lte:1}}", 1, 5, false, true) {}
- };
-
- struct Diff20 : public TwoRangeBase {
- Diff20() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:0,$lte:1}}", 1, 5, false, true) {}
- };
-
- struct Diff21 : public TwoRangeBase {
- Diff21() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gte:0,$lt:1}}", 1, 5, true, true) {}
- };
-
- struct Diff22 : public TwoRangeBase {
- Diff22() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:0,$lt:1}}", 1, 5, false, true) {}
- };
-
- struct Diff23 : public TwoRangeBase {
- Diff23() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:0,$lt:0.5}}", 1, 5, false, true) {}
- };
-
- struct Diff24 : public TwoRangeBase {
- Diff24() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:0}", 1, 5, false, true) {}
- };
-
- struct Diff25 : public TwoRangeBase {
- Diff25() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:0}", 1, 5, true, true) {}
- };
-
- struct Diff26 : public TwoRangeBase {
- Diff26() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:1}", 1, 5, false, true) {}
- };
-
- struct Diff27 : public TwoRangeBase {
- Diff27() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:1}", 1, 5, false, true) {}
- };
-
- struct Diff28 : public SplitRangeBase {
- Diff28() : SplitRangeBase( "{a:{$gte:1,$lte:5},b:3}", 1, true, 3, false, 3, false, 5, true) {}
- };
-
- struct Diff29 : public TwoRangeBase {
- Diff29() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:5}", 1, 5, true, false) {}
- };
-
- struct Diff30 : public TwoRangeBase {
- Diff30() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:5}", 1, 5, true, false) {}
- };
-
- struct Diff31 : public TwoRangeBase {
- Diff31() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:6}", 1, 5, true, false) {}
- };
-
- struct Diff32 : public TwoRangeBase {
- Diff32() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:6}", 1, 5, true, true) {}
- };
-
- class EmptyBase : public DiffBase {
- public:
- EmptyBase( string obj )
- : _obj( obj ) {}
- private:
- virtual unsigned len() const { return 0; }
- virtual const int *nums() const { return 0; }
- virtual const bool *incs() const { return 0; }
- virtual BSONObj obj() const { return fromjson( _obj ); }
- string _obj;
- };
-
- struct Diff33 : public EmptyBase {
- Diff33() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gt:0,$lt:6}}" ) {}
- };
-
- struct Diff34 : public EmptyBase {
- Diff34() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gte:1,$lt:6}}" ) {}
- };
-
- struct Diff35 : public EmptyBase {
- Diff35() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gte:1,$lt:6}}" ) {}
- };
-
- struct Diff36 : public EmptyBase {
- Diff36() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gt:1,$lt:6}}" ) {}
- };
-
- struct Diff37 : public TwoRangeBase {
- Diff37() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lt:6}}", 1, 1, true, true ) {}
- };
-
- struct Diff38 : public EmptyBase {
- Diff38() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lt:5}}" ) {}
- };
-
- struct Diff39 : public EmptyBase {
- Diff39() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lte:5}}" ) {}
- };
-
- struct Diff40 : public EmptyBase {
- Diff40() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gt:0,$lte:5}}" ) {}
- };
-
- struct Diff41 : public TwoRangeBase {
- Diff41() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:0,$lt:5}}", 5, 5, true, true ) {}
- };
-
- struct Diff42 : public EmptyBase {
- Diff42() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:1,$lt:5}}" ) {}
- };
-
- struct Diff43 : public EmptyBase {
- Diff43() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:1,$lte:5}}" ) {}
- };
-
- struct Diff44 : public EmptyBase {
- Diff44() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gte:1,$lt:5}}" ) {}
- };
-
- struct Diff45 : public EmptyBase {
- Diff45() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gte:1,$lte:5}}" ) {}
- };
-
- struct Diff46 : public TwoRangeBase {
- Diff46() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gt:1,$lt:5}}", 5, 5, true, true ) {}
- };
-
- struct Diff47 : public EmptyBase {
- Diff47() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gt:1,$lte:5}}" ) {}
- };
-
- struct Diff48 : public TwoRangeBase {
- Diff48() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:1,$lt:5}}", 5, 5, true, true ) {}
- };
-
- struct Diff49 : public EmptyBase {
- Diff49() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gte:1,$lte:5}}" ) {}
- };
-
- struct Diff50 : public TwoRangeBase {
- Diff50() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lt:5}}", 1, 1, true, true ) {}
- };
-
- struct Diff51 : public TwoRangeBase {
- Diff51() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lte:5}}", 1, 1, true, true ) {}
- };
-
- struct Diff52 : public EmptyBase {
- Diff52() : EmptyBase( "{a:{$gte:1,$lt:5},b:{$gte:1,$lt:5}}" ) {}
- };
-
- struct Diff53 : public EmptyBase {
- Diff53() : EmptyBase( "{a:{$gte:1,$lt:5},b:{$gte:1,$lte:5}}" ) {}
- };
-
- struct Diff54 : public SplitRangeBase {
- Diff54() : SplitRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lt:5}}", 1, true, 1, true, 5, true, 5, true ) {}
- };
-
- struct Diff55 : public TwoRangeBase {
- Diff55() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lte:5}}", 1, 1, true, true ) {}
- };
-
- struct Diff56 : public TwoRangeBase {
- Diff56() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gte:1,$lt:5}}", 5, 5, true, true ) {}
- };
-
- struct Diff57 : public EmptyBase {
- Diff57() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gte:1,$lte:5}}" ) {}
- };
-
- struct Diff58 : public TwoRangeBase {
- Diff58() : TwoRangeBase( "{a:1,b:{$gt:1,$lt:5}}", 1, 1, true, true ) {}
- };
-
- struct Diff59 : public EmptyBase {
- Diff59() : EmptyBase( "{a:1,b:{$gte:1,$lt:5}}" ) {}
- };
-
- struct Diff60 : public EmptyBase {
- Diff60() : EmptyBase( "{a:2,b:{$gte:1,$lt:5}}" ) {}
- };
-
- struct Diff61 : public EmptyBase {
- Diff61() : EmptyBase( "{a:5,b:{$gte:1,$lte:5}}" ) {}
- };
-
- struct Diff62 : public TwoRangeBase {
- Diff62() : TwoRangeBase( "{a:5,b:{$gt:1,$lt:5}}", 5, 5, true, true ) {}
- };
-
- struct Diff63 : public EmptyBase {
- Diff63() : EmptyBase( "{a:5,b:5}" ) {}
- };
-
- struct Diff64 : public TwoRangeBase {
- Diff64() : TwoRangeBase( "{a:{$gte:1,$lte:2},b:{$gt:0,$lte:1}}", 1, 2, false, true ) {}
- };
-
- class DiffMulti1 : public DiffBase {
- public:
- void run() {
- FieldRangeSet frs( "", fromjson( "{a:{$gt:1,$lt:9},b:{$gt:0,$lt:2},c:3,d:{$gt:4,$lt:5},e:{$gt:7,$lt:10}}" ) );
- FieldRange ret = frs.range( "a" );
- FieldRange other = frs.range( "b" );
- other |= frs.range( "c" );
- other |= frs.range( "d" );
- other |= frs.range( "e" );
- ret -= other;
- check( ret );
- }
- protected:
- virtual unsigned len() const { return 3; }
- virtual const int *nums() const { static int n[] = { 2, 3, 3, 4, 5, 7 }; return n; }
- virtual const bool *incs() const { static bool b[] = { true, false, false, true, true, true }; return b; }
- virtual BSONObj obj() const { return BSONObj(); }
- };
-
- class DiffMulti2 : public DiffBase {
- public:
- void run() {
- FieldRangeSet frs( "", fromjson( "{a:{$gt:1,$lt:9},b:{$gt:0,$lt:2},c:3,d:{$gt:4,$lt:5},e:{$gt:7,$lt:10}}" ) );
- FieldRange mask = frs.range( "a" );
- FieldRange ret = frs.range( "b" );
- ret |= frs.range( "c" );
- ret |= frs.range( "d" );
- ret |= frs.range( "e" );
- ret -= mask;
- check( ret );
- }
- protected:
- virtual unsigned len() const { return 2; }
- virtual const int *nums() const { static int n[] = { 0, 1, 9, 10 }; return n; }
- virtual const bool *incs() const { static bool b[] = { false, true, true, false }; return b; }
- virtual BSONObj obj() const { return BSONObj(); }
- };
-
- class SetIntersect {
- public:
- void run() {
- FieldRangeSet frs1( "", fromjson( "{b:{$in:[5,6]},c:7,d:{$in:[8,9]}}" ) );
- FieldRangeSet frs2( "", fromjson( "{a:1,b:5,c:{$in:[7,8]},d:{$in:[8,9]},e:10}" ) );
- frs1 &= frs2;
- ASSERT_EQUALS( fromjson( "{a:1,b:5,c:7,d:{$gte:8,$lte:9},e:10}" ), frs1.simplifiedQuery( BSONObj() ) );
- }
- };
-    } // namespace FieldRangeTests
-    namespace QueryPlanTests {
+    void dropCollection( const char *ns ) {
+        string errmsg;
+        BSONObjBuilder result;
+        dropCollection( ns, errmsg, result );
+    }
+
+    namespace QueryPlanTests {
+        using boost::shared_ptr;
class Base {
public:
Base() : _ctx( ns() ) , indexNum_( 0 ) {
@@ -743,8 +62,7 @@ namespace QueryOptimizerTests {
~Base() {
if ( !nsd() )
return;
- string s( ns() );
- dropNS( s );
+ dropCollection( ns() );
}
protected:
static const char *ns() { return "unittests.QueryPlanTests"; }
@@ -783,15 +101,15 @@ namespace QueryOptimizerTests {
// There's a limit of 10 indexes total, make sure not to exceed this in a given test.
#define INDEXNO(x) nsd()->idxNo( *this->index( BSON(x) ) )
#define INDEX(x) this->index( BSON(x) )
- auto_ptr< FieldRangeSet > FieldRangeSet_GLOBAL;
-#define FBS(x) ( FieldRangeSet_GLOBAL.reset( new FieldRangeSet( ns(), x ) ), *FieldRangeSet_GLOBAL )
- auto_ptr< FieldRangeSet > FieldRangeSet_GLOBAL2;
-#define FBS2(x) ( FieldRangeSet_GLOBAL2.reset( new FieldRangeSet( ns(), x ) ), *FieldRangeSet_GLOBAL2 )
+ auto_ptr< FieldRangeSetPair > FieldRangeSetPair_GLOBAL;
+#define FRSP(x) ( FieldRangeSetPair_GLOBAL.reset( new FieldRangeSetPair( ns(), x ) ), *FieldRangeSetPair_GLOBAL )
+ auto_ptr< FieldRangeSetPair > FieldRangeSetPair_GLOBAL2;
+#define FRSP2(x) ( FieldRangeSetPair_GLOBAL2.reset( new FieldRangeSetPair( ns(), x ) ), FieldRangeSetPair_GLOBAL2.get() )
class NoIndex : public Base {
public:
void run() {
- QueryPlan p( nsd(), -1, FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSONObj() );
+ QueryPlan p( nsd(), -1, FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSONObj() );
ASSERT( !p.optimal() );
ASSERT( !p.scanAndOrderRequired() );
ASSERT( !p.exactKeyMatch() );
@@ -808,13 +126,13 @@ namespace QueryOptimizerTests {
b2.appendMaxKey( "" );
BSONObj end = b2.obj();
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p.scanAndOrderRequired() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << 1 ) );
ASSERT( !p2.scanAndOrderRequired() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "b" << 1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "b" << 1 ) );
ASSERT( p3.scanAndOrderRequired() );
ASSERT( !startKey( p3 ).woCompare( start ) );
ASSERT( !endKey( p3 ).woCompare( end ) );
@@ -824,7 +142,7 @@ namespace QueryOptimizerTests {
class MoreIndexThanNeeded : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p.scanAndOrderRequired() );
}
};
@@ -832,13 +150,13 @@ namespace QueryOptimizerTests {
class IndexSigns : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << -1 ) , FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << -1 ) , FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
ASSERT( !p.scanAndOrderRequired() );
ASSERT_EQUALS( 1, p.direction() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
ASSERT( p2.scanAndOrderRequired() );
ASSERT_EQUALS( 0, p2.direction() );
- QueryPlan p3( nsd(), indexno( id_obj ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "_id" << 1 ) );
+ QueryPlan p3( nsd(), indexno( id_obj ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "_id" << 1 ) );
ASSERT( !p3.scanAndOrderRequired() );
ASSERT_EQUALS( 1, p3.direction() );
}
@@ -855,15 +173,15 @@ namespace QueryOptimizerTests {
b2.appendMaxKey( "" );
b2.appendMinKey( "" );
BSONObj end = b2.obj();
- QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ),FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ),FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 << "b" << -1 ) );
ASSERT( !p.scanAndOrderRequired() );
ASSERT_EQUALS( -1, p.direction() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
ASSERT( !p2.scanAndOrderRequired() );
ASSERT_EQUALS( -1, p2.direction() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << -1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << -1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << -1 << "b" << -1 ) );
ASSERT( p3.scanAndOrderRequired() );
ASSERT_EQUALS( 0, p3.direction() );
}
@@ -880,11 +198,11 @@ namespace QueryOptimizerTests {
b2.append( "", 3 );
b2.appendMaxKey( "" );
BSONObj end = b2.obj();
- QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FBS( BSON( "a" << 3 ) ), FBS2( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
+ QueryPlan p( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FRSP( BSON( "a" << 3 ) ), FRSP2( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
ASSERT( !p.scanAndOrderRequired() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
- QueryPlan p2( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FBS( BSON( "a" << 3 ) ), FBS2( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
+ QueryPlan p2( nsd(), INDEXNO( "a" << -1 << "b" << 1 ), FRSP( BSON( "a" << 3 ) ), FRSP2( BSON( "a" << 3 ) ), BSON( "a" << 3 ), BSONObj() );
ASSERT( !p2.scanAndOrderRequired() );
ASSERT( !startKey( p ).woCompare( start ) );
ASSERT( !endKey( p ).woCompare( end ) );
@@ -894,11 +212,11 @@ namespace QueryOptimizerTests {
class EqualWithOrder : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 4 ) ), FBS2( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "a" << 4 ) ), FRSP2( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT( !p.scanAndOrderRequired() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "b" << 4 ) ), FBS2( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FRSP( BSON( "b" << 4 ) ), FRSP2( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
ASSERT( !p2.scanAndOrderRequired() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 4 ) ), FBS2( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "b" << 4 ) ), FRSP2( BSON( "b" << 4 ) ), BSON( "b" << 4 ), BSON( "a" << 1 << "c" << 1 ) );
ASSERT( p3.scanAndOrderRequired() );
}
};
@@ -906,23 +224,23 @@ namespace QueryOptimizerTests {
class Optimal : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( p.optimal() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( p2.optimal() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 ) ), FBS2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "a" << 1 ) ), FRSP2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "a" << 1 ) );
ASSERT( p3.optimal() );
- QueryPlan p4( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), FBS2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p4( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "b" << 1 ) ), FRSP2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "a" << 1 ) );
ASSERT( !p4.optimal() );
- QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 ) ), FBS2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "b" << 1 ) );
+ QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "a" << 1 ) ), FRSP2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSON( "b" << 1 ) );
ASSERT( p5.optimal() );
- QueryPlan p6( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), FBS2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "b" << 1 ) );
+ QueryPlan p6( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "b" << 1 ) ), FRSP2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSON( "b" << 1 ) );
ASSERT( !p6.optimal() );
- QueryPlan p7( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 << "b" << 1 ) ), FBS2( BSON( "a" << 1 << "b" << 1 ) ), BSON( "a" << 1 << "b" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p7( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "a" << 1 << "b" << 1 ) ), FRSP2( BSON( "a" << 1 << "b" << 1 ) ), BSON( "a" << 1 << "b" << 1 ), BSON( "a" << 1 ) );
ASSERT( p7.optimal() );
- QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), FBS2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "a" << 1 << "b" << LT << 1 ) ), FRSP2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
ASSERT( p8.optimal() );
- QueryPlan p9( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), FBS2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p9( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FRSP( BSON( "a" << 1 << "b" << LT << 1 ) ), FRSP2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSON( "a" << 1 ) );
ASSERT( p9.optimal() );
}
};
@@ -930,13 +248,13 @@ namespace QueryOptimizerTests {
class MoreOptimal : public Base {
public:
void run() {
- QueryPlan p10( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 ) ), FBS2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSONObj() );
+ QueryPlan p10( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FRSP( BSON( "a" << 1 ) ), FRSP2( BSON( "a" << 1 ) ), BSON( "a" << 1 ), BSONObj() );
ASSERT( p10.optimal() );
- QueryPlan p11( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << 1 << "b" << LT << 1 ) ), FBS2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSONObj() );
+ QueryPlan p11( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FRSP( BSON( "a" << 1 << "b" << LT << 1 ) ), FRSP2( BSON( "a" << 1 << "b" << LT << 1 ) ), BSON( "a" << 1 << "b" << LT << 1 ), BSONObj() );
ASSERT( p11.optimal() );
- QueryPlan p12( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << LT << 1 ) ), FBS2( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSONObj() );
+ QueryPlan p12( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FRSP( BSON( "a" << LT << 1 ) ), FRSP2( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSONObj() );
ASSERT( p12.optimal() );
- QueryPlan p13( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FBS( BSON( "a" << LT << 1 ) ), FBS2( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p13( nsd(), INDEXNO( "a" << 1 << "b" << 1 << "c" << 1 ), FRSP( BSON( "a" << LT << 1 ) ), FRSP2( BSON( "a" << LT << 1 ) ), BSON( "a" << LT << 1 ), BSON( "a" << 1 ) );
ASSERT( p13.optimal() );
}
};
@@ -944,23 +262,23 @@ namespace QueryOptimizerTests {
class KeyMatch : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p.exactKeyMatch() );
- QueryPlan p2( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p2.exactKeyMatch() );
- QueryPlan p3( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FBS( BSON( "b" << "z" ) ), FBS2( BSON( "b" << "z" ) ), BSON( "b" << "z" ), BSON( "a" << 1 ) );
+ QueryPlan p3( nsd(), INDEXNO( "b" << 1 << "a" << 1 ), FRSP( BSON( "b" << "z" ) ), FRSP2( BSON( "b" << "z" ) ), BSON( "b" << "z" ), BSON( "a" << 1 ) );
ASSERT( !p3.exactKeyMatch() );
- QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << "y" << "b" << "z" ) ), FBS2( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSON( "a" << 1 ) );
+ QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FRSP( BSON( "c" << "y" << "b" << "z" ) ), FRSP2( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSON( "a" << 1 ) );
ASSERT( !p4.exactKeyMatch() );
- QueryPlan p5( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << "y" << "b" << "z" ) ), FBS2( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSONObj() );
+ QueryPlan p5( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FRSP( BSON( "c" << "y" << "b" << "z" ) ), FRSP2( BSON( "c" << "y" << "b" << "z" ) ), BSON( "c" << "y" << "b" << "z" ), BSONObj() );
ASSERT( !p5.exactKeyMatch() );
- QueryPlan p6( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FBS( BSON( "c" << LT << "y" << "b" << GT << "z" ) ), FBS2( BSON( "c" << LT << "y" << "b" << GT << "z" ) ), BSON( "c" << LT << "y" << "b" << GT << "z" ), BSONObj() );
+ QueryPlan p6( nsd(), INDEXNO( "b" << 1 << "a" << 1 << "c" << 1 ), FRSP( BSON( "c" << LT << "y" << "b" << GT << "z" ) ), FRSP2( BSON( "c" << LT << "y" << "b" << GT << "z" ) ), BSON( "c" << LT << "y" << "b" << GT << "z" ), BSONObj() );
ASSERT( !p6.exactKeyMatch() );
- QueryPlan p7( nsd(), INDEXNO( "b" << 1 ), FBS( BSONObj() ), FBS2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
+ QueryPlan p7( nsd(), INDEXNO( "b" << 1 ), FRSP( BSONObj() ), FRSP2( BSONObj() ), BSONObj(), BSON( "a" << 1 ) );
ASSERT( !p7.exactKeyMatch() );
- QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << "y" << "a" << "z" ) ), FBS2( BSON( "b" << "y" << "a" << "z" ) ), BSON( "b" << "y" << "a" << "z" ), BSONObj() );
+ QueryPlan p8( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "b" << "y" << "a" << "z" ) ), FRSP2( BSON( "b" << "y" << "a" << "z" ) ), BSON( "b" << "y" << "a" << "z" ), BSONObj() );
ASSERT( p8.exactKeyMatch() );
- QueryPlan p9( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "z" ) ), FBS2( BSON( "a" << "z" ) ), BSON( "a" << "z" ), BSON( "a" << 1 ) );
+ QueryPlan p9( nsd(), INDEXNO( "a" << 1 ), FRSP( BSON( "a" << "z" ) ), FRSP2( BSON( "a" << "z" ) ), BSON( "a" << "z" ), BSON( "a" << 1 ) );
ASSERT( p9.exactKeyMatch() );
}
};
@@ -968,7 +286,7 @@ namespace QueryOptimizerTests {
class MoreKeyMatch : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "r" << "b" << NE << "q" ) ), FBS2( BSON( "a" << "r" << "b" << NE << "q" ) ), BSON( "a" << "r" << "b" << NE << "q" ), BSON( "a" << 1 ) );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FRSP( BSON( "a" << "r" << "b" << NE << "q" ) ), FRSP2( BSON( "a" << "r" << "b" << NE << "q" ) ), BSON( "a" << "r" << "b" << NE << "q" ), BSON( "a" << 1 ) );
ASSERT( !p.exactKeyMatch() );
}
};
@@ -976,18 +294,18 @@ namespace QueryOptimizerTests {
class ExactKeyQueryTypes : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << "b" ) ), FBS2( BSON( "a" << "b" ) ), BSON( "a" << "b" ), BSONObj() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 ), FRSP( BSON( "a" << "b" ) ), FRSP2( BSON( "a" << "b" ) ), BSON( "a" << "b" ), BSONObj() );
ASSERT( p.exactKeyMatch() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << 4 ) ), FBS2( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSONObj() );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 ), FRSP( BSON( "a" << 4 ) ), FRSP2( BSON( "a" << 4 ) ), BSON( "a" << 4 ), BSONObj() );
ASSERT( !p2.exactKeyMatch() );
- QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FBS( BSON( "a" << BSON( "c" << "d" ) ) ), FBS2( BSON( "a" << BSON( "c" << "d" ) ) ), BSON( "a" << BSON( "c" << "d" ) ), BSONObj() );
+ QueryPlan p3( nsd(), INDEXNO( "a" << 1 ), FRSP( BSON( "a" << BSON( "c" << "d" ) ) ), FRSP2( BSON( "a" << BSON( "c" << "d" ) ) ), BSON( "a" << BSON( "c" << "d" ) ), BSONObj() );
ASSERT( !p3.exactKeyMatch() );
BSONObjBuilder b;
b.appendRegex( "a", "^ddd" );
BSONObj q = b.obj();
- QueryPlan p4( nsd(), INDEXNO( "a" << 1 ), FBS( q ), FBS2( q ), q, BSONObj() );
+ QueryPlan p4( nsd(), INDEXNO( "a" << 1 ), FRSP( q ), FRSP2( q ), q, BSONObj() );
ASSERT( !p4.exactKeyMatch() );
- QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "a" << "z" << "b" << 4 ) ), FBS2( BSON( "a" << "z" << "b" << 4 ) ), BSON( "a" << "z" << "b" << 4 ), BSONObj() );
+ QueryPlan p5( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "a" << "z" << "b" << 4 ) ), FRSP2( BSON( "a" << "z" << "b" << 4 ) ), BSON( "a" << "z" << "b" << 4 ), BSONObj() );
ASSERT( !p5.exactKeyMatch() );
}
};
@@ -995,17 +313,17 @@ namespace QueryOptimizerTests {
class Unhelpful : public Base {
public:
void run() {
- QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 ) ), FBS2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSONObj() );
+ QueryPlan p( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "b" << 1 ) ), FRSP2( BSON( "b" << 1 ) ), BSON( "b" << 1 ), BSONObj() );
ASSERT( !p.range( "a" ).nontrivial() );
ASSERT( p.unhelpful() );
- QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FBS( BSON( "b" << 1 << "c" << 1 ) ), FBS2( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSON( "a" << 1 ) );
+ QueryPlan p2( nsd(), INDEXNO( "a" << 1 << "b" << 1 ), FRSP( BSON( "b" << 1 << "c" << 1 ) ), FRSP2( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSON( "a" << 1 ) );
ASSERT( !p2.scanAndOrderRequired() );
ASSERT( !p2.range( "a" ).nontrivial() );
ASSERT( !p2.unhelpful() );
- QueryPlan p3( nsd(), INDEXNO( "b" << 1 ), FBS( BSON( "b" << 1 << "c" << 1 ) ), FBS2( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSONObj() );
+ QueryPlan p3( nsd(), INDEXNO( "b" << 1 ), FRSP( BSON( "b" << 1 << "c" << 1 ) ), FRSP2( BSON( "b" << 1 << "c" << 1 ) ), BSON( "b" << 1 << "c" << 1 ), BSONObj() );
ASSERT( p3.range( "b" ).nontrivial() );
ASSERT( !p3.unhelpful() );
- QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "c" << 1 ), FBS( BSON( "c" << 1 << "d" << 1 ) ), FBS2( BSON( "c" << 1 << "d" << 1 ) ), BSON( "c" << 1 << "d" << 1 ), BSONObj() );
+ QueryPlan p4( nsd(), INDEXNO( "b" << 1 << "c" << 1 ), FRSP( BSON( "c" << 1 << "d" << 1 ) ), FRSP2( BSON( "c" << 1 << "d" << 1 ) ), BSON( "c" << 1 << "d" << 1 ), BSONObj() );
ASSERT( !p4.range( "b" ).nontrivial() );
ASSERT( p4.unhelpful() );
}
@@ -1023,9 +341,8 @@ namespace QueryOptimizerTests {
virtual ~Base() {
if ( !nsd() )
return;
- NamespaceDetailsTransient::_get( ns() ).clearQueryCache();
- string s( ns() );
- dropNS( s );
+ NamespaceDetailsTransient::get_inlock( ns() ).clearQueryCache();
+ dropCollection( ns() );
}
static void assembleRequest( const string &ns, BSONObj query, int nToReturn, int nToSkip, BSONObj *fieldsToReturn, int queryOptions, Message &toSend ) {
// see query.h for the protocol we are using here.
@@ -1051,9 +368,9 @@ namespace QueryOptimizerTests {
class NoIndexes : public Base {
public:
void run() {
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1063,9 +380,9 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "b_2" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSONObj() );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSONObj() );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1075,9 +392,9 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 3, s.nPlans() );
}
};
@@ -1087,9 +404,9 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSONObj() ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSONObj(), BSONObj() );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSONObj() ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSONObj(), BSONObj() );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1101,9 +418,9 @@ namespace QueryOptimizerTests {
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
BSONObj b = BSON( "hint" << BSON( "a" << 1 ) );
BSONElement e = b.firstElement();
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 1 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), true, &e );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1115,9 +432,9 @@ namespace QueryOptimizerTests {
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
BSONObj b = BSON( "hint" << "a_1" );
BSONElement e = b.firstElement();
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 1 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), true, &e );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1129,9 +446,9 @@ namespace QueryOptimizerTests {
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
BSONObj b = BSON( "hint" << BSON( "$natural" << 1 ) );
BSONElement e = b.firstElement();
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 1 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), true, &e );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1141,9 +458,9 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "b_2" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "$natural" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 1 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 1 ), BSON( "$natural" << 1 ) );
ASSERT_EQUALS( 1, s.nPlans() );
}
};
@@ -1153,9 +470,9 @@ namespace QueryOptimizerTests {
void run() {
BSONObj b = BSON( "hint" << "a_1" );
BSONElement e = b.firstElement();
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- ASSERT_EXCEPTION( QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), &e ),
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 1 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ ASSERT_EXCEPTION( QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 1 ), BSON( "b" << 1 ), true, &e ),
AssertionException );
}
};
@@ -1208,9 +525,9 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 1 << "c" << 2 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 1 << "c" << 2 ), BSONObj() );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 1 << "c" << 2 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 1 << "c" << 2 ), BSONObj() );
ASSERT_EQUALS( 2, s.nPlans() );
}
};
@@ -1220,9 +537,9 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 3, s.nPlans() );
bool threw = false;
auto_ptr< TestOp > t( new TestOp( true, threw ) );
@@ -1264,9 +581,9 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( 3, s.nPlans() );
auto_ptr< TestOp > t( new TestOp() );
boost::shared_ptr< TestOp > done = s.runOp( *t );
@@ -1294,62 +611,71 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
Helpers::ensureIndex( ns(), BSON( "b" << 1 ), false, "b_1" );
+ // No best plan - all must be tried.
nPlans( 3 );
runQuery();
+ // Best plan selected by query.
nPlans( 1 );
nPlans( 1 );
Helpers::ensureIndex( ns(), BSON( "c" << 1 ), false, "c_1" );
+ // Best plan cleared when new index added.
nPlans( 3 );
runQuery();
+ // Best plan selected by query.
nPlans( 1 );
{
DBDirectClient client;
- for( int i = 0; i < 34; ++i ) {
+ for( int i = 0; i < 334; ++i ) {
client.insert( ns(), BSON( "i" << i ) );
client.update( ns(), QUERY( "i" << i ), BSON( "i" << i + 1 ) );
client.remove( ns(), BSON( "i" << i + 1 ) );
}
}
+ // Best plan cleared by ~1000 writes.
nPlans( 3 );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
NoRecordTestOp original;
s.runOp( original );
+                // NoRecordTestOp doesn't record a best plan (tests the case where mayRecordPlan() is false).
nPlans( 3 );
BSONObj hint = fromjson( "{hint:{$natural:1}}" );
BSONElement hintElt = hint.firstElement();
- auto_ptr< FieldRangeSet > frs2( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig2( new FieldRangeSet( *frs2 ) );
- QueryPlanSet s2( ns(), frs2, frsOrig2, BSON( "a" << 4 ), BSON( "b" << 1 ), &hintElt );
+ auto_ptr< FieldRangeSetPair > frsp2( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig2( new FieldRangeSetPair( *frsp2 ) );
+ QueryPlanSet s2( ns(), frsp2, frspOrig2, BSON( "a" << 4 ), BSON( "b" << 1 ), true, &hintElt );
TestOp newOriginal;
s2.runOp( newOriginal );
+ // No plan recorded when a hint is used.
nPlans( 3 );
- auto_ptr< FieldRangeSet > frs3( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig3( new FieldRangeSet( *frs3 ) );
- QueryPlanSet s3( ns(), frs3, frsOrig3, BSON( "a" << 4 ), BSON( "b" << 1 << "c" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp3( new FieldRangeSetPair( ns(), BSON( "a" << 4 ), true ) );
+ auto_ptr< FieldRangeSetPair > frspOrig3( new FieldRangeSetPair( *frsp3 ) );
+ QueryPlanSet s3( ns(), frsp3, frspOrig3, BSON( "a" << 4 ), BSON( "b" << 1 << "c" << 1 ) );
TestOp newerOriginal;
s3.runOp( newerOriginal );
+ // Plan recorded was for a different query pattern (different sort spec).
nPlans( 3 );
+ // Best plan still selected by query after all these other tests.
runQuery();
nPlans( 1 );
}
private:
void nPlans( int n ) {
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ASSERT_EQUALS( n, s.nPlans() );
}
void runQuery() {
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
TestOp original;
s.runOp( original );
}
@@ -1376,17 +702,18 @@ namespace QueryOptimizerTests {
void run() {
Helpers::ensureIndex( ns(), BSON( "a" << 1 ), false, "a_1" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, BSON( "a" << 4 ), BSON( "b" << 1 ) );
ScanOnlyTestOp op;
s.runOp( op );
- ASSERT( fromjson( "{$natural:1}" ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( s.fbs().pattern( BSON( "b" << 1 ) ) ) ) == 0 );
- ASSERT_EQUALS( 1, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( s.fbs().pattern( BSON( "b" << 1 ) ) ) );
-                auto_ptr< FieldRangeSet > frs2( new FieldRangeSet( ns(), BSON( "a" << 4 ) ) );
-                auto_ptr< FieldRangeSet > frsOrig2( new FieldRangeSet( *frs2 ) );
-                QueryPlanSet s2( ns(), frs2, frsOrig2, BSON( "a" << 4 ), BSON( "b" << 1 ) );
+                pair< BSONObj, long long > best = QueryUtilIndexed::bestIndexForPatterns( s.frsp(), BSON( "b" << 1 ) );
+                ASSERT( fromjson( "{$natural:1}" ).woCompare( best.first ) == 0 );
+                ASSERT_EQUALS( 1, best.second );
+ auto_ptr< FieldRangeSetPair > frsp2( new FieldRangeSetPair( ns(), BSON( "a" << 4 ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig2( new FieldRangeSetPair( *frsp2 ) );
+ QueryPlanSet s2( ns(), frsp2, frspOrig2, BSON( "a" << 4 ), BSON( "b" << 1 ) );
TestOp op2;
ASSERT( s2.runOp( op2 )->complete() );
}
@@ -1396,7 +723,7 @@ namespace QueryOptimizerTests {
TestOp() {}
virtual void _init() {}
virtual void next() {
- if ( qp().indexKey().firstElement().fieldName() == string( "$natural" ) )
+ if ( qp().indexKey().firstElementFieldName() == string( "$natural" ) )
massert( 10410 , "throw", false );
setComplete();
}
@@ -1442,8 +769,8 @@ namespace QueryOptimizerTests {
BSONObj one = BSON( "a" << 1 );
theDataFileMgr.insertWithObjMod( ns(), one );
deleteObjects( ns(), BSON( "a" << 1 ), false );
- ASSERT( BSON( "a" << 1 ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "a" << 1 ) ).pattern() ) ) == 0 );
- ASSERT_EQUALS( 1, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "a" << 1 ) ).pattern() ) );
+ ASSERT( BSON( "a" << 1 ).woCompare( NamespaceDetailsTransient::get_inlock( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "a" << 1 ), true ).pattern() ) ) == 0 );
+ ASSERT_EQUALS( 1, NamespaceDetailsTransient::get_inlock( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "a" << 1 ), true ).pattern() ) );
}
};
@@ -1498,7 +825,7 @@ namespace QueryOptimizerTests {
QueryMessage q(d);
runQuery( m, q);
}
- ASSERT( BSON( "$natural" << 1 ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) ) == 0 );
+ ASSERT( BSON( "$natural" << 1 ).woCompare( NamespaceDetailsTransient::get_inlock( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ), true ).pattern() ) ) == 0 );
Message m2;
assembleRequest( ns(), QUERY( "b" << 99 << "a" << GTE << 0 ).obj, 2, 0, 0, 0, m2 );
@@ -1507,8 +834,8 @@ namespace QueryOptimizerTests {
QueryMessage q(d);
runQuery( m2, q);
}
- ASSERT( BSON( "a" << 1 ).woCompare( NamespaceDetailsTransient::_get( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) ) == 0 );
- ASSERT_EQUALS( 3, NamespaceDetailsTransient::_get( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ) ).pattern() ) );
+ ASSERT( BSON( "a" << 1 ).woCompare( NamespaceDetailsTransient::get_inlock( ns() ).indexForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ), true ).pattern() ) ) == 0 );
+ ASSERT_EQUALS( 3, NamespaceDetailsTransient::get_inlock( ns() ).nScannedForPattern( FieldRangeSet( ns(), BSON( "b" << 0 << "a" << GTE << 0 ), true ).pattern() ) );
}
};
@@ -1522,10 +849,10 @@ namespace QueryOptimizerTests {
}
BSONObj hint = fromjson( "{$hint:{a:1}}" );
BSONElement hintElt = hint.firstElement();
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj(), &hintElt );
- QueryPlan qp( nsd(), 1, s.fbs(), s.originalFrs(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj() );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj(), true, &hintElt );
+ QueryPlan qp( nsd(), 1, s.frsp(), s.originalFrsp(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSONObj() );
boost::shared_ptr<Cursor> c = qp.newCursor();
double expected[] = { 2, 3, 6, 9 };
for( int i = 0; i < 4; ++i, c->advance() ) {
@@ -1535,10 +862,10 @@ namespace QueryOptimizerTests {
// now check reverse
{
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ) ) );
- auto_ptr< FieldRangeSet > frsOrig( new FieldRangeSet( *frs ) );
- QueryPlanSet s( ns(), frs, frsOrig, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ), &hintElt );
- QueryPlan qp( nsd(), 1, s.fbs(), s.originalFrs(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ) );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ) ) );
+ auto_ptr< FieldRangeSetPair > frspOrig( new FieldRangeSetPair( *frsp ) );
+ QueryPlanSet s( ns(), frsp, frspOrig, fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ), true, &hintElt );
+ QueryPlan qp( nsd(), 1, s.frsp(), s.originalFrsp(), fromjson( "{a:{$in:[2,3,6,9,11]}}" ), BSON( "a" << -1 ) );
boost::shared_ptr<Cursor> c = qp.newCursor();
double expected[] = { 9, 6, 3, 2 };
for( int i = 0; i < 4; ++i, c->advance() ) {
@@ -1558,8 +885,8 @@ namespace QueryOptimizerTests {
theDataFileMgr.insertWithObjMod( ns(), temp );
}
BSONObj hint = fromjson( "{$hint:{a:1,b:1}}" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ) ) );
- QueryPlan qp( nsd(), 1, *frs, *frs, fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ) ) );
+ QueryPlan qp( nsd(), 1, *frsp, frsp.get(), fromjson( "{a:5,b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
boost::shared_ptr<Cursor> c = qp.newCursor();
double expected[] = { 2, 3, 6, 9 };
ASSERT( c->ok() );
@@ -1580,8 +907,8 @@ namespace QueryOptimizerTests {
theDataFileMgr.insertWithObjMod( ns(), temp );
}
BSONObj hint = fromjson( "{$hint:{a:1,b:1}}" );
- auto_ptr< FieldRangeSet > frs( new FieldRangeSet( ns(), fromjson( "{a:{$gte:5},b:{$in:[2,3,6,9,11]}}" ) ) );
- QueryPlan qp( nsd(), 1, *frs, *frs, fromjson( "{a:{$gte:5},b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
+ auto_ptr< FieldRangeSetPair > frsp( new FieldRangeSetPair( ns(), fromjson( "{a:{$gte:5},b:{$in:[2,3,6,9,11]}}" ) ) );
+ QueryPlan qp( nsd(), 1, *frsp, frsp.get(), fromjson( "{a:{$gte:5},b:{$in:[2,3,6,9,11]}}" ), BSONObj() );
boost::shared_ptr<Cursor> c = qp.newCursor();
int matches[] = { 2, 3, 6, 9 };
for( int i = 0; i < 4; ++i, c->advance() ) {
@@ -1603,10 +930,10 @@ namespace QueryOptimizerTests {
if ( !nsd() )
return;
string s( ns() );
- dropNS( s );
+ dropCollection( ns() );
}
protected:
- static const char *ns() { return "unittests.BaseTests"; }
+ static const char *ns() { return "unittests.QueryOptimizerTests"; }
static NamespaceDetails *nsd() { return nsdetails( ns() ); }
private:
dblock lk_;
@@ -1626,156 +953,1784 @@ namespace QueryOptimizerTests {
boost::shared_ptr< Cursor > c = bestGuessCursor( ns(), BSON( "b" << 1 ), BSON( "a" << 1 ) );
ASSERT_EQUALS( string( "a" ), c->indexKeyPattern().firstElement().fieldName() );
c = bestGuessCursor( ns(), BSON( "a" << 1 ), BSON( "b" << 1 ) );
- ASSERT_EQUALS( string( "b" ), c->indexKeyPattern().firstElement().fieldName() );
+ ASSERT_EQUALS( string( "b" ), c->indexKeyPattern().firstElementFieldName() );
boost::shared_ptr< MultiCursor > m = dynamic_pointer_cast< MultiCursor >( bestGuessCursor( ns(), fromjson( "{b:1,$or:[{z:1}]}" ), BSON( "a" << 1 ) ) );
ASSERT_EQUALS( string( "a" ), m->sub_c()->indexKeyPattern().firstElement().fieldName() );
m = dynamic_pointer_cast< MultiCursor >( bestGuessCursor( ns(), fromjson( "{a:1,$or:[{y:1}]}" ), BSON( "b" << 1 ) ) );
- ASSERT_EQUALS( string( "b" ), m->sub_c()->indexKeyPattern().firstElement().fieldName() );
+ ASSERT_EQUALS( string( "b" ), m->sub_c()->indexKeyPattern().firstElementFieldName() );
- FieldRangeSet frs( "ns", BSON( "a" << 1 ) );
+ FieldRangeSet frs( "ns", BSON( "a" << 1 ), true );
{
- scoped_lock lk(NamespaceDetailsTransient::_qcMutex);
+ SimpleMutex::scoped_lock lk(NamespaceDetailsTransient::_qcMutex);
NamespaceDetailsTransient::get_inlock( ns() ).registerIndexForPattern( frs.pattern( BSON( "b" << 1 ) ), BSON( "a" << 1 ), 0 );
}
m = dynamic_pointer_cast< MultiCursor >( bestGuessCursor( ns(), fromjson( "{a:1,$or:[{y:1}]}" ), BSON( "b" << 1 ) ) );
ASSERT_EQUALS( string( "b" ), m->sub_c()->indexKeyPattern().firstElement().fieldName() );
}
};
+
+ namespace QueryOptimizerCursorTests {
+
+ using boost::shared_ptr;
+
+ class Base {
+ public:
+ Base() {
+ dblock lk;
+ Client::Context ctx( ns() );
+ string err;
+ userCreateNS( ns(), BSONObj(), err, false );
+ dropCollection( ns() );
+ }
+ ~Base() {
+ cc().curop()->reset();
+ }
+ protected:
+ DBDirectClient _cli;
+ static const char *ns() { return "unittests.QueryOptimizerTests"; }
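+            /** Create a query optimizer cursor over ns() and position it on the first returnable result. */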
+ void setQueryOptimizerCursor( const BSONObj &query, const BSONObj &order = BSONObj() ) {
+ _c = newQueryOptimizerCursor( ns(), query, order );
+ if ( ok() && !mayReturnCurrent() ) {
+ advance();
+ }
+ }
+ bool ok() const { return _c->ok(); }
+ /** Handles matching and deduping. */
+ bool advance() {
+ while( _c->advance() && !mayReturnCurrent() );
+ return ok();
+ }
+ int itcount() {
+ int ret = 0;
+ while( ok() ) {
+ ++ret;
+ advance();
+ }
+ return ret;
+ }
+ BSONObj current() const { return _c->current(); }
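+            /** A result may be returned only if it matches the query and its location has not already been returned (deduping across plans). */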
+ bool mayReturnCurrent() {
+ return _c->matcher()->matchesCurrent( _c.get() ) && !_c->getsetdup( _c->currLoc() );
+ }
+ bool prepareToYield() const { return _c->prepareToYield(); }
+ void recoverFromYield() {
+ _c->recoverFromYield();
+ if ( ok() && !mayReturnCurrent() ) {
+ advance();
+ }
+ }
+ shared_ptr<Cursor> c() { return _c; }
+ long long nscanned() const { return _c->nscanned(); }
+ private:
+ shared_ptr<Cursor> _c;
+ };
+
+ /** No results for empty collection. */
+ class Empty : public Base {
+ public:
+ void run() {
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSONObj() );
+ ASSERT( !c->ok() );
+ ASSERT_EXCEPTION( c->_current(), AssertionException );
+ ASSERT_EXCEPTION( c->current(), AssertionException );
+ ASSERT( c->currLoc().isNull() );
+ ASSERT( !c->advance() );
+ ASSERT_EXCEPTION( c->currKey(), AssertionException );
+ ASSERT_EXCEPTION( c->getsetdup( DiskLoc() ), AssertionException );
+ ASSERT_EXCEPTION( c->isMultiKey(), AssertionException );
+ ASSERT_EXCEPTION( c->matcher(), AssertionException );
+ }
+ };
+
+ /** Simple table scan. */
+ class Unindexed : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSONObj() );
+ ASSERT_EQUALS( 2, itcount() );
+ }
+ };
+
+ /** Basic test with two indexes and deduping requirement. */
+ class Basic : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 2 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT( ok() );
+ ASSERT_EQUALS( BSON( "_id" << 1 << "a" << 2 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 2 << "a" << 1 ), current() );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ };
+
+ class NoMatch : public Base {
+ public:
+ void run() {
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 5 << LT << 4 << "a" << GT << 0 ) );
+ ASSERT( !ok() );
+ }
+ };
+
+ /** Order of results indicates that interleaving is occurring. */
+ class Interleaved : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 2 ) );
+ _cli.insert( ns(), BSON( "_id" << 3 << "a" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 << "a" << 2 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT( ok() );
+ ASSERT_EQUALS( BSON( "_id" << 1 << "a" << 2 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 3 << "a" << 1 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 2 << "a" << 2 ), current() );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ };
+
+ /** Some values on each index do not match. */
+ class NotMatch : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << 10 ) );
+ _cli.insert( ns(), BSON( "_id" << 10 << "a" << 0 ) );
+ _cli.insert( ns(), BSON( "_id" << 11 << "a" << 12 ) );
+ _cli.insert( ns(), BSON( "_id" << 12 << "a" << 11 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 5 << "a" << GT << 5 ) );
+ ASSERT( ok() );
+ ASSERT_EQUALS( BSON( "_id" << 11 << "a" << 12 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 12 << "a" << 11 ), current() );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ };
+
+ /** After the first 101 matches for a plan, we stop interleaving the plans. */
+ class StopInterleaving : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 101; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i ) );
+ }
+ for( int i = 101; i < 200; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << (301-i) ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << -1 << "a" << GT << -1 ) );
+ for( int i = 0; i < 200; ++i ) {
+ ASSERT( ok() );
+ ASSERT_EQUALS( i, current().getIntField( "_id" ) );
+ advance();
+ }
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ };
+
+ /** Test correct deduping with the takeover cursor. */
+ class TakeoverWithDup : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 101; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i ) );
+ }
+ _cli.insert( ns(), BSON( "_id" << 500 << "a" << BSON_ARRAY( 0 << 300 ) ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << -1 << "a" << GT << -1 ) );
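+                // The multikey document {_id:500} appears under two keys of the {a:1} index but must be counted only once: 101 + 1 = 102.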
+ ASSERT_EQUALS( 102, itcount() );
+ }
+ };
+
+ /** Test usage of matcher with takeover cursor. */
+ class TakeoverWithNonMatches : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 101; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i ) );
+ }
+ _cli.insert( ns(), BSON( "_id" << 101 << "a" << 600 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << -1 << "a" << LT << 500 ) );
+ ASSERT_EQUALS( 101, itcount() );
+ }
+ };
+
+ /** Check deduping of dups within just the takeover cursor. */
+ class TakeoverWithTakeoverDup : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 101; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i*2 << "a" << 0 ) );
+ _cli.insert( ns(), BSON( "_id" << i*2+1 << "a" << 1 ) );
+ }
+ _cli.insert( ns(), BSON( "_id" << 202 << "a" << BSON_ARRAY( 2 << 3 ) ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << -1 << "a" << GT << 0) );
+ ASSERT_EQUALS( 102, itcount() );
+ }
+ };
+
+ /** Basic test with $or query. */
+ class BasicOr : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << 0 ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << 0 ) << BSON( "a" << 1 ) ) ) );
+ ASSERT_EQUALS( BSON( "_id" << 0 << "a" << 0 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 1 << "a" << 1 ), current() );
+ ASSERT( !advance() );
+ }
+ };
+
+ /** $or first clause empty. */
+ class OrFirstClauseEmpty : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << -1 ) << BSON( "a" << 1 ) ) ) );
+ ASSERT_EQUALS( BSON( "_id" << 0 << "a" << 1 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 1 << "a" << 1 ), current() );
+ ASSERT( !advance() );
+ }
+ };
+
+ /** $or second clause empty. */
+ class OrSecondClauseEmpty : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << 0 ) << BSON( "_id" << -1 ) << BSON( "a" << 1 ) ) ) );
+ ASSERT_EQUALS( BSON( "_id" << 0 << "a" << 1 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 1 << "a" << 1 ), current() );
+ ASSERT( !advance() );
+ }
+ };
+
+        /** $or with multiple empty clauses. */
+ class OrMultipleClausesEmpty : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << 2 ) << BSON( "_id" << 4 ) << BSON( "_id" << 0 ) << BSON( "_id" << -1 ) << BSON( "_id" << 6 ) << BSON( "a" << 1 ) << BSON( "_id" << 9 ) ) ) );
+ ASSERT_EQUALS( BSON( "_id" << 0 << "a" << 1 ), current() );
+ ASSERT( advance() );
+ ASSERT_EQUALS( BSON( "_id" << 1 << "a" << 1 ), current() );
+ ASSERT( !advance() );
+ }
+ };
+
+        /** Check that takeover occurs at the proper match count with $or clauses. */
+ class TakeoverCountOr : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 60; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << 0 ) );
+ }
+ for( int i = 60; i < 120; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << 1 ) );
+ }
+ for( int i = 120; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << (200-i) ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "a" << 0 ) << BSON( "a" << 1 ) << BSON( "_id" << GTE << 120 << "a" << GT << 1 ) ) ) );
+ for( int i = 0; i < 120; ++i ) {
+ ASSERT( ok() );
+ advance();
+ }
+ // Expect to be scanning on _id index only.
+ for( int i = 120; i < 150; ++i ) {
+ ASSERT_EQUALS( i, current().getIntField( "_id" ) );
+ advance();
+ }
+ ASSERT( !ok() );
+ }
+ };
+
+ /** Takeover just at end of clause. */
+ class TakeoverEndOfOrClause : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 102; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i ) );
+ }
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << LT << 101 ) << BSON( "_id" << 101 ) ) ) );
+ for( int i = 0; i < 102; ++i ) {
+ ASSERT_EQUALS( i, current().getIntField( "_id" ) );
+ advance();
+ }
+ ASSERT( !ok() );
+ }
+ };
+
+ class TakeoverBeforeEndOfOrClause : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 101; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i ) );
+ }
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << LT << 100 ) << BSON( "_id" << 100 ) ) ) );
+ for( int i = 0; i < 101; ++i ) {
+ ASSERT_EQUALS( i, current().getIntField( "_id" ) );
+ advance();
+ }
+ ASSERT( !ok() );
+ }
+ };
+
+ class TakeoverAfterEndOfOrClause : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 103; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i ) );
+ }
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << LT << 102 ) << BSON( "_id" << 102 ) ) ) );
+ for( int i = 0; i < 103; ++i ) {
+ ASSERT_EQUALS( i, current().getIntField( "_id" ) );
+ advance();
+ }
+ ASSERT( !ok() );
+ }
+ };
+
+ /** Test matching and deduping done manually by cursor client. */
+ class ManualMatchingDeduping : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << 10 ) );
+ _cli.insert( ns(), BSON( "_id" << 10 << "a" << 0 ) );
+ _cli.insert( ns(), BSON( "_id" << 11 << "a" << 12 ) );
+ _cli.insert( ns(), BSON( "_id" << 12 << "a" << 11 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr< Cursor > c = newQueryOptimizerCursor( ns(), BSON( "_id" << GT << 5 << "a" << GT << 5 ) );
+ ASSERT( c->ok() );
+
+ // _id 10 {_id:1}
+ ASSERT_EQUALS( 10, c->current().getIntField( "_id" ) );
+ ASSERT( !c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+
+ // _id 0 {a:1}
+ ASSERT_EQUALS( 0, c->current().getIntField( "_id" ) );
+ ASSERT( !c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+
+ // _id 0 {$natural:1}
+ ASSERT_EQUALS( 0, c->current().getIntField( "_id" ) );
+ ASSERT( !c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+
+ // _id 11 {_id:1}
+ ASSERT_EQUALS( BSON( "_id" << 11 << "a" << 12 ), c->current() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ ASSERT( c->advance() );
+
+ // _id 12 {a:1}
+ ASSERT_EQUALS( BSON( "_id" << 12 << "a" << 11 ), c->current() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ ASSERT( c->advance() );
+
+ // _id 10 {$natural:1}
+ ASSERT_EQUALS( 10, c->current().getIntField( "_id" ) );
+ ASSERT( !c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+
+ // _id 12 {_id:1}
+ ASSERT_EQUALS( BSON( "_id" << 12 << "a" << 11 ), c->current() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+ ASSERT( c->advance() );
+
+ // _id 11 {a:1}
+ ASSERT_EQUALS( BSON( "_id" << 11 << "a" << 12 ), c->current() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+ ASSERT( c->advance() );
+
+ // _id 11 {$natural:1}
+ ASSERT_EQUALS( 11, c->current().getIntField( "_id" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+
+ // {_id:1} scan is complete.
+ ASSERT( !c->advance() );
+ ASSERT( !c->ok() );
+
+ // Scan the results again - this time the winning plan has been
+ // recorded.
+ c = newQueryOptimizerCursor( ns(), BSON( "_id" << GT << 5 << "a" << GT << 5 ) );
+ ASSERT( c->ok() );
+
+ // _id 10 {_id:1}
+ ASSERT_EQUALS( 10, c->current().getIntField( "_id" ) );
+ ASSERT( !c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+
+ // _id 11 {_id:1}
+ ASSERT_EQUALS( BSON( "_id" << 11 << "a" << 12 ), c->current() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ ASSERT( c->advance() );
+
+ // _id 12 {_id:1}
+ ASSERT_EQUALS( BSON( "_id" << 12 << "a" << 11 ), c->current() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+
+ // {_id:1} scan complete
+ ASSERT( !c->advance() );
+ ASSERT( !c->ok() );
+ }
+ };
+
+        /** The current key must correspond to the current location for matching to be correct. */
+ class ManualMatchingUsingCurrKey : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << "a" ) );
+ _cli.insert( ns(), BSON( "_id" << "b" ) );
+ _cli.insert( ns(), BSON( "_id" << "ba" ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr< Cursor > c = newQueryOptimizerCursor( ns(), fromjson( "{_id:/a/}" ) );
+ ASSERT( c->ok() );
+ // "a"
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ ASSERT( c->advance() );
+ ASSERT( c->ok() );
+
+ // "b"
+ ASSERT( !c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+ ASSERT( c->ok() );
+
+ // "ba"
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ ASSERT( !c->advance() );
+ }
+ };
+
+ /** Test matching and deduping done manually by cursor client. */
+ class ManualMatchingDedupingTakeover : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << 0 ) );
+ }
+ _cli.insert( ns(), BSON( "_id" << 300 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr< Cursor > c = newQueryOptimizerCursor( ns(), BSON( "$or" << BSON_ARRAY( BSON( "_id" << LT << 300 ) << BSON( "a" << 1 ) ) ) );
+ for( int i = 0; i < 151; ++i ) {
+ ASSERT( c->ok() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ c->advance();
+ }
+ ASSERT( !c->ok() );
+ }
+ };
+
+        /** Test single-key matching bounds. */
+ class Singlekey : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "a" << "10" ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr< Cursor > c = newQueryOptimizerCursor( ns(), BSON( "a" << GT << 1 << LT << 5 ) );
+                // Two-sided bounds work.
+ ASSERT( !c->ok() );
+ }
+ };
+
+        /** Test multi-key matching bounds. */
+ class Multikey : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "a" << BSON_ARRAY( 1 << 10 ) ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "a" << GT << 5 << LT << 3 ) );
+ // Multi key bounds work.
+ ASSERT( ok() );
+ }
+ };
+
+ /** Add other plans when the recorded one is doing more poorly than expected. */
+ class AddOtherPlans : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << 0 << "b" << 0 ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 1 << "b" << 0 ) );
+ for( int i = 100; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << 100 << "b" << i ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "b" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "a" << 0 << "b" << 0 ) );
+
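+                // Each candidate plan ({a:1}, {b:1}, $natural) returns the same matching document; the raw cursor leaves deduping to the caller.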
+ ASSERT_EQUALS( BSON( "_id" << 0 << "a" << 0 << "b" << 0 ), c->current() );
+ ASSERT( c->advance() );
+ ASSERT_EQUALS( BSON( "_id" << 0 << "a" << 0 << "b" << 0 ), c->current() );
+ ASSERT( c->advance() );
+                // $natural plan
+ ASSERT_EQUALS( BSON( "_id" << 0 << "a" << 0 << "b" << 0 ), c->current() );
+ ASSERT( !c->advance() );
+
+ c = newQueryOptimizerCursor( ns(), BSON( "a" << 100 << "b" << 149 ) );
+ // Try {a:1}, which was successful previously.
+ for( int i = 0; i < 11; ++i ) {
+ ASSERT( 149 != c->current().getIntField( "b" ) );
+ ASSERT( c->advance() );
+ }
+ // Now try {b:1} plan.
+ ASSERT_EQUALS( 149, c->current().getIntField( "b" ) );
+ ASSERT( c->advance() );
+ // {b:1} plan finished.
+ ASSERT( !c->advance() );
+ }
+ };
+
+ /** Check $or clause range elimination. */
+ class OrRangeElimination : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "$or" << BSON_ARRAY( BSON( "_id" << GT << 0 ) << BSON( "_id" << 1 ) ) ) );
+ ASSERT( c->ok() );
+ ASSERT( !c->advance() );
+ }
+ };
+
+ /** Check $or match deduping - in takeover cursor. */
+ class OrDedup : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "$or" << BSON_ARRAY( BSON( "_id" << LT << 140 ) << BSON( "_id" << 145 ) << BSON( "a" << 145 ) ) ) );
+
+ while( c->current().getIntField( "_id" ) < 140 ) {
+ ASSERT( c->advance() );
+ }
+ // Match from second $or clause.
+ ASSERT_EQUALS( 145, c->current().getIntField( "_id" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+ // Match from third $or clause.
+ ASSERT_EQUALS( 145, c->current().getIntField( "_id" ) );
+ // $or deduping is handled by the matcher.
+ ASSERT( !c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->advance() );
+ }
+ };
+
+ /** Standard dups with a multikey cursor. */
+ class EarlyDups : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "a" << BSON_ARRAY( 0 << 1 << 200 ) ) );
+ for( int i = 2; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "a" << i ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "a" << GT << -1 ) );
+ ASSERT_EQUALS( 149, itcount() );
+ }
+ };
+
+        /** Pop an $or clause in the takeover cursor. */
+ class OrPopInTakeover : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i ) );
+ }
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "$or" << BSON_ARRAY( BSON( "_id" << LTE << 147 ) << BSON( "_id" << 148 ) << BSON( "_id" << 149 ) ) ) );
+ for( int i = 0; i < 150; ++i ) {
+ ASSERT( c->ok() );
+ ASSERT_EQUALS( i, c->current().getIntField( "_id" ) );
+ c->advance();
+ }
+ ASSERT( !c->ok() );
+ }
+ };
+
+        /** $or clause iteration is abandoned once a full collection scan is performed. */
+ class OrCollectionScanAbort : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "a" << BSON_ARRAY( 1 << 2 << 3 << 4 << 5 ) << "b" << 4 ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << BSON_ARRAY( 6 << 7 << 8 << 9 << 10 ) << "b" << 4 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "$or" << BSON_ARRAY( BSON( "a" << LT << 6 << "b" << 4 ) << BSON( "a" << GTE << 6 << "b" << 4 ) ) ) );
+
+ ASSERT( c->ok() );
+
+ // _id 0 on {a:1}
+ ASSERT_EQUALS( 0, c->current().getIntField( "_id" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ c->advance();
+
+ // _id 0 on {$natural:1}
+ ASSERT_EQUALS( 0, c->current().getIntField( "_id" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+ c->advance();
+
+ // _id 0 on {a:1}
+ ASSERT_EQUALS( 0, c->current().getIntField( "_id" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+ c->advance();
+
+ // _id 1 on {$natural:1}
+ ASSERT_EQUALS( 1, c->current().getIntField( "_id" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ c->advance();
+
+ // _id 0 on {a:1}
+ ASSERT_EQUALS( 0, c->current().getIntField( "_id" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+ c->advance();
+
+ // {$natural:1} finished
+ ASSERT( !c->ok() );
+ }
+ };
+
+ /** Simple geo query. */
+ class Geo : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "loc" << BSON( "lon" << 30 << "lat" << 30 ) ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "loc" << BSON( "lon" << 31 << "lat" << 31 ) ) );
+ _cli.ensureIndex( ns(), BSON( "loc" << "2d" ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "loc" << BSON( "$near" << BSON_ARRAY( 30 << 30 ) ) ) );
+ ASSERT( ok() );
+ ASSERT_EQUALS( 0, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ };
+
+        /** Yield and recover the cursor with no intervening modifications, then continue iteration. */
+ class YieldNoOp : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 2, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ ASSERT( prepareToYield() );
+ recoverFromYield();
+ }
+ }
+ };
+
+ /** Yield cursor and delete current entry. */
+ class YieldDelete : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << 1 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.remove( ns(), BSON( "_id" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( !ok() );
+ ASSERT( !advance() );
+ }
+ }
+ };
+
+ /** Yield cursor and delete current entry, then continue iteration. */
+ class YieldDeleteContinue : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.remove( ns(), BSON( "_id" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 2, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+        /** Yield cursor and delete current entry, then continue iteration through the remaining results. */
+ class YieldDeleteContinueFurther : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 ) );
+ _cli.insert( ns(), BSON( "_id" << 3 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.remove( ns(), BSON( "_id" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 2, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 3, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yield and update current. */
+ class YieldUpdate : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "a" << 1 ) );
+ _cli.insert( ns(), BSON( "a" << 2 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "a" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "a" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.update( ns(), BSON( "a" << 1 ), BSON( "$set" << BSON( "a" << 3 ) ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 2, current().getIntField( "a" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yield and drop collection. */
+ class YieldDrop : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.dropCollection( ns() );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yield and drop collection with $or query. */
+ class YieldDropOr : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "$or" << BSON_ARRAY( BSON( "_id" << 1 ) << BSON( "_id" << 2 ) ) ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.dropCollection( ns() );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ ASSERT_EXCEPTION( recoverFromYield(), MsgAssertionException );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yield and overwrite current in capped collection. */
+ class YieldCappedOverwrite : public Base {
+ public:
+ void run() {
+ _cli.createCollection( ns(), 1000, true );
+ _cli.insert( ns(), BSON( "x" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "x" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "x" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ int x = 2;
+ while( _cli.count( ns(), BSON( "x" << 1 ) ) > 0 ) {
+ _cli.insert( ns(), BSON( "x" << x++ ) );
+ }
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ ASSERT_EXCEPTION( recoverFromYield(), MsgAssertionException );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yield and drop unrelated index - see SERVER-2454. */
+ class YieldDropIndex : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << 1 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.dropIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yielding with multiple plans active. */
+ class YieldMultiplePlansNoOp : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 2 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 2, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yielding with advance and multiple plans active. */
+ class YieldMultiplePlansAdvanceNoOp : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 2 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 << "a" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 3 << "a" << 3 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ advance();
+ ASSERT_EQUALS( 2, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 3, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yielding with delete and multiple plans active. */
+ class YieldMultiplePlansDelete : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 2 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 << "a" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 3 << "a" << 4 ) );
+ _cli.insert( ns(), BSON( "_id" << 4 << "a" << 3 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ advance();
+ ASSERT_EQUALS( 2, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.remove( ns(), BSON( "_id" << 2 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ c()->recoverFromYield();
+ ASSERT( ok() );
+ // index {a:1} active during yield
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 3, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 4, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ }
+ };
+
+ /** Yielding with multiple plans and capped overwrite. */
+ class YieldMultiplePlansCappedOverwrite : public Base {
+ public:
+ void run() {
+ _cli.createCollection( ns(), 1000, true );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "_id" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ int i = 1;
+ while( _cli.count( ns(), BSON( "_id" << 1 ) ) > 0 ) {
+ ++i;
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i ) );
+ }
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ // {$natural:1} plan does not recover, {_id:1} plan does.
+ ASSERT( 1 < current().getIntField( "_id" ) );
+ }
+ }
+ };
+
+ /**
+ * Yielding with multiple plans and capped overwrite with unrecoverable cursor
+ * active at time of yield.
+ */
+ class YieldMultiplePlansCappedOverwriteManual : public Base {
+ public:
+ void run() {
+ _cli.createCollection( ns(), 1000, true );
+ _cli.insert( ns(), BSON( "a" << 1 << "b" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ shared_ptr<Cursor> c;
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ c = newQueryOptimizerCursor( ns(), BSON( "a" << GT << 0 << "b" << GT << 0 ) );
+ ASSERT_EQUALS( 1, c->current().getIntField( "a" ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ c->advance();
+ ASSERT_EQUALS( 1, c->current().getIntField( "a" ) );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+ ASSERT( c->prepareToYield() );
+ }
+
+ int i = 1;
+ while( _cli.count( ns(), BSON( "a" << 1 ) ) > 0 ) {
+ ++i;
+ _cli.insert( ns(), BSON( "a" << i << "b" << i ) );
+ }
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ c->recoverFromYield();
+ ASSERT( c->ok() );
+                // {$natural:1} plan does not recover, {a:1} plan does.
+ ASSERT( 1 < c->current().getIntField( "a" ) );
+ }
+ }
+ };
+
+ /**
+ * Yielding with multiple plans and capped overwrite with unrecoverable cursor
+         * inactive at time of yield.
+ */
+ class YieldMultiplePlansCappedOverwriteManual2 : public Base {
+ public:
+ void run() {
+ _cli.createCollection( ns(), 1000, true );
+ _cli.insert( ns(), BSON( "_id" << 1 << "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "_id" << 1 ) );
+
+ shared_ptr<Cursor> c;
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ c = newQueryOptimizerCursor( ns(), BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT_EQUALS( 1, c->current().getIntField( "_id" ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ ASSERT( c->prepareToYield() );
+ }
+
+ int n = 1;
+ while( _cli.count( ns(), BSON( "_id" << 1 ) ) > 0 ) {
+ ++n;
+ _cli.insert( ns(), BSON( "_id" << n << "a" << n ) );
+ }
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ c->recoverFromYield();
+ ASSERT( c->ok() );
+ // {$natural:1} plan does not recover, {_id:1} plan does.
+ ASSERT( 1 < c->current().getIntField( "_id" ) );
+ ASSERT( !c->getsetdup( c->currLoc() ) );
+ int i = c->current().getIntField( "_id" );
+ ASSERT( c->advance() );
+ ASSERT( c->getsetdup( c->currLoc() ) );
+ while( i < n ) {
+ ASSERT( c->advance() );
+ ++i;
+ ASSERT_EQUALS( i, c->current().getIntField( "_id" ) );
+ }
+ }
+ }
+ };
+
+ /** Try and fail to yield a geo query. */
+ class TryYieldGeo : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 0 << "loc" << BSON( "lon" << 30 << "lat" << 30 ) ) );
+ _cli.ensureIndex( ns(), BSON( "loc" << "2d" ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "loc" << BSON( "$near" << BSON_ARRAY( 50 << 50 ) ) ) );
+ ASSERT( ok() );
+ ASSERT_EQUALS( 0, current().getIntField( "_id" ) );
+ ASSERT( !prepareToYield() );
+ ASSERT( ok() );
+ ASSERT_EQUALS( 0, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ ASSERT( !ok() );
+ }
+ };
+
+ /** Yield with takeover cursor. */
+ class YieldTakeover : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GTE << 0 << "a" << GTE << 0 ) );
+ for( int i = 0; i < 120; ++i ) {
+ ASSERT( advance() );
+ }
+ ASSERT( ok() );
+ ASSERT_EQUALS( 120, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.remove( ns(), BSON( "_id" << 120 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 121, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 122, current().getIntField( "_id" ) );
+ }
+ }
+ };
+
+        /** Yield with BasicCursor takeover cursor. */
+ class YieldTakeoverBasic : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 150; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << BSON_ARRAY( i << i+1 ) ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ auto_ptr<ClientCursor> cc;
+ auto_ptr<ClientCursor::YieldData> data( new ClientCursor::YieldData() );
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "b" << NE << 0 << "a" << GTE << 0 ) );
+ cc.reset( new ClientCursor( QueryOption_NoCursorTimeout, c(), ns() ) );
+ for( int i = 0; i < 120; ++i ) {
+ ASSERT( advance() );
+ }
+ ASSERT( ok() );
+ ASSERT_EQUALS( 120, current().getIntField( "_id" ) );
+ cc->prepareToYield( *data );
+ }
+ _cli.remove( ns(), BSON( "_id" << 120 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ ASSERT( ClientCursor::recoverFromYield( *data ) );
+ ASSERT( ok() );
+ ASSERT_EQUALS( 121, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 122, current().getIntField( "_id" ) );
+ }
+ }
+ };
+
+ /** Yield with advance of inactive cursor. */
+ class YieldInactiveCursorAdvance : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 10; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << 10 - i ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "a" << GT << 0 ) );
+ ASSERT( ok() );
+ ASSERT_EQUALS( 1, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 9, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 2, current().getIntField( "_id" ) );
+ ASSERT( prepareToYield() );
+ }
+
+ _cli.remove( ns(), BSON( "_id" << 9 ) );
+
+ {
+ dblock lk;
+ Client::Context ctx( ns() );
+ recoverFromYield();
+ ASSERT( ok() );
+ ASSERT_EQUALS( 8, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 3, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 7, current().getIntField( "_id" ) );
+ }
+ }
+ };
+
+ class OrderId : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 10; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i ) );
+ }
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSONObj(), BSON( "_id" << 1 ) );
+
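+                // Results are expected in ascending _id order, provided by the {_id:1} index.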
+ for( int i = 0; i < 10; ++i, advance() ) {
+ ASSERT( ok() );
+ ASSERT_EQUALS( i, current().getIntField( "_id" ) );
+ }
+ }
+ };
+
+ class OrderMultiIndex : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 10; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << 1 ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "_id" << 1 << "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GTE << 0 << "a" << GTE << 0 ), BSON( "_id" << 1 ) );
+
+ for( int i = 0; i < 10; ++i, advance() ) {
+ ASSERT( ok() );
+ ASSERT_EQUALS( i, current().getIntField( "_id" ) );
+ }
+ }
+ };
+
+ class OrderReject : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 10; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i % 5 ) );
+ }
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "a" << GTE << 3 ), BSON( "_id" << 1 ) );
+
+ ASSERT( ok() );
+ ASSERT_EQUALS( 3, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 4, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 8, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 9, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ }
+ };
+
+ class OrderNatural : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 5 ) );
+ _cli.insert( ns(), BSON( "_id" << 4 ) );
+ _cli.insert( ns(), BSON( "_id" << 6 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 ), BSON( "$natural" << 1 ) );
+
+ ASSERT( ok() );
+ ASSERT_EQUALS( 5, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 4, current().getIntField( "_id" ) );
+ ASSERT( advance() );
+ ASSERT_EQUALS( 6, current().getIntField( "_id" ) );
+ ASSERT( !advance() );
+ }
+ };
+
+ class OrderUnindexed : public Base {
+ public:
+ void run() {
+ dblock lk;
+ Client::Context ctx( ns() );
+ ASSERT( !newQueryOptimizerCursor( ns(), BSONObj(), BSON( "a" << 1 ) ).get() );
+ }
+ };
+
+ class RecordedOrderInvalid : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "a" << 1 << "b" << 1 ) );
+ _cli.insert( ns(), BSON( "a" << 2 << "b" << 2 ) );
+ _cli.insert( ns(), BSON( "a" << 3 << "b" << 3 ) );
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ _cli.ensureIndex( ns(), BSON( "b" << 1 ) );
+ ASSERT( _cli.query( ns(), QUERY( "a" << 2 ).sort( "b" ) )->more() );
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "a" << 2 ), BSON( "b" << 1 ) );
+ // Check that we are scanning {b:1} not {a:1}.
+ for( int i = 0; i < 3; ++i ) {
+ ASSERT( c->ok() );
+ c->advance();
+ }
+ ASSERT( !c->ok() );
+ }
+ };
+
+ class KillOp : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 << "b" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 << "b" << 2 ) );
+ _cli.ensureIndex( ns(), BSON( "b" << 1 ) );
+
+ mongolock lk( false );
+ Client::Context ctx( ns() );
+ setQueryOptimizerCursor( BSON( "_id" << GT << 0 << "b" << GT << 0 ) );
+ ASSERT( ok() );
+ cc().curop()->kill();
+ // First advance() call throws, subsequent calls just fail.
+ ASSERT_EXCEPTION( advance(), MsgAssertionException );
+ ASSERT( !advance() );
+ }
+ };
+
+ class KillOpFirstClause : public Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 1 << "b" << 1 ) );
+ _cli.insert( ns(), BSON( "_id" << 2 << "b" << 2 ) );
+ _cli.ensureIndex( ns(), BSON( "b" << 1 ) );
+
+ mongolock lk( false );
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "$or" << BSON_ARRAY( BSON( "_id" << GT << 0 ) << BSON( "b" << GT << 0 ) ) ) );
+ ASSERT( c->ok() );
+ cc().curop()->kill();
+ // First advance() call throws, subsequent calls just fail.
+ ASSERT_EXCEPTION( c->advance(), MsgAssertionException );
+ ASSERT( !c->advance() );
+ }
+ };
+
+ class Nscanned : public Base {
+ public:
+ void run() {
+ for( int i = 0; i < 120; ++i ) {
+ _cli.insert( ns(), BSON( "_id" << i << "a" << i ) );
+ }
+
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = newQueryOptimizerCursor( ns(), BSON( "_id" << GTE << 0 << "a" << GTE << 0 ) );
+ ASSERT( c->ok() );
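+                // nscanned counts work across the interleaved plans; 2 here presumably corresponds to one document scanned per candidate plan.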
+ ASSERT_EQUALS( 2, c->nscanned() );
+ c->advance();
+ ASSERT( c->ok() );
+ ASSERT_EQUALS( 2, c->nscanned() );
+ c->advance();
+ for( int i = 3; i < 222; ++i ) {
+ ASSERT( c->ok() );
+ c->advance();
+ }
+ ASSERT( !c->ok() );
+ }
+ };
+
+ namespace GetCursor {
+
+ class Base : public QueryOptimizerCursorTests::Base {
+ public:
+ Base() {
+ // create collection
+ _cli.insert( ns(), BSON( "_id" << 5 ) );
+ }
+ virtual ~Base() {}
+ void run() {
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = NamespaceDetailsTransient::getCursor( ns(), query(), order() );
+ string type = c->toString().substr( 0, expectedType().length() );
+ ASSERT_EQUALS( expectedType(), type );
+ check( c );
+ }
+ protected:
+ virtual string expectedType() const = 0;
+ virtual BSONObj query() const { return BSONObj(); }
+ virtual BSONObj order() const { return BSONObj(); }
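+                /** Default check: exactly one result, the seed {_id:5} document, with no matcher required. */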
+ virtual void check( const shared_ptr<Cursor> &c ) {
+ ASSERT( c->ok() );
+ ASSERT( !c->matcher() );
+ ASSERT_EQUALS( 5, c->current().getIntField( "_id" ) );
+ ASSERT( !c->advance() );
+ }
+ };
+
+ class NoConstraints : public Base {
+ string expectedType() const { return "BasicCursor"; }
+ };
+
+ class SimpleId : public Base {
+ public:
+ SimpleId() {
+ _cli.insert( ns(), BSON( "_id" << 0 ) );
+ _cli.insert( ns(), BSON( "_id" << 10 ) );
+ }
+ string expectedType() const { return "BtreeCursor _id_"; }
+ BSONObj query() const { return BSON( "_id" << 5 ); }
+ };
+
+ class OptimalIndex : public Base {
+ public:
+ OptimalIndex() {
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ _cli.insert( ns(), BSON( "a" << 5 ) );
+ _cli.insert( ns(), BSON( "a" << 6 ) );
+ }
+ string expectedType() const { return "BtreeCursor a_1"; }
+ BSONObj query() const { return BSON( "a" << GTE << 5 ); }
+ void check( const shared_ptr<Cursor> &c ) {
+ ASSERT( c->ok() );
+ ASSERT( c->matcher() );
+ ASSERT_EQUALS( 5, c->current().getIntField( "a" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( c->advance() );
+ ASSERT_EQUALS( 6, c->current().getIntField( "a" ) );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT( !c->advance() );
+ }
+ };
+
+ class Geo : public Base {
+ public:
+ Geo() {
+ _cli.insert( ns(), BSON( "_id" << 44 << "loc" << BSON_ARRAY( 44 << 45 ) ) );
+ _cli.ensureIndex( ns(), BSON( "loc" << "2d" ) );
+ }
+ string expectedType() const { return "GeoSearchCursor"; }
+ BSONObj query() const { return fromjson( "{ loc : { $near : [50,50] } }" ); }
+ void check( const shared_ptr<Cursor> &c ) {
+ ASSERT( c->ok() );
+ ASSERT( c->matcher() );
+ ASSERT( c->matcher()->matchesCurrent( c.get() ) );
+ ASSERT_EQUALS( 44, c->current().getIntField( "_id" ) );
+ ASSERT( !c->advance() );
+ }
+ };
+
+ class OutOfOrder : public QueryOptimizerCursorTests::Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 5 ) );
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = NamespaceDetailsTransient::getCursor( ns(), BSONObj(), BSON( "b" << 1 ) );
+ ASSERT( !c );
+ }
+ };
+
+ class BestSavedOutOfOrder : public QueryOptimizerCursorTests::Base {
+ public:
+ void run() {
+ _cli.insert( ns(), BSON( "_id" << 5 << "b" << BSON_ARRAY( 1 << 2 << 3 << 4 << 5 ) ) );
+ _cli.insert( ns(), BSON( "_id" << 1 << "b" << 6 ) );
+ _cli.ensureIndex( ns(), BSON( "b" << 1 ) );
+ // record {_id:1} index for this query
+ ASSERT( _cli.query( ns(), QUERY( "_id" << GT << 0 << "b" << GT << 0 ).sort( "b" ) )->more() );
+ dblock lk;
+ Client::Context ctx( ns() );
+ shared_ptr<Cursor> c = NamespaceDetailsTransient::getCursor( ns(), BSON( "_id" << GT << 0 << "b" << GT << 0 ), BSON( "b" << 1 ) );
+ // {_id:1} requires scan and order, so {b:1} must be chosen.
+ ASSERT( c );
+ ASSERT_EQUALS( 5, c->current().getIntField( "_id" ) );
+ }
+ };
+
+ class MultiIndex : public Base {
+ public:
+ MultiIndex() {
+ _cli.ensureIndex( ns(), BSON( "a" << 1 ) );
+ }
+ string expectedType() const { return "QueryOptimizerCursor"; }
+ BSONObj query() const { return BSON( "_id" << GT << 0 << "a" << GT << 0 ); }
+ void check( const shared_ptr<Cursor> &c ) {}
+ };
+
+ } // namespace GetCursor
+
+ } // namespace QueryOptimizerCursorTests
class All : public Suite {
public:
All() : Suite( "queryoptimizer" ) {}
void setupTests() {
- add< FieldRangeTests::Empty >();
- add< FieldRangeTests::Eq >();
- add< FieldRangeTests::DupEq >();
- add< FieldRangeTests::Lt >();
- add< FieldRangeTests::Lte >();
- add< FieldRangeTests::Gt >();
- add< FieldRangeTests::Gte >();
- add< FieldRangeTests::TwoLt >();
- add< FieldRangeTests::TwoGt >();
- add< FieldRangeTests::EqGte >();
- add< FieldRangeTests::EqGteInvalid >();
- add< FieldRangeTests::Regex >();
- add< FieldRangeTests::RegexObj >();
- add< FieldRangeTests::UnhelpfulRegex >();
- add< FieldRangeTests::In >();
- add< FieldRangeTests::Equality >();
- add< FieldRangeTests::SimplifiedQuery >();
- add< FieldRangeTests::QueryPatternTest >();
- add< FieldRangeTests::NoWhere >();
- add< FieldRangeTests::Numeric >();
- add< FieldRangeTests::InLowerBound >();
- add< FieldRangeTests::InUpperBound >();
- add< FieldRangeTests::UnionBound >();
- add< FieldRangeTests::MultiBound >();
- add< FieldRangeTests::Diff1 >();
- add< FieldRangeTests::Diff2 >();
- add< FieldRangeTests::Diff3 >();
- add< FieldRangeTests::Diff4 >();
- add< FieldRangeTests::Diff5 >();
- add< FieldRangeTests::Diff6 >();
- add< FieldRangeTests::Diff7 >();
- add< FieldRangeTests::Diff8 >();
- add< FieldRangeTests::Diff9 >();
- add< FieldRangeTests::Diff10 >();
- add< FieldRangeTests::Diff11 >();
- add< FieldRangeTests::Diff12 >();
- add< FieldRangeTests::Diff13 >();
- add< FieldRangeTests::Diff14 >();
- add< FieldRangeTests::Diff15 >();
- add< FieldRangeTests::Diff16 >();
- add< FieldRangeTests::Diff17 >();
- add< FieldRangeTests::Diff18 >();
- add< FieldRangeTests::Diff19 >();
- add< FieldRangeTests::Diff20 >();
- add< FieldRangeTests::Diff21 >();
- add< FieldRangeTests::Diff22 >();
- add< FieldRangeTests::Diff23 >();
- add< FieldRangeTests::Diff24 >();
- add< FieldRangeTests::Diff25 >();
- add< FieldRangeTests::Diff26 >();
- add< FieldRangeTests::Diff27 >();
- add< FieldRangeTests::Diff28 >();
- add< FieldRangeTests::Diff29 >();
- add< FieldRangeTests::Diff30 >();
- add< FieldRangeTests::Diff31 >();
- add< FieldRangeTests::Diff32 >();
- add< FieldRangeTests::Diff33 >();
- add< FieldRangeTests::Diff34 >();
- add< FieldRangeTests::Diff35 >();
- add< FieldRangeTests::Diff36 >();
- add< FieldRangeTests::Diff37 >();
- add< FieldRangeTests::Diff38 >();
- add< FieldRangeTests::Diff39 >();
- add< FieldRangeTests::Diff40 >();
- add< FieldRangeTests::Diff41 >();
- add< FieldRangeTests::Diff42 >();
- add< FieldRangeTests::Diff43 >();
- add< FieldRangeTests::Diff44 >();
- add< FieldRangeTests::Diff45 >();
- add< FieldRangeTests::Diff46 >();
- add< FieldRangeTests::Diff47 >();
- add< FieldRangeTests::Diff48 >();
- add< FieldRangeTests::Diff49 >();
- add< FieldRangeTests::Diff50 >();
- add< FieldRangeTests::Diff51 >();
- add< FieldRangeTests::Diff52 >();
- add< FieldRangeTests::Diff53 >();
- add< FieldRangeTests::Diff54 >();
- add< FieldRangeTests::Diff55 >();
- add< FieldRangeTests::Diff56 >();
- add< FieldRangeTests::Diff57 >();
- add< FieldRangeTests::Diff58 >();
- add< FieldRangeTests::Diff59 >();
- add< FieldRangeTests::Diff60 >();
- add< FieldRangeTests::Diff61 >();
- add< FieldRangeTests::Diff62 >();
- add< FieldRangeTests::Diff63 >();
- add< FieldRangeTests::Diff64 >();
- add< FieldRangeTests::DiffMulti1 >();
- add< FieldRangeTests::DiffMulti2 >();
- add< FieldRangeTests::SetIntersect >();
- add< QueryPlanTests::NoIndex >();
- add< QueryPlanTests::SimpleOrder >();
- add< QueryPlanTests::MoreIndexThanNeeded >();
- add< QueryPlanTests::IndexSigns >();
- add< QueryPlanTests::IndexReverse >();
- add< QueryPlanTests::NoOrder >();
- add< QueryPlanTests::EqualWithOrder >();
- add< QueryPlanTests::Optimal >();
- add< QueryPlanTests::MoreOptimal >();
- add< QueryPlanTests::KeyMatch >();
- add< QueryPlanTests::MoreKeyMatch >();
- add< QueryPlanTests::ExactKeyQueryTypes >();
- add< QueryPlanTests::Unhelpful >();
- add< QueryPlanSetTests::NoIndexes >();
- add< QueryPlanSetTests::Optimal >();
- add< QueryPlanSetTests::NoOptimal >();
- add< QueryPlanSetTests::NoSpec >();
- add< QueryPlanSetTests::HintSpec >();
- add< QueryPlanSetTests::HintName >();
- add< QueryPlanSetTests::NaturalHint >();
- add< QueryPlanSetTests::NaturalSort >();
- add< QueryPlanSetTests::BadHint >();
- add< QueryPlanSetTests::Count >();
- add< QueryPlanSetTests::QueryMissingNs >();
- add< QueryPlanSetTests::UnhelpfulIndex >();
- add< QueryPlanSetTests::SingleException >();
- add< QueryPlanSetTests::AllException >();
- add< QueryPlanSetTests::SaveGoodIndex >();
- add< QueryPlanSetTests::TryAllPlansOnErr >();
- add< QueryPlanSetTests::FindOne >();
- add< QueryPlanSetTests::Delete >();
- add< QueryPlanSetTests::DeleteOneScan >();
- add< QueryPlanSetTests::DeleteOneIndex >();
- add< QueryPlanSetTests::TryOtherPlansBeforeFinish >();
- add< QueryPlanSetTests::InQueryIntervals >();
- add< QueryPlanSetTests::EqualityThenIn >();
- add< QueryPlanSetTests::NotEqualityThenIn >();
- add< BestGuess >();
+ __forceLinkGeoPlugin();
+ add<QueryPlanTests::NoIndex>();
+ add<QueryPlanTests::SimpleOrder>();
+ add<QueryPlanTests::MoreIndexThanNeeded>();
+ add<QueryPlanTests::IndexSigns>();
+ add<QueryPlanTests::IndexReverse>();
+ add<QueryPlanTests::NoOrder>();
+ add<QueryPlanTests::EqualWithOrder>();
+ add<QueryPlanTests::Optimal>();
+ add<QueryPlanTests::MoreOptimal>();
+ add<QueryPlanTests::KeyMatch>();
+ add<QueryPlanTests::MoreKeyMatch>();
+ add<QueryPlanTests::ExactKeyQueryTypes>();
+ add<QueryPlanTests::Unhelpful>();
+ add<QueryPlanSetTests::NoIndexes>();
+ add<QueryPlanSetTests::Optimal>();
+ add<QueryPlanSetTests::NoOptimal>();
+ add<QueryPlanSetTests::NoSpec>();
+ add<QueryPlanSetTests::HintSpec>();
+ add<QueryPlanSetTests::HintName>();
+ add<QueryPlanSetTests::NaturalHint>();
+ add<QueryPlanSetTests::NaturalSort>();
+ add<QueryPlanSetTests::BadHint>();
+ add<QueryPlanSetTests::Count>();
+ add<QueryPlanSetTests::QueryMissingNs>();
+ add<QueryPlanSetTests::UnhelpfulIndex>();
+ add<QueryPlanSetTests::SingleException>();
+ add<QueryPlanSetTests::AllException>();
+ add<QueryPlanSetTests::SaveGoodIndex>();
+ add<QueryPlanSetTests::TryAllPlansOnErr>();
+ add<QueryPlanSetTests::FindOne>();
+ add<QueryPlanSetTests::Delete>();
+ add<QueryPlanSetTests::DeleteOneScan>();
+ add<QueryPlanSetTests::DeleteOneIndex>();
+ add<QueryPlanSetTests::TryOtherPlansBeforeFinish>();
+ add<QueryPlanSetTests::InQueryIntervals>();
+ add<QueryPlanSetTests::EqualityThenIn>();
+ add<QueryPlanSetTests::NotEqualityThenIn>();
+ add<BestGuess>();
+ add<QueryOptimizerCursorTests::Empty>();
+ add<QueryOptimizerCursorTests::Unindexed>();
+ add<QueryOptimizerCursorTests::Basic>();
+ add<QueryOptimizerCursorTests::NoMatch>();
+ add<QueryOptimizerCursorTests::Interleaved>();
+ add<QueryOptimizerCursorTests::NotMatch>();
+ add<QueryOptimizerCursorTests::StopInterleaving>();
+ add<QueryOptimizerCursorTests::TakeoverWithDup>();
+ add<QueryOptimizerCursorTests::TakeoverWithNonMatches>();
+ add<QueryOptimizerCursorTests::TakeoverWithTakeoverDup>();
+ add<QueryOptimizerCursorTests::BasicOr>();
+ add<QueryOptimizerCursorTests::OrFirstClauseEmpty>();
+ add<QueryOptimizerCursorTests::OrSecondClauseEmpty>();
+ add<QueryOptimizerCursorTests::OrMultipleClausesEmpty>();
+ add<QueryOptimizerCursorTests::TakeoverCountOr>();
+ add<QueryOptimizerCursorTests::TakeoverEndOfOrClause>();
+ add<QueryOptimizerCursorTests::TakeoverBeforeEndOfOrClause>();
+ add<QueryOptimizerCursorTests::TakeoverAfterEndOfOrClause>();
+ add<QueryOptimizerCursorTests::ManualMatchingDeduping>();
+ add<QueryOptimizerCursorTests::ManualMatchingUsingCurrKey>();
+ add<QueryOptimizerCursorTests::ManualMatchingDedupingTakeover>();
+ add<QueryOptimizerCursorTests::Singlekey>();
+ add<QueryOptimizerCursorTests::Multikey>();
+ add<QueryOptimizerCursorTests::AddOtherPlans>();
+ add<QueryOptimizerCursorTests::OrRangeElimination>();
+ add<QueryOptimizerCursorTests::OrDedup>();
+ add<QueryOptimizerCursorTests::EarlyDups>();
+ add<QueryOptimizerCursorTests::OrPopInTakeover>();
+ add<QueryOptimizerCursorTests::OrCollectionScanAbort>();
+ add<QueryOptimizerCursorTests::Geo>();
+ add<QueryOptimizerCursorTests::YieldNoOp>();
+ add<QueryOptimizerCursorTests::YieldDelete>();
+ add<QueryOptimizerCursorTests::YieldDeleteContinue>();
+ add<QueryOptimizerCursorTests::YieldDeleteContinueFurther>();
+ add<QueryOptimizerCursorTests::YieldUpdate>();
+ add<QueryOptimizerCursorTests::YieldDrop>();
+ add<QueryOptimizerCursorTests::YieldDropOr>();
+ add<QueryOptimizerCursorTests::YieldCappedOverwrite>();
+ add<QueryOptimizerCursorTests::YieldDropIndex>();
+ add<QueryOptimizerCursorTests::YieldMultiplePlansNoOp>();
+ add<QueryOptimizerCursorTests::YieldMultiplePlansAdvanceNoOp>();
+ add<QueryOptimizerCursorTests::YieldMultiplePlansDelete>();
+ add<QueryOptimizerCursorTests::YieldMultiplePlansCappedOverwrite>();
+ add<QueryOptimizerCursorTests::YieldMultiplePlansCappedOverwriteManual>();
+ add<QueryOptimizerCursorTests::YieldMultiplePlansCappedOverwriteManual2>();
+ add<QueryOptimizerCursorTests::TryYieldGeo>();
+ add<QueryOptimizerCursorTests::YieldTakeover>();
+ add<QueryOptimizerCursorTests::YieldTakeoverBasic>();
+ add<QueryOptimizerCursorTests::YieldInactiveCursorAdvance>();
+ add<QueryOptimizerCursorTests::OrderId>();
+ add<QueryOptimizerCursorTests::OrderMultiIndex>();
+ add<QueryOptimizerCursorTests::OrderReject>();
+ add<QueryOptimizerCursorTests::OrderNatural>();
+ add<QueryOptimizerCursorTests::OrderUnindexed>();
+ add<QueryOptimizerCursorTests::RecordedOrderInvalid>();
+ add<QueryOptimizerCursorTests::KillOp>();
+ add<QueryOptimizerCursorTests::KillOpFirstClause>();
+ add<QueryOptimizerCursorTests::Nscanned>();
+ add<QueryOptimizerCursorTests::GetCursor::NoConstraints>();
+ add<QueryOptimizerCursorTests::GetCursor::SimpleId>();
+ add<QueryOptimizerCursorTests::GetCursor::OptimalIndex>();
+ add<QueryOptimizerCursorTests::GetCursor::Geo>();
+ add<QueryOptimizerCursorTests::GetCursor::OutOfOrder>();
+ add<QueryOptimizerCursorTests::GetCursor::BestSavedOutOfOrder>();
+ add<QueryOptimizerCursorTests::GetCursor::MultiIndex>();
}
} myall;
diff --git a/dbtests/querytests.cpp b/dbtests/querytests.cpp
index d008e4d..694053b 100644
--- a/dbtests/querytests.cpp
+++ b/dbtests/querytests.cpp
@@ -18,9 +18,10 @@
*/
#include "pch.h"
-#include "../db/query.h"
+#include "../db/ops/query.h"
+#include "../db/dbhelpers.h"
+#include "../db/clientcursor.h"
-#include "../db/db.h"
#include "../db/instance.h"
#include "../db/json.h"
#include "../db/lasterror.h"
@@ -61,7 +62,7 @@ namespace QueryTests {
}
static void addIndex( const BSONObj &key ) {
BSONObjBuilder b;
- b.append( "name", key.firstElement().fieldName() );
+ b.append( "name", key.firstElementFieldName() );
b.append( "ns", ns() );
b.append( "key", key );
BSONObj o = b.done();
@@ -239,7 +240,7 @@ namespace QueryTests {
}
void run() {
const char *ns = "unittests.querytests.ReturnOneOfManyAndTail";
- client().createCollection( ns, 0, true );
+ client().createCollection( ns, 1024, true );
insert( ns, BSON( "a" << 0 ) );
insert( ns, BSON( "a" << 1 ) );
insert( ns, BSON( "a" << 2 ) );
@@ -258,7 +259,7 @@ namespace QueryTests {
}
void run() {
const char *ns = "unittests.querytests.TailNotAtEnd";
- client().createCollection( ns, 0, true );
+ client().createCollection( ns, 2047, true );
insert( ns, BSON( "a" << 0 ) );
insert( ns, BSON( "a" << 1 ) );
insert( ns, BSON( "a" << 2 ) );
@@ -283,7 +284,7 @@ namespace QueryTests {
}
void run() {
const char *ns = "unittests.querytests.EmptyTail";
- client().createCollection( ns, 0, true );
+ client().createCollection( ns, 1900, true );
auto_ptr< DBClientCursor > c = client().query( ns, Query().hint( BSON( "$natural" << 1 ) ), 2, 0, 0, QueryOption_CursorTailable );
ASSERT_EQUALS( 0, c->getCursorId() );
ASSERT( c->isDead() );
@@ -301,7 +302,7 @@ namespace QueryTests {
}
void run() {
const char *ns = "unittests.querytests.TailableDelete";
- client().createCollection( ns, 0, true, 2 );
+ client().createCollection( ns, 8192, true, 2 );
insert( ns, BSON( "a" << 0 ) );
insert( ns, BSON( "a" << 1 ) );
auto_ptr< DBClientCursor > c = client().query( ns, Query().hint( BSON( "$natural" << 1 ) ), 2, 0, 0, QueryOption_CursorTailable );
@@ -322,7 +323,7 @@ namespace QueryTests {
}
void run() {
const char *ns = "unittests.querytests.TailableInsertDelete";
- client().createCollection( ns, 0, true );
+ client().createCollection( ns, 1330, true );
insert( ns, BSON( "a" << 0 ) );
insert( ns, BSON( "a" << 1 ) );
auto_ptr< DBClientCursor > c = client().query( ns, Query().hint( BSON( "$natural" << 1 ) ), 2, 0, 0, QueryOption_CursorTailable );
@@ -356,23 +357,32 @@ namespace QueryTests {
~TailableQueryOnId() {
client().dropCollection( "unittests.querytests.TailableQueryOnId" );
}
+
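+ // Helper: build a document with freshly generated ObjectIds in '_id' and 'value' plus the integer 'a', then insert it.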
+ void insertA(const char* ns, int a) {
+ BSONObjBuilder b;
+ b.appendOID("_id", 0, true);
+ b.appendOID("value", 0, true);
+ b.append("a", a);
+ insert(ns, b.obj());
+ }
+
void run() {
const char *ns = "unittests.querytests.TailableQueryOnId";
BSONObj info;
- client().runCommand( "unittests", BSON( "create" << "querytests.TailableQueryOnId" << "capped" << true << "autoIndexId" << true ), info );
- insert( ns, BSON( "a" << 0 ) );
- insert( ns, BSON( "a" << 1 ) );
+ client().runCommand( "unittests", BSON( "create" << "querytests.TailableQueryOnId" << "capped" << true << "size" << 8192 << "autoIndexId" << true ), info );
+ insertA( ns, 0 );
+ insertA( ns, 1 );
auto_ptr< DBClientCursor > c1 = client().query( ns, QUERY( "a" << GT << -1 ), 0, 0, 0, QueryOption_CursorTailable );
OID id;
id.init("000000000000000000000000");
- auto_ptr< DBClientCursor > c2 = client().query( ns, QUERY( "_id" << GT << id ), 0, 0, 0, QueryOption_CursorTailable );
+ auto_ptr< DBClientCursor > c2 = client().query( ns, QUERY( "value" << GT << id ), 0, 0, 0, QueryOption_CursorTailable );
c1->next();
c1->next();
ASSERT( !c1->more() );
c2->next();
c2->next();
ASSERT( !c2->more() );
- insert( ns, BSON( "a" << 2 ) );
+ insertA( ns, 2 );
ASSERT( c1->more() );
ASSERT_EQUALS( 2, c1->next().getIntField( "a" ) );
ASSERT( !c1->more() );
@@ -390,7 +400,6 @@ namespace QueryTests {
}
void run() {
const char *ns = "unittests.querytests.OplogReplayMode";
- insert( ns, BSON( "ts" << 3 ) );
insert( ns, BSON( "ts" << 0 ) );
insert( ns, BSON( "ts" << 1 ) );
insert( ns, BSON( "ts" << 2 ) );
@@ -398,6 +407,12 @@ namespace QueryTests {
ASSERT( c->more() );
ASSERT_EQUALS( 2, c->next().getIntField( "ts" ) );
ASSERT( !c->more() );
+
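+ // After another matching document is inserted, a fresh OplogReplay query should again start at ts 2 and report more results.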
+ insert( ns, BSON( "ts" << 3 ) );
+ c = client().query( ns, QUERY( "ts" << GT << 1 ).hint( BSON( "$natural" << 1 ) ), 0, 0, 0, QueryOption_OplogReplay );
+ ASSERT( c->more() );
+ ASSERT_EQUALS( 2, c->next().getIntField( "ts" ) );
+ ASSERT( c->more() );
}
};
@@ -725,6 +740,90 @@ namespace QueryTests {
};
BSONObj MinMax::empty_;
+ class MatchCodeCodeWScope : public ClientBase {
+ public:
+ MatchCodeCodeWScope() : _ns( "unittests.querytests.MatchCodeCodeWScope" ) {}
+ ~MatchCodeCodeWScope() {
+ client().dropCollection( "unittests.querytests.MatchCodeCodeWScope" );
+ }
+ void run() {
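+ // $type matching for Code/CodeWScope should behave the same with and without an index on 'a'.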
+ checkMatch();
+ client().ensureIndex( _ns, BSON( "a" << 1 ) );
+ checkMatch();
+ // Use explain queries to check index bounds.
+ {
+ BSONObj explain = client().findOne( _ns, QUERY( "a" << BSON( "$type" << (int)Code ) ).explain() );
+ BSONObjBuilder lower;
+ lower.appendCode( "", "" );
+ BSONObjBuilder upper;
+ upper.appendCodeWScope( "", "", BSONObj() );
+ ASSERT( lower.done().firstElement().valuesEqual( explain[ "indexBounds" ].Obj()[ "a" ].Array()[ 0 ].Array()[ 0 ] ) );
+ ASSERT( upper.done().firstElement().valuesEqual( explain[ "indexBounds" ].Obj()[ "a" ].Array()[ 0 ].Array()[ 1 ] ) );
+ }
+ {
+ BSONObj explain = client().findOne( _ns, QUERY( "a" << BSON( "$type" << (int)CodeWScope ) ).explain() );
+ BSONObjBuilder lower;
+ lower.appendCodeWScope( "", "", BSONObj() );
+ // This upper bound may change if a new bson type is added.
+ BSONObjBuilder upper;
+ upper << "" << BSON( "$maxElement" << 1 );
+ ASSERT( lower.done().firstElement().valuesEqual( explain[ "indexBounds" ].Obj()[ "a" ].Array()[ 0 ].Array()[ 0 ] ) );
+ ASSERT( upper.done().firstElement().valuesEqual( explain[ "indexBounds" ].Obj()[ "a" ].Array()[ 0 ].Array()[ 1 ] ) );
+ }
+ }
+ private:
+ void checkMatch() {
+ client().remove( _ns, BSONObj() );
+
+ client().insert( _ns, code() );
+ client().insert( _ns, codeWScope() );
+
+ ASSERT_EQUALS( 1U, client().count( _ns, code() ) );
+ ASSERT_EQUALS( 1U, client().count( _ns, codeWScope() ) );
+
+ ASSERT_EQUALS( 1U, client().count( _ns, BSON( "a" << BSON( "$type" << (int)Code ) ) ) );
+ ASSERT_EQUALS( 1U, client().count( _ns, BSON( "a" << BSON( "$type" << (int)CodeWScope ) ) ) );
+ }
+ BSONObj code() const {
+ BSONObjBuilder codeBuilder;
+ codeBuilder.appendCode( "a", "return 1;" );
+ return codeBuilder.obj();
+ }
+ BSONObj codeWScope() const {
+ BSONObjBuilder codeWScopeBuilder;
+ codeWScopeBuilder.appendCodeWScope( "a", "return 1;", BSONObj() );
+ return codeWScopeBuilder.obj();
+ }
+ const char *_ns;
+ };
+
+ class MatchDBRefType : public ClientBase {
+ public:
+ MatchDBRefType() : _ns( "unittests.querytests.MatchDBRefType" ) {}
+ ~MatchDBRefType() {
+ client().dropCollection( "unittests.querytests.MatchDBRefType" );
+ }
+ void run() {
+ checkMatch();
+ client().ensureIndex( _ns, BSON( "a" << 1 ) );
+ checkMatch();
+ }
+ private:
+ void checkMatch() {
+ client().remove( _ns, BSONObj() );
+ client().insert( _ns, dbref() );
+ ASSERT_EQUALS( 1U, client().count( _ns, dbref() ) );
+ ASSERT_EQUALS( 1U, client().count( _ns, BSON( "a" << BSON( "$type" << (int)DBRef ) ) ) );
+ }
+ BSONObj dbref() const {
+ BSONObjBuilder b;
+ OID oid;
+ b.appendDBRef( "a", "ns", oid );
+ return b.obj();
+ }
+ const char *_ns;
+ };
+
class DirectLocking : public ClientBase {
public:
void run() {
@@ -850,10 +949,12 @@ namespace QueryTests {
writelock lk("");
Client::Context ctx( "unittests" );
+ // note that extents are always at least 4KB now - so this will get rounded up a bit.
ASSERT( userCreateNS( ns() , fromjson( "{ capped : true , size : 2000 }" ) , err , false ) );
- for ( int i=0; i<100; i++ ) {
+ for ( int i=0; i<200; i++ ) {
insertNext();
- ASSERT( count() < 45 );
+// cout << count() << endl;
+ ASSERT( count() < 90 );
}
int a = count();
@@ -870,7 +971,7 @@ namespace QueryTests {
insertNext();
ASSERT( c->more() );
- for ( int i=0; i<50; i++ ) {
+ for ( int i=0; i<90; i++ ) {
insertNext();
}
@@ -879,7 +980,10 @@ namespace QueryTests {
}
void insertNext() {
- insert( ns() , BSON( "i" << _n++ ) );
+ BSONObjBuilder b;
+ b.appendOID("_id", 0, true);
+ b.append("i", _n++);
+ insert( ns() , b.obj() );
}
int _n;
@@ -913,6 +1017,7 @@ namespace QueryTests {
unsigned long long slow , fast;
int n = 10000;
+ DEV n = 1000;
{
Timer t;
for ( int i=0; i<n; i++ ) {
@@ -986,7 +1091,7 @@ namespace QueryTests {
void run() {
BSONObj info;
- ASSERT( client().runCommand( "unittests", BSON( "create" << "querytests.findingstart" << "capped" << true << "size" << 1000 << "$nExtents" << 5 << "autoIndexId" << false ), info ) );
+ ASSERT( client().runCommand( "unittests", BSON( "create" << "querytests.findingstart" << "capped" << true << "$nExtents" << 5 << "autoIndexId" << false ), info ) );
int i = 0;
for( int oldCount = -1;
@@ -1003,6 +1108,7 @@ namespace QueryTests {
ASSERT( !next[ "ts" ].eoo() );
ASSERT_EQUALS( ( j > min ? j : min ), next[ "ts" ].numberInt() );
}
+ //cout << k << endl;
}
}
@@ -1023,7 +1129,7 @@ namespace QueryTests {
unsigned startNumCursors = ClientCursor::numCursors();
BSONObj info;
- ASSERT( client().runCommand( "unittests", BSON( "create" << "querytests.findingstart" << "capped" << true << "size" << 10000 << "$nExtents" << 5 << "autoIndexId" << false ), info ) );
+ ASSERT( client().runCommand( "unittests", BSON( "create" << "querytests.findingstart" << "capped" << true << "$nExtents" << 5 << "autoIndexId" << false ), info ) );
int i = 0;
for( ; i < 150; client().insert( ns(), BSON( "ts" << i++ ) ) );
@@ -1046,7 +1152,35 @@ namespace QueryTests {
private:
int _old;
};
+
+ /**
+ * Check OplogReplay mode where query timestamp is earlier than the earliest
+ * entry in the collection.
+ */
+ class FindingStartStale : public CollectionBase {
+ public:
+ FindingStartStale() : CollectionBase( "findingstart" ) {}
+
+ void run() {
+ unsigned startNumCursors = ClientCursor::numCursors();
+
+ BSONObj info;
+ ASSERT( client().runCommand( "unittests", BSON( "create" << "querytests.findingstart" << "capped" << true << "$nExtents" << 5 << "autoIndexId" << false ), info ) );
+
+ // Check OplogReplay mode with empty collection.
+ auto_ptr< DBClientCursor > c = client().query( ns(), QUERY( "ts" << GTE << 50 ), 0, 0, 0, QueryOption_OplogReplay );
+ ASSERT( !c->more() );
+ // Check with some docs in the collection.
+ for( int i = 100; i < 150; client().insert( ns(), BSON( "ts" << i++ ) ) );
+ c = client().query( ns(), QUERY( "ts" << GTE << 50 ), 0, 0, 0, QueryOption_OplogReplay );
+ ASSERT( c->more() );
+ ASSERT_EQUALS( 100, c->next()[ "ts" ].numberInt() );
+
+ // Check that no persistent cursors outlast our queries above.
+ ASSERT_EQUALS( startNumCursors, ClientCursor::numCursors() );
+ }
+ };
class WhatsMyUri : public CollectionBase {
public:
@@ -1217,6 +1351,7 @@ namespace QueryTests {
}
void setupTests() {
+ add< FindingStart >();
add< CountBasic >();
add< CountQuery >();
add< CountFields >();
@@ -1250,6 +1385,8 @@ namespace QueryTests {
add< IndexInsideArrayCorrect >();
add< SubobjArr >();
add< MinMax >();
+ add< MatchCodeCodeWScope >();
+ add< MatchDBRefType >();
add< DirectLocking >();
add< FastCountIn >();
add< EmbeddedArray >();
@@ -1258,8 +1395,8 @@ namespace QueryTests {
add< TailableCappedRaceCondition >();
add< HelperTest >();
add< HelperByIdTest >();
- add< FindingStart >();
add< FindingStartPartiallyFull >();
+ add< FindingStartStale >();
add< WhatsMyUri >();
add< parsedtests::basic1 >();
diff --git a/dbtests/queryutiltests.cpp b/dbtests/queryutiltests.cpp
new file mode 100644
index 0000000..e825b4f
--- /dev/null
+++ b/dbtests/queryutiltests.cpp
@@ -0,0 +1,989 @@
+// queryutiltests.cpp : query utility unit tests
+//
+
+/**
+ * Copyright (C) 2009 10gen Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include "pch.h"
+#include "../db/queryutil.h"
+#include "../db/querypattern.h"
+#include "../db/instance.h"
+#include "../db/pdfile.h"
+#include "dbtests.h"
+
+namespace QueryUtilTests {
+
+ namespace FieldRangeTests {
+ class Base {
+ public:
+ virtual ~Base() {}
+ void run() {
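+ // Build a FieldRangeSet from the subclass's query() and verify the computed bounds on field "a".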
+ const FieldRangeSet s( "ns", query(), true );
+ checkElt( lower(), s.range( "a" ).min() );
+ checkElt( upper(), s.range( "a" ).max() );
+ ASSERT_EQUALS( lowerInclusive(), s.range( "a" ).minInclusive() );
+ ASSERT_EQUALS( upperInclusive(), s.range( "a" ).maxInclusive() );
+ }
+ protected:
+ virtual BSONObj query() = 0;
+ virtual BSONElement lower() { return minKey.firstElement(); }
+ virtual bool lowerInclusive() { return true; }
+ virtual BSONElement upper() { return maxKey.firstElement(); }
+ virtual bool upperInclusive() { return true; }
+ static void checkElt( BSONElement expected, BSONElement actual ) {
+ if ( expected.woCompare( actual, false ) ) {
+ log() << "expected: " << expected << ", got: " << actual;
+ ASSERT( false );
+ }
+ }
+ };
+
+
+ class NumericBase : public Base {
+ public:
+ NumericBase() {
+ o = BSON( "min" << -numeric_limits<double>::max() << "max" << numeric_limits<double>::max() );
+ }
+
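+ // Default bounds for numeric range tests: the full finite double range.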
+ virtual BSONElement lower() { return o["min"]; }
+ virtual BSONElement upper() { return o["max"]; }
+ private:
+ BSONObj o;
+ };
+
+ class Empty : public Base {
+ virtual BSONObj query() { return BSONObj(); }
+ };
+
+ class Eq : public Base {
+ public:
+ Eq() : o_( BSON( "a" << 1 ) ) {}
+ virtual BSONObj query() { return o_; }
+ virtual BSONElement lower() { return o_.firstElement(); }
+ virtual BSONElement upper() { return o_.firstElement(); }
+ BSONObj o_;
+ };
+
+ class DupEq : public Eq {
+ public:
+ virtual BSONObj query() { return BSON( "a" << 1 << "b" << 2 << "a" << 1 ); }
+ };
+
+ class Lt : public NumericBase {
+ public:
+ Lt() : o_( BSON( "-" << 1 ) ) {}
+ virtual BSONObj query() { return BSON( "a" << LT << 1 ); }
+ virtual BSONElement upper() { return o_.firstElement(); }
+ virtual bool upperInclusive() { return false; }
+ BSONObj o_;
+ };
+
+ class Lte : public Lt {
+ virtual BSONObj query() { return BSON( "a" << LTE << 1 ); }
+ virtual bool upperInclusive() { return true; }
+ };
+
+ class Gt : public NumericBase {
+ public:
+ Gt() : o_( BSON( "-" << 1 ) ) {}
+ virtual BSONObj query() { return BSON( "a" << GT << 1 ); }
+ virtual BSONElement lower() { return o_.firstElement(); }
+ virtual bool lowerInclusive() { return false; }
+ BSONObj o_;
+ };
+
+ class Gte : public Gt {
+ virtual BSONObj query() { return BSON( "a" << GTE << 1 ); }
+ virtual bool lowerInclusive() { return true; }
+ };
+
+ class TwoLt : public Lt {
+ virtual BSONObj query() { return BSON( "a" << LT << 1 << LT << 5 ); }
+ };
+
+ class TwoGt : public Gt {
+ virtual BSONObj query() { return BSON( "a" << GT << 0 << GT << 1 ); }
+ };
+
+ class EqGte : public Eq {
+ virtual BSONObj query() { return BSON( "a" << 1 << "a" << GTE << 1 ); }
+ };
+
+ class EqGteInvalid {
+ public:
+ void run() {
+ FieldRangeSet frs( "ns", BSON( "a" << 1 << "a" << GTE << 2 ), true );
+ ASSERT( !frs.matchPossible() );
+ }
+ };
+
+ struct RegexBase : Base {
+ void run() { // need to only look at the first interval
+ FieldRangeSet s( "ns", query(), true );
+ checkElt( lower(), s.range( "a" ).intervals()[0]._lower._bound );
+ checkElt( upper(), s.range( "a" ).intervals()[0]._upper._bound );
+ ASSERT_EQUALS( lowerInclusive(), s.range( "a" ).intervals()[0]._lower._inclusive );
+ ASSERT_EQUALS( upperInclusive(), s.range( "a" ).intervals()[0]._upper._inclusive );
+ }
+ };
+
+ class Regex : public RegexBase {
+ public:
+ Regex() : o1_( BSON( "" << "abc" ) ), o2_( BSON( "" << "abd" ) ) {}
+ virtual BSONObj query() {
+ BSONObjBuilder b;
+ b.appendRegex( "a", "^abc" );
+ return b.obj();
+ }
+ virtual BSONElement lower() { return o1_.firstElement(); }
+ virtual BSONElement upper() { return o2_.firstElement(); }
+ virtual bool upperInclusive() { return false; }
+ BSONObj o1_, o2_;
+ };
+
+ class RegexObj : public RegexBase {
+ public:
+ RegexObj() : o1_( BSON( "" << "abc" ) ), o2_( BSON( "" << "abd" ) ) {}
+ virtual BSONObj query() { return BSON("a" << BSON("$regex" << "^abc")); }
+ virtual BSONElement lower() { return o1_.firstElement(); }
+ virtual BSONElement upper() { return o2_.firstElement(); }
+ virtual bool upperInclusive() { return false; }
+ BSONObj o1_, o2_;
+ };
+
+ class UnhelpfulRegex : public RegexBase {
+ public:
+ UnhelpfulRegex() {
+ BSONObjBuilder b;
+ b.appendMinForType("lower", String);
+ b.appendMaxForType("upper", String);
+ limits = b.obj();
+ }
+
+ virtual BSONObj query() {
+ BSONObjBuilder b;
+ b.appendRegex( "a", "abc" );
+ return b.obj();
+ }
+ virtual BSONElement lower() { return limits["lower"]; }
+ virtual BSONElement upper() { return limits["upper"]; }
+ virtual bool upperInclusive() { return false; }
+ BSONObj limits;
+ };
+
+ class In : public Base {
+ public:
+ In() : o1_( BSON( "-" << -3 ) ), o2_( BSON( "-" << 44 ) ) {}
+ virtual BSONObj query() {
+ vector< int > vals;
+ vals.push_back( 4 );
+ vals.push_back( 8 );
+ vals.push_back( 44 );
+ vals.push_back( -1 );
+ vals.push_back( -3 );
+ vals.push_back( 0 );
+ BSONObjBuilder bb;
+ bb.append( "$in", vals );
+ BSONObjBuilder b;
+ b.append( "a", bb.done() );
+ return b.obj();
+ }
+ virtual BSONElement lower() { return o1_.firstElement(); }
+ virtual BSONElement upper() { return o2_.firstElement(); }
+ BSONObj o1_, o2_;
+ };
+
+ class Equality {
+ public:
+ void run() {
+ FieldRangeSet s( "ns", BSON( "a" << 1 ), true );
+ ASSERT( s.range( "a" ).equality() );
+ FieldRangeSet s2( "ns", BSON( "a" << GTE << 1 << LTE << 1 ), true );
+ ASSERT( s2.range( "a" ).equality() );
+ FieldRangeSet s3( "ns", BSON( "a" << GT << 1 << LTE << 1 ), true );
+ ASSERT( !s3.range( "a" ).equality() );
+ FieldRangeSet s4( "ns", BSON( "a" << GTE << 1 << LT << 1 ), true );
+ ASSERT( !s4.range( "a" ).equality() );
+ FieldRangeSet s5( "ns", BSON( "a" << GTE << 1 << LTE << 1 << GT << 1 ), true );
+ ASSERT( !s5.range( "a" ).equality() );
+ FieldRangeSet s6( "ns", BSON( "a" << GTE << 1 << LTE << 1 << LT << 1 ), true );
+ ASSERT( !s6.range( "a" ).equality() );
+ }
+ };
+
+ class SimplifiedQuery {
+ public:
+ void run() {
+ FieldRangeSet frs( "ns", BSON( "a" << GT << 1 << GT << 5 << LT << 10 << "b" << 4 << "c" << LT << 4 << LT << 6 << "d" << GTE << 0 << GT << 0 << "e" << GTE << 0 << LTE << 10 ), true );
+ BSONObj simple = frs.simplifiedQuery();
+ cout << "simple: " << simple << endl;
+ ASSERT( !simple.getObjectField( "a" ).woCompare( fromjson( "{$gt:5,$lt:10}" ) ) );
+ ASSERT_EQUALS( 4, simple.getIntField( "b" ) );
+ ASSERT( !simple.getObjectField( "c" ).woCompare( BSON("$gte" << -numeric_limits<double>::max() << "$lt" << 4 ) ) );
+ ASSERT( !simple.getObjectField( "d" ).woCompare( BSON("$gt" << 0 << "$lte" << numeric_limits<double>::max() ) ) );
+ ASSERT( !simple.getObjectField( "e" ).woCompare( fromjson( "{$gte:0,$lte:10}" ) ) );
+ }
+ };
+
+ class QueryPatternTest {
+ public:
+ void run() {
+ ASSERT( p( BSON( "a" << 1 ) ) == p( BSON( "a" << 1 ) ) );
+ ASSERT( p( BSON( "a" << 1 ) ) == p( BSON( "a" << 5 ) ) );
+ ASSERT( p( BSON( "a" << 1 ) ) != p( BSON( "b" << 1 ) ) );
+ ASSERT( p( BSON( "a" << 1 ) ) != p( BSON( "a" << LTE << 1 ) ) );
+ ASSERT( p( BSON( "a" << 1 ) ) != p( BSON( "a" << 1 << "b" << 2 ) ) );
+ ASSERT( p( BSON( "a" << 1 << "b" << 3 ) ) != p( BSON( "a" << 1 ) ) );
+ ASSERT( p( BSON( "a" << LT << 1 ) ) == p( BSON( "a" << LTE << 5 ) ) );
+ ASSERT( p( BSON( "a" << LT << 1 << GTE << 0 ) ) == p( BSON( "a" << LTE << 5 << GTE << 0 ) ) );
+ ASSERT( p( BSON( "a" << 1 ) ) < p( BSON( "a" << 1 << "b" << 1 ) ) );
+ ASSERT( !( p( BSON( "a" << 1 << "b" << 1 ) ) < p( BSON( "a" << 1 ) ) ) );
+ ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) == p( BSON( "a" << 4 ), BSON( "b" << "a" ) ) );
+ ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) == p( BSON( "a" << 4 ), BSON( "b" << -1 ) ) );
+ ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) != p( BSON( "a" << 4 ), BSON( "c" << 1 ) ) );
+ ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 << "c" << -1 ) ) == p( BSON( "a" << 4 ), BSON( "b" << -1 << "c" << 1 ) ) );
+ ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 << "c" << 1 ) ) != p( BSON( "a" << 4 ), BSON( "b" << 1 ) ) );
+ ASSERT( p( BSON( "a" << 1 ), BSON( "b" << 1 ) ) != p( BSON( "a" << 4 ), BSON( "b" << 1 << "c" << 1 ) ) );
+ }
+ private:
+ static QueryPattern p( const BSONObj &query, const BSONObj &sort = BSONObj() ) {
+ return FieldRangeSet( "", query, true ).pattern( sort );
+ }
+ };
+
+ class NoWhere {
+ public:
+ void run() {
+ ASSERT_EQUALS( 0, FieldRangeSet( "ns", BSON( "$where" << 1 ), true ).nNontrivialRanges() );
+ }
+ };
+
+ class Numeric {
+ public:
+ void run() {
+ FieldRangeSet f( "", BSON( "a" << 1 ), true );
+ ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 2.0 ).firstElement() ) < 0 );
+ ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 0.0 ).firstElement() ) > 0 );
+ }
+ };
+
+ class InLowerBound {
+ public:
+ void run() {
+ FieldRangeSet f( "", fromjson( "{a:{$gt:4,$in:[1,2,3,4,5,6]}}" ), true );
+ ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 5.0 ).firstElement(), false ) == 0 );
+ ASSERT( f.range( "a" ).max().woCompare( BSON( "a" << 6.0 ).firstElement(), false ) == 0 );
+ }
+ };
+
+ class InUpperBound {
+ public:
+ void run() {
+ FieldRangeSet f( "", fromjson( "{a:{$lt:4,$in:[1,2,3,4,5,6]}}" ), true );
+ ASSERT( f.range( "a" ).min().woCompare( BSON( "a" << 1.0 ).firstElement(), false ) == 0 );
+ ASSERT( f.range( "a" ).max().woCompare( BSON( "a" << 3.0 ).firstElement(), false ) == 0 );
+ }
+ };
+
+ class UnionBound {
+ public:
+ void run() {
+ FieldRangeSet frs( "", fromjson( "{a:{$gt:1,$lt:9},b:{$gt:9,$lt:12}}" ), true );
+ FieldRange ret = frs.range( "a" );
+ ret |= frs.range( "b" );
+ ASSERT_EQUALS( 2U, ret.intervals().size() );
+ }
+ };
+
+ class MultiBound {
+ public:
+ void run() {
+ FieldRangeSet frs1( "", fromjson( "{a:{$in:[1,3,5,7,9]}}" ), true );
+ FieldRangeSet frs2( "", fromjson( "{a:{$in:[2,3,5,8,9]}}" ), true );
+ FieldRange fr1 = frs1.range( "a" );
+ FieldRange fr2 = frs2.range( "a" );
+ fr1 &= fr2;
+ ASSERT( fr1.min().woCompare( BSON( "a" << 3.0 ).firstElement(), false ) == 0 );
+ ASSERT( fr1.max().woCompare( BSON( "a" << 9.0 ).firstElement(), false ) == 0 );
+ vector< FieldInterval > intervals = fr1.intervals();
+ vector< FieldInterval >::const_iterator j = intervals.begin();
+ double expected[] = { 3, 5, 9 };
+ for( int i = 0; i < 3; ++i, ++j ) {
+ ASSERT_EQUALS( expected[ i ], j->_lower._bound.number() );
+ ASSERT( j->_lower._inclusive );
+ ASSERT( j->_lower == j->_upper );
+ }
+ ASSERT( j == intervals.end() );
+ }
+ };
+
+ class DiffBase {
+ public:
+ virtual ~DiffBase() {}
+ void run() {
+ FieldRangeSet frs( "", fromjson( obj().toString() ), true );
+ FieldRange ret = frs.range( "a" );
+ ret -= frs.range( "b" );
+ check( ret );
+ }
+ protected:
+ void check( const FieldRange &fr ) {
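+ // nums() and incs() list the expected lower/upper bound and inclusivity for each interval, in order.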
+ vector< FieldInterval > fi = fr.intervals();
+ ASSERT_EQUALS( len(), fi.size() );
+ int i = 0;
+ for( vector< FieldInterval >::const_iterator j = fi.begin(); j != fi.end(); ++j ) {
+ ASSERT_EQUALS( nums()[ i ], j->_lower._bound.numberInt() );
+ ASSERT_EQUALS( incs()[ i ], j->_lower._inclusive );
+ ++i;
+ ASSERT_EQUALS( nums()[ i ], j->_upper._bound.numberInt() );
+ ASSERT_EQUALS( incs()[ i ], j->_upper._inclusive );
+ ++i;
+ }
+ }
+ virtual unsigned len() const = 0;
+ virtual const int *nums() const = 0;
+ virtual const bool *incs() const = 0;
+ virtual BSONObj obj() const = 0;
+ };
+
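+ // Expects the range difference to leave a single interval [low, high] with the given inclusivity flags.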
+ class TwoRangeBase : public DiffBase {
+ public:
+ TwoRangeBase( string obj, int low, int high, bool lowI, bool highI )
+ : _obj( obj ) {
+ _n[ 0 ] = low;
+ _n[ 1 ] = high;
+ _b[ 0 ] = lowI;
+ _b[ 1 ] = highI;
+ }
+ private:
+ virtual unsigned len() const { return 1; }
+ virtual const int *nums() const { return _n; }
+ virtual const bool *incs() const { return _b; }
+ virtual BSONObj obj() const { return fromjson( _obj ); }
+ string _obj;
+ int _n[ 2 ];
+ bool _b[ 2 ];
+ };
+
+ struct Diff1 : public TwoRangeBase {
+ Diff1() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gt:3,$lt:4}}", 1, 2, false, false ) {}
+ };
+
+ struct Diff2 : public TwoRangeBase {
+ Diff2() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gt:2,$lt:4}}", 1, 2, false, false ) {}
+ };
+
+ struct Diff3 : public TwoRangeBase {
+ Diff3() : TwoRangeBase( "{a:{$gt:1,$lte:2},b:{$gt:2,$lt:4}}", 1, 2, false, true ) {}
+ };
+
+ struct Diff4 : public TwoRangeBase {
+ Diff4() : TwoRangeBase( "{a:{$gt:1,$lt:2},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
+ };
+
+ struct Diff5 : public TwoRangeBase {
+ Diff5() : TwoRangeBase( "{a:{$gt:1,$lte:2},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
+ };
+
+ struct Diff6 : public TwoRangeBase {
+ Diff6() : TwoRangeBase( "{a:{$gt:1,$lte:3},b:{$gte:2,$lt:4}}", 1, 2, false, false) {}
+ };
+
+ struct Diff7 : public TwoRangeBase {
+ Diff7() : TwoRangeBase( "{a:{$gt:1,$lte:3},b:{$gt:2,$lt:4}}", 1, 2, false, true) {}
+ };
+
+ struct Diff8 : public TwoRangeBase {
+ Diff8() : TwoRangeBase( "{a:{$gt:1,$lt:4},b:{$gt:2,$lt:4}}", 1, 2, false, true) {}
+ };
+
+ struct Diff9 : public TwoRangeBase {
+ Diff9() : TwoRangeBase( "{a:{$gt:1,$lt:4},b:{$gt:2,$lte:4}}", 1, 2, false, true) {}
+ };
+
+ struct Diff10 : public TwoRangeBase {
+ Diff10() : TwoRangeBase( "{a:{$gt:1,$lte:4},b:{$gt:2,$lte:4}}", 1, 2, false, true) {}
+ };
+
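+ // Expects the range difference to leave two intervals, [low1, high1] and [low2, high2].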
+ class SplitRangeBase : public DiffBase {
+ public:
+ SplitRangeBase( string obj, int low1, bool low1I, int high1, bool high1I, int low2, bool low2I, int high2, bool high2I )
+ : _obj( obj ) {
+ _n[ 0 ] = low1;
+ _n[ 1 ] = high1;
+ _n[ 2 ] = low2;
+ _n[ 3 ] = high2;
+ _b[ 0 ] = low1I;
+ _b[ 1 ] = high1I;
+ _b[ 2 ] = low2I;
+ _b[ 3 ] = high2I;
+ }
+ private:
+ virtual unsigned len() const { return 2; }
+ virtual const int *nums() const { return _n; }
+ virtual const bool *incs() const { return _b; }
+ virtual BSONObj obj() const { return fromjson( _obj ); }
+ string _obj;
+ int _n[ 4 ];
+ bool _b[ 4 ];
+ };
+
+ struct Diff11 : public SplitRangeBase {
+ Diff11() : SplitRangeBase( "{a:{$gt:1,$lte:4},b:{$gt:2,$lt:4}}", 1, false, 2, true, 4, true, 4, true) {}
+ };
+
+ struct Diff12 : public SplitRangeBase {
+ Diff12() : SplitRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:2,$lt:4}}", 1, false, 2, true, 4, true, 5, false) {}
+ };
+
+ struct Diff13 : public TwoRangeBase {
+ Diff13() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:1,$lt:4}}", 4, 5, true, false) {}
+ };
+
+ struct Diff14 : public SplitRangeBase {
+ Diff14() : SplitRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lt:4}}", 1, true, 1, true, 4, true, 5, false) {}
+ };
+
+ struct Diff15 : public TwoRangeBase {
+ Diff15() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gte:1,$lt:4}}", 4, 5, true, false) {}
+ };
+
+ struct Diff16 : public TwoRangeBase {
+ Diff16() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:{$gte:1,$lt:4}}", 4, 5, true, false) {}
+ };
+
+ struct Diff17 : public TwoRangeBase {
+ Diff17() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lt:4}}", 4, 5, true, false) {}
+ };
+
+ struct Diff18 : public TwoRangeBase {
+ Diff18() : TwoRangeBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lte:4}}", 4, 5, false, false) {}
+ };
+
+ struct Diff19 : public TwoRangeBase {
+ Diff19() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gte:0,$lte:1}}", 1, 5, false, true) {}
+ };
+
+ struct Diff20 : public TwoRangeBase {
+ Diff20() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:0,$lte:1}}", 1, 5, false, true) {}
+ };
+
+ struct Diff21 : public TwoRangeBase {
+ Diff21() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gte:0,$lt:1}}", 1, 5, true, true) {}
+ };
+
+ struct Diff22 : public TwoRangeBase {
+ Diff22() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:0,$lt:1}}", 1, 5, false, true) {}
+ };
+
+ struct Diff23 : public TwoRangeBase {
+ Diff23() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:0,$lt:0.5}}", 1, 5, false, true) {}
+ };
+
+ struct Diff24 : public TwoRangeBase {
+ Diff24() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:0}", 1, 5, false, true) {}
+ };
+
+ struct Diff25 : public TwoRangeBase {
+ Diff25() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:0}", 1, 5, true, true) {}
+ };
+
+ struct Diff26 : public TwoRangeBase {
+ Diff26() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:1}", 1, 5, false, true) {}
+ };
+
+ struct Diff27 : public TwoRangeBase {
+ Diff27() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:1}", 1, 5, false, true) {}
+ };
+
+ struct Diff28 : public SplitRangeBase {
+ Diff28() : SplitRangeBase( "{a:{$gte:1,$lte:5},b:3}", 1, true, 3, false, 3, false, 5, true) {}
+ };
+
+ struct Diff29 : public TwoRangeBase {
+ Diff29() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:5}", 1, 5, true, false) {}
+ };
+
+ struct Diff30 : public TwoRangeBase {
+ Diff30() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:5}", 1, 5, true, false) {}
+ };
+
+ struct Diff31 : public TwoRangeBase {
+ Diff31() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:6}", 1, 5, true, false) {}
+ };
+
+ struct Diff32 : public TwoRangeBase {
+ Diff32() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:6}", 1, 5, true, true) {}
+ };
+
+ class EmptyBase : public DiffBase {
+ public:
+ EmptyBase( string obj )
+ : _obj( obj ) {}
+ private:
+ virtual unsigned len() const { return 0; }
+ virtual const int *nums() const { return 0; }
+ virtual const bool *incs() const { return 0; }
+ virtual BSONObj obj() const { return fromjson( _obj ); }
+ string _obj;
+ };
+
+ struct Diff33 : public EmptyBase {
+ Diff33() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gt:0,$lt:6}}" ) {}
+ };
+
+ struct Diff34 : public EmptyBase {
+ Diff34() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gte:1,$lt:6}}" ) {}
+ };
+
+ struct Diff35 : public EmptyBase {
+ Diff35() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gte:1,$lt:6}}" ) {}
+ };
+
+ struct Diff36 : public EmptyBase {
+ Diff36() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gt:1,$lt:6}}" ) {}
+ };
+
+ struct Diff37 : public TwoRangeBase {
+ Diff37() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lt:6}}", 1, 1, true, true ) {}
+ };
+
+ struct Diff38 : public EmptyBase {
+ Diff38() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lt:5}}" ) {}
+ };
+
+ struct Diff39 : public EmptyBase {
+ Diff39() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:0,$lte:5}}" ) {}
+ };
+
+ struct Diff40 : public EmptyBase {
+ Diff40() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gt:0,$lte:5}}" ) {}
+ };
+
+ struct Diff41 : public TwoRangeBase {
+ Diff41() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:0,$lt:5}}", 5, 5, true, true ) {}
+ };
+
+ struct Diff42 : public EmptyBase {
+ Diff42() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:1,$lt:5}}" ) {}
+ };
+
+ struct Diff43 : public EmptyBase {
+ Diff43() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gt:1,$lte:5}}" ) {}
+ };
+
+ struct Diff44 : public EmptyBase {
+ Diff44() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gte:1,$lt:5}}" ) {}
+ };
+
+ struct Diff45 : public EmptyBase {
+ Diff45() : EmptyBase( "{a:{$gt:1,$lt:5},b:{$gte:1,$lte:5}}" ) {}
+ };
+
+ struct Diff46 : public TwoRangeBase {
+ Diff46() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gt:1,$lt:5}}", 5, 5, true, true ) {}
+ };
+
+ struct Diff47 : public EmptyBase {
+ Diff47() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gt:1,$lte:5}}" ) {}
+ };
+
+ struct Diff48 : public TwoRangeBase {
+ Diff48() : TwoRangeBase( "{a:{$gt:1,$lte:5},b:{$gte:1,$lt:5}}", 5, 5, true, true ) {}
+ };
+
+ struct Diff49 : public EmptyBase {
+ Diff49() : EmptyBase( "{a:{$gt:1,$lte:5},b:{$gte:1,$lte:5}}" ) {}
+ };
+
+ struct Diff50 : public TwoRangeBase {
+ Diff50() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lt:5}}", 1, 1, true, true ) {}
+ };
+
+ struct Diff51 : public TwoRangeBase {
+ Diff51() : TwoRangeBase( "{a:{$gte:1,$lt:5},b:{$gt:1,$lte:5}}", 1, 1, true, true ) {}
+ };
+
+ struct Diff52 : public EmptyBase {
+ Diff52() : EmptyBase( "{a:{$gte:1,$lt:5},b:{$gte:1,$lt:5}}" ) {}
+ };
+
+ struct Diff53 : public EmptyBase {
+ Diff53() : EmptyBase( "{a:{$gte:1,$lt:5},b:{$gte:1,$lte:5}}" ) {}
+ };
+
+ struct Diff54 : public SplitRangeBase {
+ Diff54() : SplitRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lt:5}}", 1, true, 1, true, 5, true, 5, true ) {}
+ };
+
+ struct Diff55 : public TwoRangeBase {
+ Diff55() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gt:1,$lte:5}}", 1, 1, true, true ) {}
+ };
+
+ struct Diff56 : public TwoRangeBase {
+ Diff56() : TwoRangeBase( "{a:{$gte:1,$lte:5},b:{$gte:1,$lt:5}}", 5, 5, true, true ) {}
+ };
+
+ struct Diff57 : public EmptyBase {
+ Diff57() : EmptyBase( "{a:{$gte:1,$lte:5},b:{$gte:1,$lte:5}}" ) {}
+ };
+
+ struct Diff58 : public TwoRangeBase {
+ Diff58() : TwoRangeBase( "{a:1,b:{$gt:1,$lt:5}}", 1, 1, true, true ) {}
+ };
+
+ struct Diff59 : public EmptyBase {
+ Diff59() : EmptyBase( "{a:1,b:{$gte:1,$lt:5}}" ) {}
+ };
+
+ struct Diff60 : public EmptyBase {
+ Diff60() : EmptyBase( "{a:2,b:{$gte:1,$lt:5}}" ) {}
+ };
+
+ struct Diff61 : public EmptyBase {
+ Diff61() : EmptyBase( "{a:5,b:{$gte:1,$lte:5}}" ) {}
+ };
+
+ struct Diff62 : public TwoRangeBase {
+ Diff62() : TwoRangeBase( "{a:5,b:{$gt:1,$lt:5}}", 5, 5, true, true ) {}
+ };
+
+ struct Diff63 : public EmptyBase {
+ Diff63() : EmptyBase( "{a:5,b:5}" ) {}
+ };
+
+ struct Diff64 : public TwoRangeBase {
+ Diff64() : TwoRangeBase( "{a:{$gte:1,$lte:2},b:{$gt:0,$lte:1}}", 1, 2, false, true ) {}
+ };
+
+ class DiffMulti1 : public DiffBase {
+ public:
+ void run() {
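+ // Subtract the union of the 'b'..'e' ranges from the 'a' range and check the remaining intervals.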
+ FieldRangeSet frs( "", fromjson( "{a:{$gt:1,$lt:9},b:{$gt:0,$lt:2},c:3,d:{$gt:4,$lt:5},e:{$gt:7,$lt:10}}" ), true );
+ FieldRange ret = frs.range( "a" );
+ FieldRange other = frs.range( "b" );
+ other |= frs.range( "c" );
+ other |= frs.range( "d" );
+ other |= frs.range( "e" );
+ ret -= other;
+ check( ret );
+ }
+ protected:
+ virtual unsigned len() const { return 3; }
+ virtual const int *nums() const { static int n[] = { 2, 3, 3, 4, 5, 7 }; return n; }
+ virtual const bool *incs() const { static bool b[] = { true, false, false, true, true, true }; return b; }
+ virtual BSONObj obj() const { return BSONObj(); }
+ };
+
+ class DiffMulti2 : public DiffBase {
+ public:
+ void run() {
+ FieldRangeSet frs( "", fromjson( "{a:{$gt:1,$lt:9},b:{$gt:0,$lt:2},c:3,d:{$gt:4,$lt:5},e:{$gt:7,$lt:10}}" ), true );
+ FieldRange mask = frs.range( "a" );
+ FieldRange ret = frs.range( "b" );
+ ret |= frs.range( "c" );
+ ret |= frs.range( "d" );
+ ret |= frs.range( "e" );
+ ret -= mask;
+ check( ret );
+ }
+ protected:
+ virtual unsigned len() const { return 2; }
+ virtual const int *nums() const { static int n[] = { 0, 1, 9, 10 }; return n; }
+ virtual const bool *incs() const { static bool b[] = { false, true, true, false }; return b; }
+ virtual BSONObj obj() const { return BSONObj(); }
+ };
+
+ } // namespace FieldRangeTests
+
+ namespace FieldRangeSetTests {
+
+ class Intersect {
+ public:
+ void run() {
+ FieldRangeSet frs1( "", fromjson( "{b:{$in:[5,6]},c:7,d:{$in:[8,9]}}" ), true );
+ FieldRangeSet frs2( "", fromjson( "{a:1,b:5,c:{$in:[7,8]},d:{$in:[8,9]},e:10}" ), true );
+ frs1 &= frs2;
+ ASSERT_EQUALS( fromjson( "{a:1,b:5,c:7,d:{$gte:8,$lte:9},e:10}" ), frs1.simplifiedQuery( BSONObj() ) );
+ }
+ };
+
+ class MultiKeyIntersect {
+ public:
+ void run() {
+ FieldRangeSet frs1( "", BSONObj(), false );
+ FieldRangeSet frs2( "", BSON( "a" << GT << 4 ), false );
+ FieldRangeSet frs3( "", BSON( "a" << LT << 6 ), false );
+ // An intersection with a trivial range is allowed.
+ frs1 &= frs2;
+ ASSERT_EQUALS( frs2.simplifiedQuery( BSONObj() ), frs1.simplifiedQuery( BSONObj() ) );
+ // An intersection with a nontrivial range is not allowed, as it might prevent a valid
+ // multikey match.
+ frs1 &= frs3;
+ ASSERT_EQUALS( frs2.simplifiedQuery( BSONObj() ), frs1.simplifiedQuery( BSONObj() ) );
+ // Now intersect with a fully contained range.
+ FieldRangeSet frs4( "", BSON( "a" << GT << 6 ), false );
+ frs1 &= frs4;
+ ASSERT_EQUALS( frs4.simplifiedQuery( BSONObj() ), frs1.simplifiedQuery( BSONObj() ) );
+ }
+ };
+
+ class MultiKeyDiff {
+ public:
+ void run() {
+ FieldRangeSet frs1( "", BSON( "a" << GT << 4 ), false );
+ FieldRangeSet frs2( "", BSON( "a" << GT << 6 ), false );
+ // Range subtraction is no different for multikey ranges.
+ frs1 -= frs2;
+ ASSERT_EQUALS( BSON( "a" << GT << 4 << LTE << 6 ), frs1.simplifiedQuery( BSONObj() ) );
+ }
+ };
+
+ class MatchPossible {
+ public:
+ void run() {
+ FieldRangeSet frs1( "", BSON( "a" << GT << 4 ), true );
+ ASSERT( frs1.matchPossible() );
+ // Conflicting constraints invalid for a single key set.
+ FieldRangeSet frs2( "", BSON( "a" << GT << 4 << LT << 2 ), true );
+ ASSERT( !frs2.matchPossible() );
+ // Conflicting constraints do not make a match impossible for a multi key set.
+ FieldRangeSet frs3( "", BSON( "a" << GT << 4 << LT << 2 ), false );
+ ASSERT( frs3.matchPossible() );
+ }
+ };
+
+ class MatchPossibleForIndex {
+ public:
+ void run() {
+ // Conflicting constraints do not make a match impossible for a multi key set.
+ FieldRangeSet frs1( "", BSON( "a" << GT << 4 << LT << 2 ), false );
+ ASSERT( frs1.matchPossibleForIndex( BSON( "a" << 1 ) ) );
+ // Conflicting constraints are invalid for a single key set.
+ FieldRangeSet frs2( "", BSON( "a" << GT << 4 << LT << 2 ), true );
+ ASSERT( !frs2.matchPossibleForIndex( BSON( "a" << 1 ) ) );
+ // If the index doesn't include the key, it is not single key invalid.
+ ASSERT( frs2.matchPossibleForIndex( BSON( "b" << 1 ) ) );
+ // If the key pattern is not an actual index, the set is not single key invalid.
+ ASSERT( frs2.matchPossibleForIndex( BSON( "$natural" << 1 ) ) );
+ ASSERT( frs2.matchPossibleForIndex( BSONObj() ) );
+ }
+ };
+
+ } // namespace FieldRangeSetTests
+
+ namespace FieldRangeSetPairTests {
+
+ class NoNontrivialRanges {
+ public:
+ void run() {
+ FieldRangeSetPair frsp1( "", BSONObj() );
+ ASSERT( frsp1.noNontrivialRanges() );
+ FieldRangeSetPair frsp2( "", BSON( "a" << 1 ) );
+ ASSERT( !frsp2.noNontrivialRanges() );
+ FieldRangeSetPair frsp3( "", BSON( "a" << GT << 1 ) );
+ ASSERT( !frsp3.noNontrivialRanges() );
+ // A single key invalid constraint is still nontrivial.
+ FieldRangeSetPair frsp4( "", BSON( "a" << GT << 1 << LT << 0 ) );
+ ASSERT( !frsp4.noNontrivialRanges() );
+ // Still nontrivial if multikey invalid.
+ frsp4 -= frsp4.frsForIndex( 0, -1 );
+ ASSERT( !frsp4.noNontrivialRanges() );
+ }
+ };
+
+ class MatchPossible {
+ public:
+ void run() {
+ // Match possible for simple query.
+ FieldRangeSetPair frsp1( "", BSON( "a" << 1 ) );
+ ASSERT( frsp1.matchPossible() );
+ // Match possible for single key invalid query.
+ FieldRangeSetPair frsp2( "", BSON( "a" << GT << 1 << LT << 0 ) );
+ ASSERT( frsp2.matchPossible() );
+ // Match not possible for multi key invalid query.
+ frsp1 -= frsp1.frsForIndex( 0, - 1 );
+ ASSERT( !frsp1.matchPossible() );
+ }
+ };
+
+ class IndexBase {
+ public:
+ IndexBase() : _ctx( ns() ) , indexNum_( 0 ) {
+ string err;
+ userCreateNS( ns(), BSONObj(), err, false );
+ }
+ ~IndexBase() {
+ if ( !nsd() )
+ return;
+ string s( ns() );
+ dropNS( s );
+ }
+ protected:
+ static const char *ns() { return "unittests.FieldRangeSetPairTests"; }
+ static NamespaceDetails *nsd() { return nsdetails( ns() ); }
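+ // Ensure an index on 'key' with a generated numeric name and return its IndexDetails.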
+ IndexDetails *index( const BSONObj &key ) {
+ stringstream ss;
+ ss << indexNum_++;
+ string name = ss.str();
+ client_.resetIndexCache();
+ client_.ensureIndex( ns(), key, false, name.c_str() );
+ NamespaceDetails *d = nsd();
+ for( int i = 0; i < d->nIndexes; ++i ) {
+ if ( d->idx(i).keyPattern() == key /*indexName() == name*/ || ( d->idx(i).isIdIndex() && IndexDetails::isIdIndexPattern( key ) ) )
+ return &d->idx(i);
+ }
+ assert( false );
+ return 0;
+ }
+ int indexno( const BSONObj &key ) {
+ return nsd()->idxNo( *index(key) );
+ }
+ static DBDirectClient client_;
+ private:
+ dblock lk_;
+ Client::Context _ctx;
+ int indexNum_;
+ };
+ DBDirectClient IndexBase::client_;
+
+ class MatchPossibleForIndex : public IndexBase {
+ public:
+ void run() {
+ int a = indexno( BSON( "a" << 1 ) );
+ int b = indexno( BSON( "b" << 1 ) );
+ IndexBase::client_.insert( ns(), BSON( "a" << BSON_ARRAY( 1 << 2 ) << "b" << 1 ) );
+ // Valid ranges match possible for both indexes.
+ FieldRangeSetPair frsp1( ns(), BSON( "a" << GT << 1 << LT << 4 << "b" << GT << 1 << LT << 4 ) );
+ ASSERT( frsp1.matchPossibleForIndex( nsd(), a, BSON( "a" << 1 ) ) );
+ ASSERT( frsp1.matchPossibleForIndex( nsd(), b, BSON( "b" << 1 ) ) );
+ // Single key invalid range means match impossible for single key index.
+ FieldRangeSetPair frsp2( ns(), BSON( "a" << GT << 4 << LT << 1 << "b" << GT << 4 << LT << 1 ) );
+ ASSERT( frsp2.matchPossibleForIndex( nsd(), a, BSON( "a" << 1 ) ) );
+ ASSERT( !frsp2.matchPossibleForIndex( nsd(), b, BSON( "b" << 1 ) ) );
+ }
+ };
+
+ } // namespace FieldRangeSetPairTests
+
+ class All : public Suite {
+ public:
+ All() : Suite( "queryutil" ) {}
+
+ void setupTests() {
+ add< FieldRangeTests::Empty >();
+ add< FieldRangeTests::Eq >();
+ add< FieldRangeTests::DupEq >();
+ add< FieldRangeTests::Lt >();
+ add< FieldRangeTests::Lte >();
+ add< FieldRangeTests::Gt >();
+ add< FieldRangeTests::Gte >();
+ add< FieldRangeTests::TwoLt >();
+ add< FieldRangeTests::TwoGt >();
+ add< FieldRangeTests::EqGte >();
+ add< FieldRangeTests::EqGteInvalid >();
+ add< FieldRangeTests::Regex >();
+ add< FieldRangeTests::RegexObj >();
+ add< FieldRangeTests::UnhelpfulRegex >();
+ add< FieldRangeTests::In >();
+ add< FieldRangeTests::Equality >();
+ add< FieldRangeTests::SimplifiedQuery >();
+ add< FieldRangeTests::QueryPatternTest >();
+ add< FieldRangeTests::NoWhere >();
+ add< FieldRangeTests::Numeric >();
+ add< FieldRangeTests::InLowerBound >();
+ add< FieldRangeTests::InUpperBound >();
+ add< FieldRangeTests::UnionBound >();
+ add< FieldRangeTests::MultiBound >();
+ add< FieldRangeTests::Diff1 >();
+ add< FieldRangeTests::Diff2 >();
+ add< FieldRangeTests::Diff3 >();
+ add< FieldRangeTests::Diff4 >();
+ add< FieldRangeTests::Diff5 >();
+ add< FieldRangeTests::Diff6 >();
+ add< FieldRangeTests::Diff7 >();
+ add< FieldRangeTests::Diff8 >();
+ add< FieldRangeTests::Diff9 >();
+ add< FieldRangeTests::Diff10 >();
+ add< FieldRangeTests::Diff11 >();
+ add< FieldRangeTests::Diff12 >();
+ add< FieldRangeTests::Diff13 >();
+ add< FieldRangeTests::Diff14 >();
+ add< FieldRangeTests::Diff15 >();
+ add< FieldRangeTests::Diff16 >();
+ add< FieldRangeTests::Diff17 >();
+ add< FieldRangeTests::Diff18 >();
+ add< FieldRangeTests::Diff19 >();
+ add< FieldRangeTests::Diff20 >();
+ add< FieldRangeTests::Diff21 >();
+ add< FieldRangeTests::Diff22 >();
+ add< FieldRangeTests::Diff23 >();
+ add< FieldRangeTests::Diff24 >();
+ add< FieldRangeTests::Diff25 >();
+ add< FieldRangeTests::Diff26 >();
+ add< FieldRangeTests::Diff27 >();
+ add< FieldRangeTests::Diff28 >();
+ add< FieldRangeTests::Diff29 >();
+ add< FieldRangeTests::Diff30 >();
+ add< FieldRangeTests::Diff31 >();
+ add< FieldRangeTests::Diff32 >();
+ add< FieldRangeTests::Diff33 >();
+ add< FieldRangeTests::Diff34 >();
+ add< FieldRangeTests::Diff35 >();
+ add< FieldRangeTests::Diff36 >();
+ add< FieldRangeTests::Diff37 >();
+ add< FieldRangeTests::Diff38 >();
+ add< FieldRangeTests::Diff39 >();
+ add< FieldRangeTests::Diff40 >();
+ add< FieldRangeTests::Diff41 >();
+ add< FieldRangeTests::Diff42 >();
+ add< FieldRangeTests::Diff43 >();
+ add< FieldRangeTests::Diff44 >();
+ add< FieldRangeTests::Diff45 >();
+ add< FieldRangeTests::Diff46 >();
+ add< FieldRangeTests::Diff47 >();
+ add< FieldRangeTests::Diff48 >();
+ add< FieldRangeTests::Diff49 >();
+ add< FieldRangeTests::Diff50 >();
+ add< FieldRangeTests::Diff51 >();
+ add< FieldRangeTests::Diff52 >();
+ add< FieldRangeTests::Diff53 >();
+ add< FieldRangeTests::Diff54 >();
+ add< FieldRangeTests::Diff55 >();
+ add< FieldRangeTests::Diff56 >();
+ add< FieldRangeTests::Diff57 >();
+ add< FieldRangeTests::Diff58 >();
+ add< FieldRangeTests::Diff59 >();
+ add< FieldRangeTests::Diff60 >();
+ add< FieldRangeTests::Diff61 >();
+ add< FieldRangeTests::Diff62 >();
+ add< FieldRangeTests::Diff63 >();
+ add< FieldRangeTests::Diff64 >();
+ add< FieldRangeTests::DiffMulti1 >();
+ add< FieldRangeTests::DiffMulti2 >();
+ add< FieldRangeSetTests::Intersect >();
+ add< FieldRangeSetTests::MultiKeyIntersect >();
+ add< FieldRangeSetTests::MultiKeyDiff >();
+ add< FieldRangeSetTests::MatchPossible >();
+ add< FieldRangeSetTests::MatchPossibleForIndex >();
+ add< FieldRangeSetPairTests::NoNontrivialRanges >();
+ add< FieldRangeSetPairTests::MatchPossible >();
+ add< FieldRangeSetPairTests::MatchPossibleForIndex >();
+ }
+ } myall;
+
+} // namespace QueryUtilTests
+
diff --git a/dbtests/repltests.cpp b/dbtests/repltests.cpp
index c6ffba2..0b53d36 100644
--- a/dbtests/repltests.cpp
+++ b/dbtests/repltests.cpp
@@ -25,6 +25,8 @@
#include "../db/json.h"
#include "dbtests.h"
+#include "../db/oplog.h"
+#include "../db/queryoptimizer.h"
namespace mongo {
void createOplog();
@@ -1012,120 +1014,94 @@ namespace ReplTests {
ASSERT( !one( BSON( "_id" << 2 ) ).isEmpty() );
}
};
-
- class DbIdsTest {
+
+ class DatabaseIgnorerBasic {
public:
void run() {
- Client::Context ctx( "unittests.repltest.DbIdsTest" );
-
- s_.reset( new DbIds( "local.temp.DbIdsTest" ) );
- s_->reset();
- check( false, false, false );
-
- s_->set( "a", BSON( "_id" << 4 ), true );
- check( true, false, false );
- s_->set( "a", BSON( "_id" << 4 ), false );
- check( false, false, false );
-
- s_->set( "b", BSON( "_id" << 4 ), true );
- check( false, true, false );
- s_->set( "b", BSON( "_id" << 4 ), false );
- check( false, false, false );
-
- s_->set( "a", BSON( "_id" << 5 ), true );
- check( false, false, true );
- s_->set( "a", BSON( "_id" << 5 ), false );
- check( false, false, false );
-
- s_->set( "a", BSON( "_id" << 4 ), true );
- s_->set( "b", BSON( "_id" << 4 ), true );
- s_->set( "a", BSON( "_id" << 5 ), true );
- check( true, true, true );
-
- s_->reset();
- check( false, false, false );
-
- s_->set( "a", BSON( "_id" << 4 ), true );
- s_->set( "a", BSON( "_id" << 4 ), true );
- check( true, false, false );
- s_->set( "a", BSON( "_id" << 4 ), false );
- check( false, false, false );
- }
- private:
- void check( bool one, bool two, bool three ) {
- ASSERT_EQUALS( one, s_->get( "a", BSON( "_id" << 4 ) ) );
- ASSERT_EQUALS( two, s_->get( "b", BSON( "_id" << 4 ) ) );
- ASSERT_EQUALS( three, s_->get( "a", BSON( "_id" << 5 ) ) );
+ DatabaseIgnorer d;
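+ // doIgnoreUntilAfter() makes ignoreAt() report true for ops up to and including the given OpTime; the state expires once a later op is seen.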
+ ASSERT( !d.ignoreAt( "a", OpTime( 4, 0 ) ) );
+ d.doIgnoreUntilAfter( "a", OpTime( 5, 0 ) );
+ ASSERT( d.ignoreAt( "a", OpTime( 4, 0 ) ) );
+ ASSERT( !d.ignoreAt( "b", OpTime( 4, 0 ) ) );
+ ASSERT( d.ignoreAt( "a", OpTime( 4, 10 ) ) );
+ ASSERT( d.ignoreAt( "a", OpTime( 5, 0 ) ) );
+ ASSERT( !d.ignoreAt( "a", OpTime( 5, 1 ) ) );
+ // Ignore state is expired.
+ ASSERT( !d.ignoreAt( "a", OpTime( 4, 0 ) ) );
}
- dblock lk_;
- auto_ptr< DbIds > s_;
};
- class MemIdsTest {
+ class DatabaseIgnorerUpdate {
public:
void run() {
- int n = sizeof( BSONObj ) + BSON( "_id" << 4 ).objsize();
-
- s_.reset();
- ASSERT_EQUALS( 0, s_.roughSize() );
- ASSERT( !s_.get( "a", BSON( "_id" << 4 ) ) );
- ASSERT( !s_.get( "b", BSON( "_id" << 4 ) ) );
- s_.set( "a", BSON( "_id" << 4 ), true );
- ASSERT_EQUALS( n, s_.roughSize() );
- ASSERT( s_.get( "a", BSON( "_id" << 4 ) ) );
- ASSERT( !s_.get( "b", BSON( "_id" << 4 ) ) );
- s_.set( "a", BSON( "_id" << 4 ), false );
- ASSERT_EQUALS( 0, s_.roughSize() );
- ASSERT( !s_.get( "a", BSON( "_id" << 4 ) ) );
-
- s_.set( "a", BSON( "_id" << 4 ), true );
- s_.set( "b", BSON( "_id" << 4 ), true );
- s_.set( "b", BSON( "_id" << 100 ), true );
- s_.set( "b", BSON( "_id" << 101 ), true );
- ASSERT_EQUALS( n * 4, s_.roughSize() );
+ DatabaseIgnorer d;
+ d.doIgnoreUntilAfter( "a", OpTime( 5, 0 ) );
+ d.doIgnoreUntilAfter( "a", OpTime( 6, 0 ) );
+ ASSERT( d.ignoreAt( "a", OpTime( 5, 5 ) ) );
+ ASSERT( d.ignoreAt( "a", OpTime( 6, 0 ) ) );
+ ASSERT( !d.ignoreAt( "a", OpTime( 6, 1 ) ) );
+
+ d.doIgnoreUntilAfter( "a", OpTime( 5, 0 ) );
+ d.doIgnoreUntilAfter( "a", OpTime( 6, 0 ) );
+ d.doIgnoreUntilAfter( "a", OpTime( 6, 0 ) );
+ d.doIgnoreUntilAfter( "a", OpTime( 5, 0 ) );
+ ASSERT( d.ignoreAt( "a", OpTime( 5, 5 ) ) );
+ ASSERT( d.ignoreAt( "a", OpTime( 6, 0 ) ) );
+ ASSERT( !d.ignoreAt( "a", OpTime( 6, 1 ) ) );
}
- private:
- MemIds s_;
};
-
- class IdTrackerTest {
+
+ /**
+ * Check against the oldest document in the oplog before scanning backward
+ * from the newest document.
+ */
+ class FindingStartCursorStale : public Base {
public:
void run() {
- Client::Context ctx( "unittests.repltests.IdTrackerTest" );
-
- ASSERT( s_.inMem() );
- s_.reset( 4 * sizeof( BSONObj ) - 1 );
- s_.haveId( "a", BSON( "_id" << 0 ), true );
- s_.haveId( "a", BSON( "_id" << 1 ), true );
- s_.haveId( "b", BSON( "_id" << 0 ), true );
- s_.haveModId( "b", BSON( "_id" << 0 ), true );
- ASSERT( s_.inMem() );
- check();
- s_.mayUpgradeStorage();
- ASSERT( !s_.inMem() );
- check();
-
- s_.haveId( "a", BSON( "_id" << 1 ), false );
- ASSERT( !s_.haveId( "a", BSON( "_id" << 1 ) ) );
- s_.haveId( "a", BSON( "_id" << 1 ), true );
- check();
- ASSERT( !s_.inMem() );
-
- s_.reset( 4 * sizeof( BSONObj ) - 1 );
- s_.mayUpgradeStorage();
- ASSERT( s_.inMem() );
+ for( int i = 0; i < 10; ++i ) {
+ client()->insert( ns(), BSON( "_id" << i ) );
+ }
+ dblock lk;
+ Client::Context ctx( cllNS() );
+ NamespaceDetails *nsd = nsdetails( cllNS() );
+ BSONObjBuilder b;
+ b.appendTimestamp( "$gte" );
+ BSONObj query = BSON( "ts" << b.obj() );
+ FieldRangeSetPair frsp( cllNS(), query );
+ BSONObj order = BSON( "$natural" << 1 );
+ QueryPlan qp( nsd, -1, frsp, &frsp, query, order );
+ FindingStartCursor fsc( qp );
+ ASSERT( fsc.done() );
+ ASSERT_EQUALS( 0, fsc.cursor()->current()[ "o" ].Obj()[ "_id" ].Int() );
}
- private:
- void check() {
- ASSERT( s_.haveId( "a", BSON( "_id" << 0 ) ) );
- ASSERT( s_.haveId( "a", BSON( "_id" << 1 ) ) );
- ASSERT( s_.haveId( "b", BSON( "_id" << 0 ) ) );
- ASSERT( s_.haveModId( "b", BSON( "_id" << 0 ) ) );
+ };
+
+ /** Check unsuccessful yield recovery with FindingStartCursor */
+ class FindingStartCursorYield : public Base {
+ public:
+ void run() {
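+ // Invalidate the oplog's client cursors during a yield and verify that recovery throws.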
+ for( int i = 0; i < 10; ++i ) {
+ client()->insert( ns(), BSON( "_id" << i ) );
+ }
+ Date_t ts = client()->query( "local.oplog.$main", Query().sort( BSON( "$natural" << 1 ) ), 1, 4 )->next()[ "ts" ].date();
+ Client::Context ctx( cllNS() );
+ NamespaceDetails *nsd = nsdetails( cllNS() );
+ BSONObjBuilder b;
+ b.appendDate( "$gte", ts );
+ BSONObj query = BSON( "ts" << b.obj() );
+ FieldRangeSetPair frsp( cllNS(), query );
+ BSONObj order = BSON( "$natural" << 1 );
+ QueryPlan qp( nsd, -1, frsp, &frsp, query, order );
+ FindingStartCursor fsc( qp );
+ ASSERT( !fsc.done() );
+ fsc.next();
+ ASSERT( !fsc.done() );
+ ASSERT( fsc.prepareToYield() );
+ ClientCursor::invalidate( "local.oplog.$main" );
+ ASSERT_EXCEPTION( fsc.recoverFromYield(), MsgAssertionException );
}
- dblock lk_;
- IdTracker s_;
};
-
+
class All : public Suite {
public:
All() : Suite( "repl" ) {
@@ -1178,9 +1154,10 @@ namespace ReplTests {
add< Idempotence::RenameOverwrite >();
add< Idempotence::NoRename >();
add< DeleteOpIsIdBased >();
- add< DbIdsTest >();
- add< MemIdsTest >();
- add< IdTrackerTest >();
+ add< DatabaseIgnorerBasic >();
+ add< DatabaseIgnorerUpdate >();
+ add< FindingStartCursorStale >();
+ add< FindingStartCursorYield >();
}
} myall;
diff --git a/dbtests/socktests.cpp b/dbtests/socktests.cpp
index 5cd42f5..176db8c 100644
--- a/dbtests/socktests.cpp
+++ b/dbtests/socktests.cpp
@@ -18,7 +18,7 @@
*/
#include "pch.h"
-#include "../util/sock.h"
+#include "../util/net/sock.h"
#include "dbtests.h"
namespace SockTests {
diff --git a/dbtests/spin_lock_test.cpp b/dbtests/spin_lock_test.cpp
index 4b24aba..dbd637e 100644
--- a/dbtests/spin_lock_test.cpp
+++ b/dbtests/spin_lock_test.cpp
@@ -16,9 +16,8 @@
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-#include "../pch.h"
+#include "pch.h"
#include <boost/thread/thread.hpp>
-
#include "dbtests.h"
#include "../util/concurrency/spin_lock.h"
@@ -70,8 +69,6 @@ namespace {
public:
void run() {
-#if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) || defined(_WIN32)
-
SpinLock spin;
int counter = 0;
@@ -92,14 +89,8 @@ namespace {
}
ASSERT_EQUALS( counter, threads*incs );
-#else
- warning() << "spin lock slow on this platform" << endl;
-
#if defined(__linux__)
- // we don't want to have linux binaries without a fast spinlock
- //ASSERT( false ); TODO SERVER-3075
-#endif
-
+ ASSERT( SpinLock::isfast() );
#endif
}
diff --git a/dbtests/test.sln b/dbtests/test.sln
new file mode 100755
index 0000000..3a1b741
--- /dev/null
+++ b/dbtests/test.sln
@@ -0,0 +1,26 @@
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test", "test.vcxproj", "{215B2D68-0A70-4D10-8E75-B33010C62A91}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Win32 = Debug|Win32
+ Debug|x64 = Debug|x64
+ Release|Win32 = Release|Win32
+ Release|x64 = Release|x64
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Debug|Win32.ActiveCfg = Debug|Win32
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Debug|Win32.Build.0 = Debug|Win32
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Debug|x64.ActiveCfg = Debug|x64
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Debug|x64.Build.0 = Debug|x64
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Release|Win32.ActiveCfg = Release|Win32
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Release|Win32.Build.0 = Release|Win32
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Release|x64.ActiveCfg = Release|x64
+ {215B2D68-0A70-4D10-8E75-B33010C62A91}.Release|x64.Build.0 = Release|x64
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/dbtests/test.vcxproj b/dbtests/test.vcxproj
index b80a730..a4987d9 100644
--- a/dbtests/test.vcxproj
+++ b/dbtests/test.vcxproj
@@ -1,712 +1,776 @@
-<?xml version="1.0" encoding="utf-8"?>
-<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
- <ItemGroup Label="ProjectConfigurations">
- <ProjectConfiguration Include="Debug|Win32">
- <Configuration>Debug</Configuration>
- <Platform>Win32</Platform>
- </ProjectConfiguration>
- <ProjectConfiguration Include="Debug|x64">
- <Configuration>Debug</Configuration>
- <Platform>x64</Platform>
- </ProjectConfiguration>
- <ProjectConfiguration Include="Release|Win32">
- <Configuration>Release</Configuration>
- <Platform>Win32</Platform>
- </ProjectConfiguration>
- <ProjectConfiguration Include="Release|x64">
- <Configuration>Release</Configuration>
- <Platform>x64</Platform>
- </ProjectConfiguration>
- </ItemGroup>
- <PropertyGroup Label="Globals">
- <ProjectGuid>{215B2D68-0A70-4D10-8E75-B33010C62A91}</ProjectGuid>
- <RootNamespace>dbtests</RootNamespace>
- <Keyword>Win32Proj</Keyword>
- </PropertyGroup>
- <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
- <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
- <ConfigurationType>Application</ConfigurationType>
- <CharacterSet>Unicode</CharacterSet>
- <WholeProgramOptimization>true</WholeProgramOptimization>
- </PropertyGroup>
- <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
- <ConfigurationType>Application</ConfigurationType>
- <CharacterSet>Unicode</CharacterSet>
- <WholeProgramOptimization>true</WholeProgramOptimization>
- </PropertyGroup>
- <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
- <ConfigurationType>Application</ConfigurationType>
- <UseOfMfc>false</UseOfMfc>
- <UseOfAtl>false</UseOfAtl>
- <CharacterSet>Unicode</CharacterSet>
- </PropertyGroup>
- <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
- <ConfigurationType>Application</ConfigurationType>
- <UseOfMfc>false</UseOfMfc>
- <UseOfAtl>false</UseOfAtl>
- <CharacterSet>Unicode</CharacterSet>
- </PropertyGroup>
- <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
- <ImportGroup Label="ExtensionSettings">
- </ImportGroup>
- <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
- <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
- </ImportGroup>
- <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
- <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
- </ImportGroup>
- <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
- <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
- </ImportGroup>
- <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
- <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
- </ImportGroup>
- <PropertyGroup Label="UserMacros" />
- <PropertyGroup>
- <_ProjectFileVersion>10.0.30319.1</_ProjectFileVersion>
- <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
- <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(SolutionDir)$(Configuration)\</OutDir>
- <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(Configuration)\</IntDir>
- <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(Configuration)\</IntDir>
- <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</LinkIncremental>
- <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</LinkIncremental>
- <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
- <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(SolutionDir)$(Configuration)\</OutDir>
- <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(Configuration)\</IntDir>
- <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(Configuration)\</IntDir>
- <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
- <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</LinkIncremental>
- <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">AllRules.ruleset</CodeAnalysisRuleSet>
- <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">AllRules.ruleset</CodeAnalysisRuleSet>
- <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" />
- <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" />
- <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" />
- <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" />
- <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">AllRules.ruleset</CodeAnalysisRuleSet>
- <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Release|x64'">AllRules.ruleset</CodeAnalysisRuleSet>
- <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" />
- <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|x64'" />
- <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" />
- <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|x64'" />
- <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">..;$(IncludePath)</IncludePath>
- <IncludePath Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">..;$(IncludePath)</IncludePath>
- <IncludePath Condition="'$(Configuration)|$(Platform)'=='Release|x64'">..;$(IncludePath)</IncludePath>
- <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">..;$(IncludePath)</IncludePath>
- </PropertyGroup>
- <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- <ClCompile>
- <Optimization>Disabled</Optimization>
- <AdditionalIncludeDirectories>..\..\js\src;..\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
- <PreprocessorDefinitions>_UNICODE;UNICODE;SUPPORT_UCP;SUPPORT_UTF8;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC;%(PreprocessorDefinitions)</PreprocessorDefinitions>
- <MinimalRebuild>No</MinimalRebuild>
- <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
- <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
- <PrecompiledHeader>Use</PrecompiledHeader>
- <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
- <WarningLevel>Level3</WarningLevel>
- <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
- <DisableSpecificWarnings>4355;4800;%(DisableSpecificWarnings)</DisableSpecificWarnings>
- <MultiProcessorCompilation>true</MultiProcessorCompilation>
- </ClCompile>
- <Link>
- <AdditionalDependencies>ws2_32.lib;Psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
- <AdditionalLibraryDirectories>c:\boost\lib\vs2010_32;\boost\lib\vs2010_32;\boost\lib</AdditionalLibraryDirectories>
- <IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
- <IgnoreSpecificDefaultLibraries>%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>
- <GenerateDebugInformation>true</GenerateDebugInformation>
- <SubSystem>Console</SubSystem>
- <TargetMachine>MachineX86</TargetMachine>
- <Profile>true</Profile>
- </Link>
- </ItemDefinitionGroup>
- <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- <ClCompile>
- <Optimization>Disabled</Optimization>
- <AdditionalIncludeDirectories>..\..\js\src;..\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
- <PreprocessorDefinitions>_DURABLE;_UNICODE;UNICODE;SUPPORT_UCP;SUPPORT_UTF8;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC;%(PreprocessorDefinitions)</PreprocessorDefinitions>
- <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
- <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
- <PrecompiledHeader>Use</PrecompiledHeader>
- <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
- <WarningLevel>Level3</WarningLevel>
- <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
- <DisableSpecificWarnings>4355;4800;4267;4244;%(DisableSpecificWarnings)</DisableSpecificWarnings>
- <MinimalRebuild>No</MinimalRebuild>
- <MultiProcessorCompilation>true</MultiProcessorCompilation>
- </ClCompile>
- <Link>
- <AdditionalDependencies>ws2_32.lib;Psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
- <AdditionalLibraryDirectories>c:\boost\lib\vs2010_64;\boost\lib\vs2010_64;\boost\lib</AdditionalLibraryDirectories>
- <IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
- <IgnoreSpecificDefaultLibraries>%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>
- <GenerateDebugInformation>true</GenerateDebugInformation>
- <SubSystem>Console</SubSystem>
- </Link>
- </ItemDefinitionGroup>
- <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- <ClCompile>
- <Optimization>MaxSpeed</Optimization>
- <IntrinsicFunctions>true</IntrinsicFunctions>
- <AdditionalIncludeDirectories>..\..\js\src;..\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
- <PreprocessorDefinitions>_UNICODE;UNICODE;SUPPORT_UCP;SUPPORT_UTF8;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC;%(PreprocessorDefinitions)</PreprocessorDefinitions>
- <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
- <FunctionLevelLinking>true</FunctionLevelLinking>
- <PrecompiledHeader>Use</PrecompiledHeader>
- <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
- <WarningLevel>Level3</WarningLevel>
- <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
- <DisableSpecificWarnings>4355;4800;%(DisableSpecificWarnings)</DisableSpecificWarnings>
- <MinimalRebuild>No</MinimalRebuild>
- <MultiProcessorCompilation>true</MultiProcessorCompilation>
- </ClCompile>
- <Link>
- <AdditionalDependencies>ws2_32.lib;psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
- <AdditionalLibraryDirectories>c:\boost\lib\vs2010_32;\boost\lib\vs2010_32;\boost\lib</AdditionalLibraryDirectories>
- <GenerateDebugInformation>true</GenerateDebugInformation>
- <SubSystem>Console</SubSystem>
- <OptimizeReferences>true</OptimizeReferences>
- <EnableCOMDATFolding>true</EnableCOMDATFolding>
- <TargetMachine>MachineX86</TargetMachine>
- </Link>
- </ItemDefinitionGroup>
- <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- <ClCompile>
- <Optimization>MaxSpeed</Optimization>
- <IntrinsicFunctions>true</IntrinsicFunctions>
- <AdditionalIncludeDirectories>..\..\js\src;..\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
- <PreprocessorDefinitions>_UNICODE;UNICODE;SUPPORT_UCP;SUPPORT_UTF8;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;WIN32;NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;PCRE_STATIC;%(PreprocessorDefinitions)</PreprocessorDefinitions>
- <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
- <FunctionLevelLinking>true</FunctionLevelLinking>
- <PrecompiledHeader>Use</PrecompiledHeader>
- <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
- <WarningLevel>Level3</WarningLevel>
- <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
- <DisableSpecificWarnings>4355;4800;4267;4244;%(DisableSpecificWarnings)</DisableSpecificWarnings>
- <MinimalRebuild>No</MinimalRebuild>
- <MultiProcessorCompilation>true</MultiProcessorCompilation>
- </ClCompile>
- <Link>
- <AdditionalDependencies>ws2_32.lib;psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
- <AdditionalLibraryDirectories>c:\boost\lib\vs2010_64;\boost\lib\vs2010_64;\boost\lib</AdditionalLibraryDirectories>
- <GenerateDebugInformation>true</GenerateDebugInformation>
- <SubSystem>Console</SubSystem>
- <OptimizeReferences>true</OptimizeReferences>
- <EnableCOMDATFolding>true</EnableCOMDATFolding>
- </Link>
- </ItemDefinitionGroup>
- <ItemGroup>
- <ClInclude Include="..\..\boostw\boost_1_34_1\boost\config\auto_link.hpp" />
- <ClInclude Include="..\db\dur.h" />
- <ClInclude Include="..\db\durop.h" />
- <ClInclude Include="..\db\dur_journal.h" />
- <ClInclude Include="..\db\jsobjmanipulator.h" />
- <ClInclude Include="..\db\mongommf.h" />
- <ClInclude Include="..\db\mongomutex.h" />
- <ClInclude Include="..\pcre-7.4\pcrecpp.h" />
- <ClInclude Include="..\targetver.h" />
- <ClInclude Include="..\..\boostw\boost_1_34_1\boost\version.hpp" />
- <ClInclude Include="..\pcre-7.4\config.h" />
- <ClInclude Include="..\pcre-7.4\pcre.h" />
- <ClInclude Include="..\client\connpool.h" />
- <ClInclude Include="..\client\dbclient.h" />
- <ClInclude Include="..\client\model.h" />
- <ClInclude Include="..\db\btree.h" />
- <ClInclude Include="..\db\clientcursor.h" />
- <ClInclude Include="..\db\cmdline.h" />
- <ClInclude Include="..\db\commands.h" />
- <ClInclude Include="..\db\concurrency.h" />
- <ClInclude Include="..\db\curop.h" />
- <ClInclude Include="..\db\cursor.h" />
- <ClInclude Include="..\db\database.h" />
- <ClInclude Include="..\db\db.h" />
- <ClInclude Include="..\db\dbhelpers.h" />
- <ClInclude Include="..\db\dbinfo.h" />
- <ClInclude Include="..\db\dbmessage.h" />
- <ClInclude Include="..\db\diskloc.h" />
- <ClInclude Include="..\db\extsort.h" />
- <ClInclude Include="..\db\introspect.h" />
- <ClInclude Include="..\db\jsobj.h" />
- <ClInclude Include="..\db\json.h" />
- <ClInclude Include="..\db\matcher.h" />
- <ClInclude Include="..\grid\message.h" />
- <ClInclude Include="..\db\minilex.h" />
- <ClInclude Include="..\db\namespace.h" />
- <ClInclude Include="..\pch.h" />
- <ClInclude Include="..\db\pdfile.h" />
- <ClInclude Include="..\grid\protocol.h" />
- <ClInclude Include="..\db\query.h" />
- <ClInclude Include="..\db\queryoptimizer.h" />
- <ClInclude Include="..\db\repl.h" />
- <ClInclude Include="..\db\replset.h" />
- <ClInclude Include="..\db\resource.h" />
- <ClInclude Include="..\db\scanandorder.h" />
- <ClInclude Include="..\db\security.h" />
- <ClInclude Include="..\util\builder.h" />
- <ClInclude Include="..\util\concurrency\list.h" />
- <ClInclude Include="..\util\concurrency\task.h" />
- <ClInclude Include="..\util\concurrency\value.h" />
- <ClInclude Include="..\util\file.h" />
- <ClInclude Include="..\util\goodies.h" />
- <ClInclude Include="..\util\hashtab.h" />
- <ClInclude Include="..\db\lasterror.h" />
- <ClInclude Include="..\util\log.h" />
- <ClInclude Include="..\util\logfile.h" />
- <ClInclude Include="..\util\lruishmap.h" />
- <ClInclude Include="..\util\md5.h" />
- <ClInclude Include="..\util\md5.hpp" />
- <ClInclude Include="..\util\miniwebserver.h" />
- <ClInclude Include="..\util\mmap.h" />
- <ClInclude Include="..\util\sock.h" />
- <ClInclude Include="..\util\unittest.h" />
- </ItemGroup>
- <ItemGroup>
- <ClCompile Include="..\bson\oid.cpp" />
- <ClCompile Include="..\client\dbclientcursor.cpp" />
- <ClCompile Include="..\client\dbclient_rs.cpp" />
- <ClCompile Include="..\client\distlock.cpp" />
- <ClCompile Include="..\client\gridfs.cpp" />
- <ClCompile Include="..\client\model.cpp" />
- <ClCompile Include="..\client\parallel.cpp" />
- <ClCompile Include="..\db\cap.cpp" />
- <ClCompile Include="..\db\commands\isself.cpp" />
- <ClCompile Include="..\db\compact.cpp" />
- <ClCompile Include="..\db\dbcommands_generic.cpp" />
- <ClCompile Include="..\db\dur.cpp" />
- <ClCompile Include="..\db\durop.cpp" />
- <ClCompile Include="..\db\dur_commitjob.cpp" />
- <ClCompile Include="..\db\dur_journal.cpp" />
- <ClCompile Include="..\db\dur_preplogbuffer.cpp" />
- <ClCompile Include="..\db\dur_recover.cpp" />
- <ClCompile Include="..\db\dur_writetodatafiles.cpp" />
- <ClCompile Include="..\db\geo\2d.cpp" />
- <ClCompile Include="..\db\geo\haystack.cpp" />
- <ClCompile Include="..\db\mongommf.cpp" />
- <ClCompile Include="..\db\projection.cpp" />
- <ClCompile Include="..\db\repl\consensus.cpp" />
- <ClCompile Include="..\db\repl\heartbeat.cpp" />
- <ClCompile Include="..\db\repl\manager.cpp" />
- <ClCompile Include="..\db\repl\rs.cpp" />
- <ClCompile Include="..\db\repl\rs_initialsync.cpp" />
- <ClCompile Include="..\db\repl\rs_initiate.cpp" />
- <ClCompile Include="..\db\repl\rs_rollback.cpp" />
- <ClCompile Include="..\db\repl\rs_sync.cpp" />
- <ClCompile Include="..\db\restapi.cpp" />
- <ClCompile Include="..\db\security_key.cpp" />
- <ClCompile Include="..\pcre-7.4\pcrecpp.cc">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_chartables.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_compile.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_config.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_dfa_exec.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_exec.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_fullinfo.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_get.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_globals.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_info.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_maketables.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_newline.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_ord2utf8.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_refcount.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_scanner.cc">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_stringpiece.cc">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_study.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_tables.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_try_flipped.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_ucp_searchfuncs.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_valid_utf8.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_version.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_xclass.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcreposix.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\client\connpool.cpp" />
- <ClCompile Include="..\client\dbclient.cpp" />
- <ClCompile Include="..\client\syncclusterconnection.cpp" />
- <ClCompile Include="..\db\btree.cpp" />
- <ClCompile Include="..\db\btreecursor.cpp" />
- <ClCompile Include="..\pch.cpp">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">Create</PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">Create</PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">Create</PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">Create</PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\db\client.cpp" />
- <ClCompile Include="..\db\clientcursor.cpp" />
- <ClCompile Include="..\db\cloner.cpp" />
- <ClCompile Include="..\db\commands.cpp" />
- <ClCompile Include="..\db\common.cpp" />
- <ClCompile Include="..\db\cursor.cpp" />
- <ClCompile Include="..\db\database.cpp" />
- <ClCompile Include="..\db\dbcommands.cpp" />
- <ClCompile Include="..\db\dbeval.cpp" />
- <ClCompile Include="..\db\dbhelpers.cpp" />
- <ClCompile Include="..\db\dbwebserver.cpp" />
- <ClCompile Include="..\db\extsort.cpp" />
- <ClCompile Include="..\db\index.cpp" />
- <ClCompile Include="..\db\indexkey.cpp" />
- <ClCompile Include="..\db\instance.cpp" />
- <ClCompile Include="..\db\introspect.cpp" />
- <ClCompile Include="..\db\jsobj.cpp" />
- <ClCompile Include="..\db\json.cpp" />
- <ClCompile Include="..\db\lasterror.cpp" />
- <ClCompile Include="..\db\matcher.cpp" />
- <ClCompile Include="..\scripting\bench.cpp" />
- <ClCompile Include="..\s\chunk.cpp" />
- <ClCompile Include="..\s\config.cpp" />
- <ClCompile Include="..\s\d_chunk_manager.cpp" />
- <ClCompile Include="..\s\d_migrate.cpp" />
- <ClCompile Include="..\s\d_split.cpp" />
- <ClCompile Include="..\s\d_state.cpp" />
- <ClCompile Include="..\s\d_writeback.cpp" />
- <ClCompile Include="..\s\grid.cpp" />
- <ClCompile Include="..\s\shard.cpp" />
- <ClCompile Include="..\s\shardconnection.cpp" />
- <ClCompile Include="..\s\shardkey.cpp" />
- <ClCompile Include="..\util\alignedbuilder.cpp">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\util\concurrency\spin_lock.cpp" />
- <ClCompile Include="..\util\concurrency\synchronization.cpp" />
- <ClCompile Include="..\util\concurrency\task.cpp" />
- <ClCompile Include="..\util\concurrency\thread_pool.cpp" />
- <ClCompile Include="..\util\concurrency\vars.cpp" />
- <ClCompile Include="..\util\file_allocator.cpp" />
- <ClCompile Include="..\util\log.cpp" />
- <ClCompile Include="..\util\logfile.cpp" />
- <ClCompile Include="..\util\mmap_win.cpp" />
- <ClCompile Include="..\db\namespace.cpp" />
- <ClCompile Include="..\db\nonce.cpp" />
- <ClCompile Include="..\db\pdfile.cpp" />
- <ClCompile Include="..\db\query.cpp" />
- <ClCompile Include="..\db\queryoptimizer.cpp" />
- <ClCompile Include="..\util\processinfo.cpp" />
- <ClCompile Include="..\db\repl.cpp" />
- <ClCompile Include="..\db\security.cpp" />
- <ClCompile Include="..\db\security_commands.cpp" />
- <ClCompile Include="..\db\tests.cpp" />
- <ClCompile Include="..\db\update.cpp" />
- <ClCompile Include="..\db\cmdline.cpp" />
- <ClCompile Include="..\db\matcher_covered.cpp" />
- <ClCompile Include="..\db\oplog.cpp" />
- <ClCompile Include="..\db\queryutil.cpp" />
- <ClCompile Include="..\db\repl_block.cpp" />
- <ClCompile Include="..\util\assert_util.cpp" />
- <ClCompile Include="..\util\background.cpp" />
- <ClCompile Include="..\util\base64.cpp" />
- <ClCompile Include="..\util\httpclient.cpp" />
- <ClCompile Include="..\util\md5.c">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- <PrecompiledHeaderFile Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeaderFile>
- <PrecompiledHeaderFile Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeaderFile>
- </ClCompile>
- <ClCompile Include="..\util\md5main.cpp" />
- <ClCompile Include="..\util\message.cpp" />
- <ClCompile Include="..\util\message_server_port.cpp" />
- <ClCompile Include="..\util\miniwebserver.cpp" />
- <ClCompile Include="..\util\mmap.cpp" />
- <ClCompile Include="..\util\processinfo_win32.cpp" />
- <ClCompile Include="..\util\sock.cpp" />
- <ClCompile Include="..\util\stringutils.cpp" />
- <ClCompile Include="..\util\text.cpp" />
- <ClCompile Include="..\util\util.cpp" />
- <ClCompile Include="..\s\d_logic.cpp" />
- <ClCompile Include="..\scripting\engine.cpp" />
- <ClCompile Include="..\scripting\engine_spidermonkey.cpp" />
- <ClCompile Include="..\shell\mongo_vstudio.cpp">
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
- </PrecompiledHeader>
- <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- </PrecompiledHeader>
- </ClCompile>
- <ClCompile Include="..\scripting\utils.cpp" />
- <ClCompile Include="..\util\version.cpp" />
- <ClCompile Include="basictests.cpp" />
- <ClCompile Include="btreetests.cpp" />
- <ClCompile Include="clienttests.cpp" />
- <ClCompile Include="cursortests.cpp" />
- <ClCompile Include="dbtests.cpp" />
- <ClCompile Include="directclienttests.cpp" />
- <ClCompile Include="framework.cpp" />
- <ClCompile Include="jsobjtests.cpp" />
- <ClCompile Include="jsontests.cpp" />
- <ClCompile Include="jstests.cpp" />
- <ClCompile Include="matchertests.cpp" />
- <ClCompile Include="mmaptests.cpp" />
- <ClCompile Include="namespacetests.cpp" />
- <ClCompile Include="pairingtests.cpp" />
- <ClCompile Include="pdfiletests.cpp" />
- <ClCompile Include="perftests.cpp" />
- <ClCompile Include="queryoptimizertests.cpp" />
- <ClCompile Include="querytests.cpp" />
- <ClCompile Include="repltests.cpp" />
- <ClCompile Include="socktests.cpp" />
- <ClCompile Include="threadedtests.cpp">
- <DisableSpecificWarnings Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">4180;%(DisableSpecificWarnings)</DisableSpecificWarnings>
- <DisableSpecificWarnings Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">4180;%(DisableSpecificWarnings)</DisableSpecificWarnings>
- </ClCompile>
- <ClCompile Include="updatetests.cpp" />
- <ClCompile Include="..\db\stats\counters.cpp" />
- <ClCompile Include="..\db\stats\snapshots.cpp" />
- <ClCompile Include="..\db\stats\top.cpp" />
- <ClCompile Include="..\db\repl\health.cpp" />
- <ClCompile Include="..\db\repl\replset_commands.cpp" />
- <ClCompile Include="..\db\repl\rs_config.cpp" />
- </ItemGroup>
- <ItemGroup>
- <None Include="..\SConstruct" />
- </ItemGroup>
- <ItemGroup>
- <Library Include="..\..\js\js32d.lib">
- <FileType>Document</FileType>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
- </Library>
- <Library Include="..\..\js\js32r.lib">
- <FileType>Document</FileType>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild>
- </Library>
- <Library Include="..\..\js\js64d.lib">
- <FileType>Document</FileType>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
- </Library>
- <Library Include="..\..\js\js64r.lib">
- <FileType>Document</FileType>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild>
- <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
- </Library>
- </ItemGroup>
- <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
- <ImportGroup Label="ExtensionTargets">
- </ImportGroup>
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+ <ProjectConfiguration Include="Debug|Win32">
+ <Configuration>Debug</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Debug|x64">
+ <Configuration>Debug</Configuration>
+ <Platform>x64</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|Win32">
+ <Configuration>Release</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|x64">
+ <Configuration>Release</Configuration>
+ <Platform>x64</Platform>
+ </ProjectConfiguration>
+ </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>{215B2D68-0A70-4D10-8E75-B33010C62A91}</ProjectGuid>
+ <RootNamespace>dbtests</RootNamespace>
+ <Keyword>Win32Proj</Keyword>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <CharacterSet>Unicode</CharacterSet>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <CharacterSet>Unicode</CharacterSet>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseOfMfc>false</UseOfMfc>
+ <UseOfAtl>false</UseOfAtl>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseOfMfc>false</UseOfMfc>
+ <UseOfAtl>false</UseOfAtl>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+ <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <PropertyGroup Label="UserMacros" />
+ <PropertyGroup>
+ <_ProjectFileVersion>10.0.30319.1</_ProjectFileVersion>
+ <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+ <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(SolutionDir)$(Configuration)\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(Configuration)\</IntDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(Configuration)\</IntDir>
+ <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</LinkIncremental>
+ <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</LinkIncremental>
+ <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+ <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(SolutionDir)$(Configuration)\</OutDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(Configuration)\</IntDir>
+ <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(Configuration)\</IntDir>
+ <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
+ <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</LinkIncremental>
+ <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">AllRules.ruleset</CodeAnalysisRuleSet>
+ <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">AllRules.ruleset</CodeAnalysisRuleSet>
+ <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" />
+ <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" />
+ <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" />
+ <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" />
+ <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">AllRules.ruleset</CodeAnalysisRuleSet>
+ <CodeAnalysisRuleSet Condition="'$(Configuration)|$(Platform)'=='Release|x64'">AllRules.ruleset</CodeAnalysisRuleSet>
+ <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" />
+ <CodeAnalysisRules Condition="'$(Configuration)|$(Platform)'=='Release|x64'" />
+ <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" />
+ <CodeAnalysisRuleAssemblies Condition="'$(Configuration)|$(Platform)'=='Release|x64'" />
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">..;$(IncludePath)</IncludePath>
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">..;$(IncludePath)</IncludePath>
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Release|x64'">..;$(IncludePath)</IncludePath>
+ <IncludePath Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">..;$(IncludePath)</IncludePath>
+ </PropertyGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ <ClCompile>
+ <Optimization>Disabled</Optimization>
+ <AdditionalIncludeDirectories>..\..\js\src;..\third_party\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions>_UNICODE;UNICODE;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <MinimalRebuild>No</MinimalRebuild>
+ <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
+ <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+ <PrecompiledHeader>Use</PrecompiledHeader>
+ <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+ <WarningLevel>Level3</WarningLevel>
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+ <DisableSpecificWarnings>4355;4800;%(DisableSpecificWarnings)</DisableSpecificWarnings>
+ <MultiProcessorCompilation>true</MultiProcessorCompilation>
+ </ClCompile>
+ <Link>
+ <AdditionalDependencies>ws2_32.lib;Psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>c:\boost\lib\vs2010_32;\boost\lib\vs2010_32;\boost\lib</AdditionalLibraryDirectories>
+ <IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
+ <IgnoreSpecificDefaultLibraries>%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <SubSystem>Console</SubSystem>
+ <TargetMachine>MachineX86</TargetMachine>
+ <Profile>true</Profile>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ <ClCompile>
+ <Optimization>Disabled</Optimization>
+ <AdditionalIncludeDirectories>..\..\js\src;..\third_party\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions>_DURABLE;_UNICODE;UNICODE;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;_DEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
+ <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+ <PrecompiledHeader>Use</PrecompiledHeader>
+ <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+ <WarningLevel>Level3</WarningLevel>
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+ <DisableSpecificWarnings>4355;4800;4267;4244;%(DisableSpecificWarnings)</DisableSpecificWarnings>
+ <MinimalRebuild>No</MinimalRebuild>
+ <MultiProcessorCompilation>true</MultiProcessorCompilation>
+ </ClCompile>
+ <Link>
+ <AdditionalDependencies>ws2_32.lib;Psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>c:\boost\lib\vs2010_64;\boost\lib\vs2010_64;\boost\lib</AdditionalLibraryDirectories>
+ <IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
+ <IgnoreSpecificDefaultLibraries>%(IgnoreSpecificDefaultLibraries)</IgnoreSpecificDefaultLibraries>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <SubSystem>Console</SubSystem>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ <ClCompile>
+ <Optimization>MaxSpeed</Optimization>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <AdditionalIncludeDirectories>..\..\js\src;..\third_party\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions>_UNICODE;UNICODE;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <PrecompiledHeader>Use</PrecompiledHeader>
+ <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+ <WarningLevel>Level3</WarningLevel>
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+ <DisableSpecificWarnings>4355;4800;%(DisableSpecificWarnings)</DisableSpecificWarnings>
+ <MinimalRebuild>No</MinimalRebuild>
+ <MultiProcessorCompilation>true</MultiProcessorCompilation>
+ </ClCompile>
+ <Link>
+ <AdditionalDependencies>ws2_32.lib;psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>c:\boost\lib\vs2010_32;\boost\lib\vs2010_32;\boost\lib</AdditionalLibraryDirectories>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <SubSystem>Console</SubSystem>
+ <OptimizeReferences>true</OptimizeReferences>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ <TargetMachine>MachineX86</TargetMachine>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ <ClCompile>
+ <Optimization>MaxSpeed</Optimization>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <AdditionalIncludeDirectories>..\..\js\src;..\third_party\pcre-7.4;C:\boost;\boost;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <PreprocessorDefinitions>_UNICODE;UNICODE;MONGO_EXPOSE_MACROS;OLDJS;STATIC_JS_API;XP_WIN;NDEBUG;_CONSOLE;_CRT_SECURE_NO_WARNINGS;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <PrecompiledHeader>Use</PrecompiledHeader>
+ <PrecompiledHeaderFile>pch.h</PrecompiledHeaderFile>
+ <WarningLevel>Level3</WarningLevel>
+ <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+ <DisableSpecificWarnings>4355;4800;4267;4244;%(DisableSpecificWarnings)</DisableSpecificWarnings>
+ <MinimalRebuild>No</MinimalRebuild>
+ <MultiProcessorCompilation>true</MultiProcessorCompilation>
+ </ClCompile>
+ <Link>
+ <AdditionalDependencies>ws2_32.lib;psapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>c:\boost\lib\vs2010_64;\boost\lib\vs2010_64;\boost\lib</AdditionalLibraryDirectories>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <SubSystem>Console</SubSystem>
+ <OptimizeReferences>true</OptimizeReferences>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+ <ClInclude Include="..\..\boostw\boost_1_34_1\boost\config\auto_link.hpp" />
+ <ClInclude Include="..\bson\bson-inl.h" />
+ <ClInclude Include="..\bson\bson.h" />
+ <ClInclude Include="..\bson\bsonelement.h" />
+ <ClInclude Include="..\bson\bsonmisc.h" />
+ <ClInclude Include="..\bson\bsonobj.h" />
+ <ClInclude Include="..\bson\bsonobjbuilder.h" />
+ <ClInclude Include="..\bson\bsonobjiterator.h" />
+ <ClInclude Include="..\bson\bsontypes.h" />
+ <ClInclude Include="..\bson\bson_db.h" />
+ <ClInclude Include="..\bson\inline_decls.h" />
+ <ClInclude Include="..\bson\oid.h" />
+ <ClInclude Include="..\bson\ordering.h" />
+ <ClInclude Include="..\bson\stringdata.h" />
+ <ClInclude Include="..\db\dur.h" />
+ <ClInclude Include="..\db\durop.h" />
+ <ClInclude Include="..\db\dur_journal.h" />
+ <ClInclude Include="..\db\jsobjmanipulator.h" />
+ <ClInclude Include="..\db\mongommf.h" />
+ <ClInclude Include="..\db\mongomutex.h" />
+ <ClInclude Include="..\db\ops\delete.h" />
+ <ClInclude Include="..\db\ops\query.h" />
+ <ClInclude Include="..\db\ops\update.h" />
+ <ClInclude Include="..\third_party\pcre-7.4\pcrecpp.h" />
+ <ClInclude Include="..\server.h" />
+ <ClInclude Include="..\targetver.h" />
+ <ClInclude Include="..\..\boostw\boost_1_34_1\boost\version.hpp" />
+ <ClInclude Include="..\third_party\pcre-7.4\config.h" />
+ <ClInclude Include="..\third_party\pcre-7.4\pcre.h" />
+ <ClInclude Include="..\client\connpool.h" />
+ <ClInclude Include="..\client\dbclient.h" />
+ <ClInclude Include="..\client\model.h" />
+ <ClInclude Include="..\db\btree.h" />
+ <ClInclude Include="..\db\clientcursor.h" />
+ <ClInclude Include="..\db\cmdline.h" />
+ <ClInclude Include="..\db\commands.h" />
+ <ClInclude Include="..\db\concurrency.h" />
+ <ClInclude Include="..\db\curop.h" />
+ <ClInclude Include="..\db\cursor.h" />
+ <ClInclude Include="..\db\database.h" />
+ <ClInclude Include="..\db\db.h" />
+ <ClInclude Include="..\db\dbhelpers.h" />
+ <ClInclude Include="..\db\dbinfo.h" />
+ <ClInclude Include="..\db\dbmessage.h" />
+ <ClInclude Include="..\db\diskloc.h" />
+ <ClInclude Include="..\db\extsort.h" />
+ <ClInclude Include="..\db\introspect.h" />
+ <ClInclude Include="..\db\jsobj.h" />
+ <ClInclude Include="..\db\json.h" />
+ <ClInclude Include="..\db\matcher.h" />
+ <ClInclude Include="..\grid\message.h" />
+ <ClInclude Include="..\db\minilex.h" />
+ <ClInclude Include="..\db\namespace.h" />
+ <ClInclude Include="..\pch.h" />
+ <ClInclude Include="..\db\pdfile.h" />
+ <ClInclude Include="..\grid\protocol.h" />
+ <ClInclude Include="..\db\query.h" />
+ <ClInclude Include="..\db\queryoptimizer.h" />
+ <ClInclude Include="..\db\repl.h" />
+ <ClInclude Include="..\db\replset.h" />
+ <ClInclude Include="..\db\resource.h" />
+ <ClInclude Include="..\db\scanandorder.h" />
+ <ClInclude Include="..\db\security.h" />
+ <ClInclude Include="..\third_party\snappy\config.h" />
+ <ClInclude Include="..\third_party\snappy\snappy-c.h" />
+ <ClInclude Include="..\third_party\snappy\snappy-internal.h" />
+ <ClInclude Include="..\third_party\snappy\snappy-sinksource.h" />
+ <ClInclude Include="..\third_party\snappy\snappy-stubs-internal.h" />
+ <ClInclude Include="..\third_party\snappy\snappy-stubs-public.h" />
+ <ClInclude Include="..\third_party\snappy\snappy.h" />
+ <ClInclude Include="..\util\builder.h" />
+ <ClInclude Include="..\util\checksum.h" />
+ <ClInclude Include="..\util\compress.h" />
+ <ClInclude Include="..\util\concurrency\list.h" />
+ <ClInclude Include="..\util\concurrency\task.h" />
+ <ClInclude Include="..\util\concurrency\value.h" />
+ <ClInclude Include="..\util\file.h" />
+ <ClInclude Include="..\util\goodies.h" />
+ <ClInclude Include="..\util\hashtab.h" />
+ <ClInclude Include="..\db\lasterror.h" />
+ <ClInclude Include="..\util\log.h" />
+ <ClInclude Include="..\util\logfile.h" />
+ <ClInclude Include="..\util\lruishmap.h" />
+ <ClInclude Include="..\util\md5.h" />
+ <ClInclude Include="..\util\md5.hpp" />
+ <ClInclude Include="..\util\miniwebserver.h" />
+ <ClInclude Include="..\util\mmap.h" />
+ <ClInclude Include="..\util\mongoutils\hash.h" />
+ <ClInclude Include="..\util\sock.h" />
+ <ClInclude Include="..\util\unittest.h" />
+ </ItemGroup>
+ <ItemGroup>
+ <ClCompile Include="..\bson\oid.cpp" />
+ <ClCompile Include="..\client\dbclientcursor.cpp" />
+ <ClCompile Include="..\client\dbclient_rs.cpp" />
+ <ClCompile Include="..\client\distlock.cpp" />
+ <ClCompile Include="..\client\gridfs.cpp" />
+ <ClCompile Include="..\client\model.cpp" />
+ <ClCompile Include="..\client\parallel.cpp" />
+ <ClCompile Include="..\db\btreebuilder.cpp" />
+ <ClCompile Include="..\db\cap.cpp" />
+ <ClCompile Include="..\db\commands\isself.cpp" />
+ <ClCompile Include="..\db\compact.cpp" />
+ <ClCompile Include="..\db\dbcommands_admin.cpp" />
+ <ClCompile Include="..\db\dbcommands_generic.cpp" />
+ <ClCompile Include="..\db\dur.cpp" />
+ <ClCompile Include="..\db\durop.cpp" />
+ <ClCompile Include="..\db\dur_commitjob.cpp" />
+ <ClCompile Include="..\db\dur_journal.cpp" />
+ <ClCompile Include="..\db\dur_preplogbuffer.cpp" />
+ <ClCompile Include="..\db\dur_recover.cpp" />
+ <ClCompile Include="..\db\dur_writetodatafiles.cpp" />
+ <ClCompile Include="..\db\geo\2d.cpp" />
+ <ClCompile Include="..\db\geo\haystack.cpp" />
+ <ClCompile Include="..\db\key.cpp" />
+ <ClCompile Include="..\db\mongommf.cpp" />
+ <ClCompile Include="..\db\ops\delete.cpp" />
+ <ClCompile Include="..\db\ops\query.cpp" />
+ <ClCompile Include="..\db\ops\update.cpp" />
+ <ClCompile Include="..\db\projection.cpp" />
+ <ClCompile Include="..\db\queryoptimizercursor.cpp" />
+ <ClCompile Include="..\db\querypattern.cpp">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\db\record.cpp" />
+ <ClCompile Include="..\db\repl\consensus.cpp" />
+ <ClCompile Include="..\db\repl\heartbeat.cpp" />
+ <ClCompile Include="..\db\repl\manager.cpp" />
+ <ClCompile Include="..\db\repl\rs.cpp" />
+ <ClCompile Include="..\db\repl\rs_initialsync.cpp" />
+ <ClCompile Include="..\db\repl\rs_initiate.cpp" />
+ <ClCompile Include="..\db\repl\rs_rollback.cpp" />
+ <ClCompile Include="..\db\repl\rs_sync.cpp" />
+ <ClCompile Include="..\db\restapi.cpp" />
+ <ClCompile Include="..\db\scanandorder.cpp" />
+ <ClCompile Include="..\db\security_common.cpp" />
+ <ClCompile Include="..\third_party\pcre-7.4\pcrecpp.cc">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_chartables.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_compile.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_config.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_dfa_exec.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_exec.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_fullinfo.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_get.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_globals.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_info.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_maketables.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_newline.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_ord2utf8.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_refcount.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_scanner.cc">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_stringpiece.cc">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_study.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_tables.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_try_flipped.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_ucp_searchfuncs.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_valid_utf8.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_version.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcre_xclass.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\pcre-7.4\pcreposix.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\client\connpool.cpp" />
+ <ClCompile Include="..\client\dbclient.cpp" />
+ <ClCompile Include="..\client\syncclusterconnection.cpp" />
+ <ClCompile Include="..\db\btree.cpp" />
+ <ClCompile Include="..\db\btreecursor.cpp" />
+ <ClCompile Include="..\pch.cpp">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">Create</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">Create</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">Create</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">Create</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\db\client.cpp" />
+ <ClCompile Include="..\db\clientcursor.cpp" />
+ <ClCompile Include="..\db\cloner.cpp" />
+ <ClCompile Include="..\db\commands.cpp" />
+ <ClCompile Include="..\db\common.cpp" />
+ <ClCompile Include="..\db\cursor.cpp" />
+ <ClCompile Include="..\db\database.cpp" />
+ <ClCompile Include="..\db\dbcommands.cpp" />
+ <ClCompile Include="..\db\dbeval.cpp" />
+ <ClCompile Include="..\db\dbhelpers.cpp" />
+ <ClCompile Include="..\db\dbwebserver.cpp" />
+ <ClCompile Include="..\db\extsort.cpp" />
+ <ClCompile Include="..\db\index.cpp" />
+ <ClCompile Include="..\db\indexkey.cpp" />
+ <ClCompile Include="..\db\instance.cpp" />
+ <ClCompile Include="..\db\introspect.cpp" />
+ <ClCompile Include="..\db\jsobj.cpp" />
+ <ClCompile Include="..\db\json.cpp" />
+ <ClCompile Include="..\db\lasterror.cpp" />
+ <ClCompile Include="..\db\matcher.cpp" />
+ <ClCompile Include="..\scripting\bench.cpp" />
+ <ClCompile Include="..\s\chunk.cpp" />
+ <ClCompile Include="..\s\config.cpp" />
+ <ClCompile Include="..\s\d_chunk_manager.cpp" />
+ <ClCompile Include="..\s\d_migrate.cpp" />
+ <ClCompile Include="..\s\d_split.cpp" />
+ <ClCompile Include="..\s\d_state.cpp" />
+ <ClCompile Include="..\s\d_writeback.cpp" />
+ <ClCompile Include="..\s\grid.cpp" />
+ <ClCompile Include="..\s\shard.cpp" />
+ <ClCompile Include="..\s\shardconnection.cpp" />
+ <ClCompile Include="..\s\shardkey.cpp" />
+ <ClCompile Include="..\third_party\snappy\snappy-sinksource.cc">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">NotUsing</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\third_party\snappy\snappy.cc">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">NotUsing</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\util\alignedbuilder.cpp">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\util\compress.cpp">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">NotUsing</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\util\concurrency\spin_lock.cpp">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\util\concurrency\synchronization.cpp" />
+ <ClCompile Include="..\util\concurrency\task.cpp" />
+ <ClCompile Include="..\util\concurrency\thread_pool.cpp" />
+ <ClCompile Include="..\util\concurrency\vars.cpp" />
+ <ClCompile Include="..\util\file_allocator.cpp" />
+ <ClCompile Include="..\util\log.cpp" />
+ <ClCompile Include="..\util\logfile.cpp" />
+ <ClCompile Include="..\util\mmap_win.cpp" />
+ <ClCompile Include="..\db\namespace.cpp" />
+ <ClCompile Include="..\db\nonce.cpp" />
+ <ClCompile Include="..\db\pdfile.cpp" />
+ <ClCompile Include="..\db\queryoptimizer.cpp" />
+ <ClCompile Include="..\util\processinfo.cpp" />
+ <ClCompile Include="..\db\repl.cpp" />
+ <ClCompile Include="..\db\security.cpp" />
+ <ClCompile Include="..\db\security_commands.cpp" />
+ <ClCompile Include="..\db\tests.cpp" />
+ <ClCompile Include="..\db\cmdline.cpp" />
+ <ClCompile Include="..\db\dbmessage.cpp" />
+ <ClCompile Include="..\db\matcher_covered.cpp" />
+ <ClCompile Include="..\db\oplog.cpp" />
+ <ClCompile Include="..\db\queryutil.cpp" />
+ <ClCompile Include="..\db\repl_block.cpp" />
+ <ClCompile Include="..\util\assert_util.cpp" />
+ <ClCompile Include="..\util\background.cpp" />
+ <ClCompile Include="..\util\base64.cpp" />
+ <ClCompile Include="..\util\md5.c">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ <PrecompiledHeaderFile Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeaderFile>
+ <PrecompiledHeaderFile Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeaderFile>
+ </ClCompile>
+ <ClCompile Include="..\util\md5main.cpp" />
+ <ClCompile Include="..\util\net\message.cpp" />
+ <ClCompile Include="..\util\net\listen.cpp" />
+ <ClCompile Include="..\util\net\message_server_port.cpp" />
+ <ClCompile Include="..\util\net\message_port.cpp" />
+ <ClCompile Include="..\util\net\miniwebserver.cpp" />
+ <ClCompile Include="..\util\mmap.cpp" />
+ <ClCompile Include="..\util\processinfo_win32.cpp" />
+ <ClCompile Include="..\util\ramlog.cpp" />
+ <ClCompile Include="..\util\net\sock.cpp" />
+ <ClCompile Include="..\util\stringutils.cpp" />
+ <ClCompile Include="..\util\text.cpp" />
+ <ClCompile Include="..\util\util.cpp" />
+ <ClCompile Include="..\s\d_logic.cpp" />
+ <ClCompile Include="..\scripting\engine.cpp" />
+ <ClCompile Include="..\scripting\engine_spidermonkey.cpp" />
+ <ClCompile Include="..\shell\mongo_vstudio.cpp">
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">NotUsing</PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ </PrecompiledHeader>
+ <PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ </PrecompiledHeader>
+ </ClCompile>
+ <ClCompile Include="..\scripting\utils.cpp" />
+ <ClCompile Include="..\util\version.cpp" />
+ <ClCompile Include="basictests.cpp" />
+ <ClCompile Include="btreetests.cpp" />
+ <ClCompile Include="clienttests.cpp" />
+ <ClCompile Include="cursortests.cpp" />
+ <ClCompile Include="dbtests.cpp" />
+ <ClCompile Include="directclienttests.cpp" />
+ <ClCompile Include="framework.cpp" />
+ <ClCompile Include="jsobjtests.cpp" />
+ <ClCompile Include="jsontests.cpp" />
+ <ClCompile Include="jstests.cpp" />
+ <ClCompile Include="matchertests.cpp" />
+ <ClCompile Include="mmaptests.cpp" />
+ <ClCompile Include="namespacetests.cpp" />
+ <ClCompile Include="pdfiletests.cpp" />
+ <ClCompile Include="perftests.cpp" />
+ <ClCompile Include="queryoptimizertests.cpp" />
+ <ClCompile Include="querytests.cpp" />
+ <ClCompile Include="repltests.cpp" />
+ <ClCompile Include="socktests.cpp" />
+ <ClCompile Include="spin_lock_test.cpp" />
+ <ClCompile Include="threadedtests.cpp">
+ <DisableSpecificWarnings Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">4180;%(DisableSpecificWarnings)</DisableSpecificWarnings>
+ <DisableSpecificWarnings Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">4180;%(DisableSpecificWarnings)</DisableSpecificWarnings>
+ </ClCompile>
+ <ClCompile Include="updatetests.cpp" />
+ <ClCompile Include="..\db\stats\counters.cpp" />
+ <ClCompile Include="..\db\stats\snapshots.cpp" />
+ <ClCompile Include="..\db\stats\top.cpp" />
+ <ClCompile Include="..\db\repl\health.cpp" />
+ <ClCompile Include="..\db\repl\replset_commands.cpp" />
+ <ClCompile Include="..\db\repl\rs_config.cpp" />
+ </ItemGroup>
+ <ItemGroup>
+ <None Include="..\SConstruct" />
+ <None Include="btreetests.inl" />
+ </ItemGroup>
+ <ItemGroup>
+ <Library Include="..\..\js\js32d.lib">
+ <FileType>Document</FileType>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
+ </Library>
+ <Library Include="..\..\js\js32r.lib">
+ <FileType>Document</FileType>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild>
+ </Library>
+ <Library Include="..\..\js\js64d.lib">
+ <FileType>Document</FileType>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
+ </Library>
+ <Library Include="..\..\js\js64r.lib">
+ <FileType>Document</FileType>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</ExcludedFromBuild>
+ <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
+ </Library>
+ </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
</Project> \ No newline at end of file
diff --git a/dbtests/test.vcxproj.filters b/dbtests/test.vcxproj.filters
index c52f7f6..3554ce7 100755
--- a/dbtests/test.vcxproj.filters
+++ b/dbtests/test.vcxproj.filters
@@ -4,7 +4,7 @@
<Filter Include="misc and third party">
<UniqueIdentifier>{17c97725-06a4-41a6-bc1c-f0e05eada682}</UniqueIdentifier>
</Filter>
- <Filter Include="misc and third party\pcre">
+ <Filter Include="misc and third party">
<UniqueIdentifier>{0a50fb63-4ac3-4e30-a9d4-b0841878ee73}</UniqueIdentifier>
</Filter>
<Filter Include="client">
@@ -53,26 +53,23 @@
<Filter Include="dur">
<UniqueIdentifier>{c296d097-0d46-46ee-9097-f2df659d9596}</UniqueIdentifier>
</Filter>
+ <Filter Include="bson">
+ <UniqueIdentifier>{e6652333-c77f-420c-af8e-72d55bc095fe}</UniqueIdentifier>
+ </Filter>
+ <Filter Include="misc and third party\snappy">
+ <UniqueIdentifier>{fbc4416f-ca67-4e63-a1ea-49027de7e080}</UniqueIdentifier>
+ </Filter>
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\boostw\boost_1_34_1\boost\config\auto_link.hpp">
<Filter>misc and third party</Filter>
</ClInclude>
- <ClInclude Include="..\pcre-7.4\pcrecpp.h">
- <Filter>misc and third party</Filter>
- </ClInclude>
<ClInclude Include="..\targetver.h">
<Filter>misc and third party</Filter>
</ClInclude>
<ClInclude Include="..\..\boostw\boost_1_34_1\boost\version.hpp">
<Filter>misc and third party</Filter>
</ClInclude>
- <ClInclude Include="..\pcre-7.4\config.h">
- <Filter>misc and third party\pcre</Filter>
- </ClInclude>
- <ClInclude Include="..\pcre-7.4\pcre.h">
- <Filter>misc and third party\pcre</Filter>
- </ClInclude>
<ClInclude Include="..\client\connpool.h">
<Filter>client</Filter>
</ClInclude>
@@ -244,6 +241,87 @@
<ClInclude Include="..\db\mongomutex.h">
<Filter>db</Filter>
</ClInclude>
+ <ClInclude Include="..\util\mongoutils\hash.h">
+ <Filter>util\h</Filter>
+ </ClInclude>
+ <ClInclude Include="..\util\checksum.h">
+ <Filter>util</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bson.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bson_db.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bsonelement.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bson-inl.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bsonmisc.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bsonobj.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bsonobjbuilder.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bsonobjiterator.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\bsontypes.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\inline_decls.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\oid.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\ordering.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\bson\stringdata.h">
+ <Filter>bson</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\ops\delete.h">
+ <Filter>db\cpp</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\ops\update.h">
+ <Filter>db\cpp</Filter>
+ </ClInclude>
+ <ClInclude Include="..\db\ops\query.h">
+ <Filter>db\cpp</Filter>
+ </ClInclude>
+ <ClInclude Include="..\server.h">
+ <Filter>db\h</Filter>
+ </ClInclude>
+ <ClInclude Include="..\third_party\snappy\config.h">
+ <Filter>misc and third party\snappy</Filter>
+ </ClInclude>
+ <ClInclude Include="..\third_party\snappy\snappy.h">
+ <Filter>misc and third party\snappy</Filter>
+ </ClInclude>
+ <ClInclude Include="..\third_party\snappy\snappy-c.h">
+ <Filter>misc and third party\snappy</Filter>
+ </ClInclude>
+ <ClInclude Include="..\third_party\snappy\snappy-internal.h">
+ <Filter>misc and third party\snappy</Filter>
+ </ClInclude>
+ <ClInclude Include="..\third_party\snappy\snappy-sinksource.h">
+ <Filter>misc and third party\snappy</Filter>
+ </ClInclude>
+ <ClInclude Include="..\third_party\snappy\snappy-stubs-internal.h">
+ <Filter>misc and third party\snappy</Filter>
+ </ClInclude>
+ <ClInclude Include="..\third_party\snappy\snappy-stubs-public.h">
+ <Filter>misc and third party\snappy</Filter>
+ </ClInclude>
+ <ClInclude Include="..\util\compress.h">
+ <Filter>misc and third party</Filter>
+ </ClInclude>
</ItemGroup>
<ItemGroup>
<Library Include="..\..\js\js64r.lib">
@@ -260,78 +338,6 @@
</Library>
</ItemGroup>
<ItemGroup>
- <ClCompile Include="..\pcre-7.4\pcrecpp.cc">
- <Filter>misc and third party</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_chartables.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_compile.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_config.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_dfa_exec.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_exec.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_fullinfo.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_get.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_globals.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_info.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_maketables.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_newline.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_ord2utf8.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_refcount.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_scanner.cc">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_stringpiece.cc">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_study.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_tables.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_try_flipped.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_ucp_searchfuncs.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_valid_utf8.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_version.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcre_xclass.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
- <ClCompile Include="..\pcre-7.4\pcreposix.c">
- <Filter>misc and third party\pcre</Filter>
- </ClCompile>
<ClCompile Include="..\client\connpool.cpp">
<Filter>client</Filter>
</ClCompile>
@@ -419,9 +425,6 @@
<ClCompile Include="..\db\pdfile.cpp">
<Filter>db\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\db\query.cpp">
- <Filter>db\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\db\queryoptimizer.cpp">
<Filter>db\cpp</Filter>
</ClCompile>
@@ -437,9 +440,6 @@
<ClCompile Include="..\db\tests.cpp">
<Filter>db\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\db\update.cpp">
- <Filter>db\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\db\cmdline.cpp">
<Filter>db\h</Filter>
</ClCompile>
@@ -464,33 +464,18 @@
<ClCompile Include="..\util\base64.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\util\httpclient.cpp">
- <Filter>util\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\util\md5.c">
<Filter>util\cpp</Filter>
</ClCompile>
<ClCompile Include="..\util\md5main.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\util\message.cpp">
- <Filter>util\cpp</Filter>
- </ClCompile>
- <ClCompile Include="..\util\message_server_port.cpp">
- <Filter>util\cpp</Filter>
- </ClCompile>
- <ClCompile Include="..\util\miniwebserver.cpp">
- <Filter>util\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\util\mmap.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
<ClCompile Include="..\util\processinfo_win32.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\util\sock.cpp">
- <Filter>util\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\util\util.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
@@ -542,9 +527,6 @@
<ClCompile Include="namespacetests.cpp">
<Filter>dbtests</Filter>
</ClCompile>
- <ClCompile Include="pairingtests.cpp">
- <Filter>dbtests</Filter>
- </ClCompile>
<ClCompile Include="pdfiletests.cpp">
<Filter>dbtests</Filter>
</ClCompile>
@@ -692,9 +674,6 @@
<ClCompile Include="..\db\restapi.cpp">
<Filter>db\cpp</Filter>
</ClCompile>
- <ClCompile Include="..\util\concurrency\spin_lock.cpp">
- <Filter>db\cpp</Filter>
- </ClCompile>
<ClCompile Include="mmaptests.cpp">
<Filter>dbtests</Filter>
</ClCompile>
@@ -761,16 +740,88 @@
<ClCompile Include="directclienttests.cpp">
<Filter>dbtests</Filter>
</ClCompile>
- <ClCompile Include="..\db\security_key.cpp">
- <Filter>db\cpp</Filter>
- </ClCompile>
<ClCompile Include="..\util\file_allocator.cpp">
<Filter>util\cpp</Filter>
</ClCompile>
+ <ClCompile Include="..\db\dbcommands_admin.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\querypattern.cpp">
+ <Filter>db</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\ramlog.cpp">
+ <Filter>util</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\key.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\btreebuilder.cpp">
+ <Filter>btree</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\queryoptimizercursor.cpp">
+ <Filter>db</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\record.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\ops\delete.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\ops\update.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\security_common.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\ops\query.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\dbmessage.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\net\message.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\net\listen.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\net\message_server_port.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\net\message_port.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\net\miniwebserver.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\net\sock.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
+ <ClCompile Include="spin_lock_test.cpp">
+ <Filter>dbtests</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\concurrency\spin_lock.cpp">
+ <Filter>util\concurrency</Filter>
+ </ClCompile>
+ <ClCompile Include="..\third_party\snappy\snappy.cc">
+ <Filter>misc and third party\snappy</Filter>
+ </ClCompile>
+ <ClCompile Include="..\util\compress.cpp">
+ <Filter>misc and third party</Filter>
+ </ClCompile>
+ <ClCompile Include="..\third_party\snappy\snappy-sinksource.cc">
+ <Filter>misc and third party\snappy</Filter>
+ </ClCompile>
+ <ClCompile Include="..\db\scanandorder.cpp">
+ <Filter>db\cpp</Filter>
+ </ClCompile>
</ItemGroup>
<ItemGroup>
<None Include="..\SConstruct">
<Filter>misc and third party</Filter>
</None>
+ <None Include="btreetests.inl">
+ <Filter>dbtests</Filter>
+ </None>
</ItemGroup>
</Project> \ No newline at end of file
diff --git a/dbtests/threadedtests.cpp b/dbtests/threadedtests.cpp
index 805b2d5..3a5ee10 100644
--- a/dbtests/threadedtests.cpp
+++ b/dbtests/threadedtests.cpp
@@ -21,6 +21,7 @@
#include "../bson/util/atomic_int.h"
#include "../util/concurrency/mvar.h"
#include "../util/concurrency/thread_pool.h"
+#include "../util/concurrency/list.h"
#include "../util/timer.h"
#include <boost/thread.hpp>
#include <boost/bind.hpp>
@@ -33,8 +34,8 @@ namespace ThreadedTests {
class ThreadedTest {
public:
virtual void setup() {} //optional
- virtual void subthread() = 0;
- virtual void validate() = 0;
+ virtual void subthread(int remaining) = 0; // runs in each spawned thread; put whatever test work you want done here
+ virtual void validate() = 0; // after work is done
static const int nthreads = nthreads_param;
@@ -48,12 +49,11 @@ namespace ThreadedTests {
private:
void launch_subthreads(int remaining) {
- if (!remaining) return;
-
- boost::thread athread(boost::bind(&ThreadedTest::subthread, this));
+ if (!remaining)
+ return;
+ boost::thread athread(boost::bind(&ThreadedTest::subthread, this, remaining));
launch_subthreads(remaining - 1);
-
athread.join();
}
};
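
Editorial aside (not part of the commit): a hypothetical minimal test built on the ThreadedTest harness shown above, to illustrate the changed interface. The framework spawns nthreads copies of subthread(), passing each one its remaining count (nthreads down to 1), joins them all, then calls validate() once. The CounterTest name and its contents are illustrative only.

    class CounterTest : public ThreadedTest<8> {
        AtomicUInt hits;                      // shared state hammered by all subthreads
        virtual void subthread(int /*remaining*/) {
            for( int i = 0; i < 1000; i++ )
                hits++;                       // AtomicUInt increment is the operation under test
        }
        virtual void validate() {
            ASSERT_EQUALS( 8 * 1000 , (int)hits );  // every increment must be visible after all joins
        }
    };
    // registered like the other tests: add< CounterTest >() in All::setupTests()
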
@@ -65,8 +65,18 @@ namespace ThreadedTests {
enum { N = 40000 };
#endif
MongoMutex *mm;
+ ProgressMeter pm;
public:
+ MongoMutexTest() : pm(N * nthreads) {}
void run() {
+ DEV {
+ // in _DEBUG builds on linux we mprotect each time a writelock
+ // is taken. That can greatly slow down this test if there are
+ // many open files
+ DBDirectClient db;
+ db.simpleCommand("admin", NULL, "closeAllDatabases");
+ }
+
Timer t;
cout << "MongoMutexTest N:" << N << endl;
ThreadedTest<135>::run();
@@ -74,9 +84,9 @@ namespace ThreadedTests {
}
private:
virtual void setup() {
- mm = new MongoMutex("MongoMutexTest");
+ mm = &dbMutex;
}
- virtual void subthread() {
+ virtual void subthread(int) {
Client::initThread("mongomutextest");
sleepmillis(0);
for( int i = 0; i < N; i++ ) {
@@ -122,6 +132,7 @@ namespace ThreadedTests {
mm->lock_shared();
mm->unlock_shared();
}
+ pm.hit();
}
cc().shutdown();
}
@@ -139,7 +150,7 @@ namespace ThreadedTests {
static const int iterations = 1000000;
AtomicUInt target;
- void subthread() {
+ void subthread(int) {
for(int i=0; i < iterations; i++) {
//target.x++; // verified to fail with this version
target++;
@@ -170,7 +181,7 @@ namespace ThreadedTests {
public:
MVarTest() : target(0) {}
- void subthread() {
+ void subthread(int) {
for(int i=0; i < iterations; i++) {
int val = target.take();
#if BOOST_VERSION >= 103500
@@ -224,16 +235,370 @@ namespace ThreadedTests {
}
};
- class All : public Suite {
+ class RWLockTest1 {
+ public:
+ void run() {
+ RWLock lk( "eliot" );
+ {
+ rwlock r( lk , true , 1000 );
+ }
+ }
+ };
+
+ class RWLockTest2 {
+ public:
+
+ static void worker1( const RWLock * lk , AtomicUInt * x ) {
+ (*x)++; // 1
+ //cout << "lock b try" << endl;
+ rwlock b( *lk , true );
+ //cout << "lock b got" << endl;
+ (*x)++; // 2
+ }
+
+ static void worker2( const RWLock * lk , AtomicUInt * x ) {
+ //cout << "lock c try" << endl;
+ rwlock c( *lk , false );
+ (*x)++;
+ //cout << "lock c got" << endl;
+ }
+
+ void run() {
+ /**
+ * note: this test will deadlock if the code breaks
+ */
+
+ RWLock lk( "eliot2" , 120 * 1000 );
+ cout << "RWLock impl: " << lk.implType() << endl;
+
+ auto_ptr<rwlock> a( new rwlock( lk , false ) );
+
+ AtomicUInt x1 = 0;
+ cout << "A : " << &x1 << endl;
+ boost::thread t1( boost::bind( worker1 , &lk , &x1 ) );
+ while ( ! x1 );
+ assert( x1 == 1 );
+ sleepmillis( 500 );
+ assert( x1 == 1 );
+
+ AtomicUInt x2 = 0;
+
+ boost::thread t2( boost::bind( worker2, &lk , &x2 ) );
+ t2.join();
+ assert( x2 == 1 );
+
+ a.reset();
+
+ for ( int i=0; i<2000; i++ ) {
+ if ( x1 == 2 )
+ break;
+ sleepmillis(1);
+ }
+
+ assert( x1 == 2 );
+ t1.join();
+
+ }
+ };
+
+
+
+ /** test of shared lock */
+ class RWLockTest3 {
+ public:
+
+ static void worker2( RWLock * lk , AtomicUInt * x ) {
+ assert( ! lk->lock_try(0) );
+ //cout << "lock c try" << endl;
+ rwlock c( *lk , false );
+ (*x)++;
+ //cout << "lock c got" << endl;
+ }
+
+ void run() {
+ /**
+ * note: this test will deadlock if the code breaks
+ */
+
+ RWLock lk( "eliot2" , 120 * 1000 );
+
+ auto_ptr<rwlock> a( new rwlock( lk , false ) );
+
+ AtomicUInt x2 = 0;
+
+ boost::thread t2( boost::bind( worker2, &lk , &x2 ) );
+ t2.join();
+ assert( x2 == 1 );
+
+ a.reset();
+
+ }
+ };
+
+ class RWLockTest4 {
+ public:
+
+#if defined(__linux__) || defined(__APPLE__)
+ static void worker1( pthread_rwlock_t * lk , AtomicUInt * x ) {
+ (*x)++; // 1
+ cout << "lock b try" << endl;
+ while ( 1 ) {
+ if ( pthread_rwlock_trywrlock( lk ) == 0 )
+ break;
+ sleepmillis(10);
+ }
+ cout << "lock b got" << endl;
+ (*x)++; // 2
+ pthread_rwlock_unlock( lk );
+ }
+
+ static void worker2( pthread_rwlock_t * lk , AtomicUInt * x ) {
+ cout << "lock c try" << endl;
+ pthread_rwlock_rdlock( lk );
+ (*x)++;
+ cout << "lock c got" << endl;
+ pthread_rwlock_unlock( lk );
+ }
+#endif
+ void run() {
+ /**
+ * note: this test will deadlock if the code breaks
+ */
+
+#if defined(__linux__) || defined(__APPLE__)
+
+ // create
+ pthread_rwlock_t lk;
+ assert( pthread_rwlock_init( &lk , 0 ) == 0 );
+
+ // read lock
+ assert( pthread_rwlock_rdlock( &lk ) == 0 );
+
+ AtomicUInt x1 = 0;
+ boost::thread t1( boost::bind( worker1 , &lk , &x1 ) );
+ while ( ! x1 );
+ assert( x1 == 1 );
+ sleepmillis( 500 );
+ assert( x1 == 1 );
+
+ AtomicUInt x2 = 0;
+
+ boost::thread t2( boost::bind( worker2, &lk , &x2 ) );
+ t2.join();
+ assert( x2 == 1 );
+
+ pthread_rwlock_unlock( &lk );
+
+ for ( int i=0; i<2000; i++ ) {
+ if ( x1 == 2 )
+ break;
+ sleepmillis(1);
+ }
+
+ assert( x1 == 2 );
+ t1.join();
+#endif
+ }
+ };
+
+ class List1Test2 : public ThreadedTest<> {
+ static const int iterations = 1000; // note: a lot of iterations will use a lot of memory as List1 leaks on purpose
+ class M : public List1<M>::Base {
+ public:
+ M(int x) : _x(x) { }
+ const int _x;
+ };
+ List1<M> l;
+ public:
+ void validate() { }
+ void subthread(int) {
+ for(int i=0; i < iterations; i++) {
+ int r = std::rand() % 256;
+ if( r == 0 ) {
+ l.orphanAll();
+ }
+ else if( r < 4 ) {
+ l.push(new M(r));
+ }
+ else {
+ M *orph = 0;
+ for( M *m = l.head(); m; m=m->next() ) {
+ ASSERT( m->_x > 0 && m->_x < 4 );
+ if( r > 192 && std::rand() % 8 == 0 )
+ orph = m;
+ }
+ if( orph ) {
+ try {
+ l.orphan(orph);
+ }
+ catch(...) { }
+ }
+ }
+ }
+ }
+ };
+
+ class List1Test {
public:
- All() : Suite( "threading" ) {
+ class M : public List1<M>::Base {
+ ~M();
+ public:
+ M( int x ) {
+ num = x;
+ }
+ int num;
+ };
+
+ void run(){
+ List1<M> l;
+
+ vector<M*> ms;
+ for ( int i=0; i<5; i++ ) {
+ M * m = new M(i);
+ ms.push_back( m );
+ l.push( m );
+ }
+
+ // must assert as the item is missing
+ ASSERT_EXCEPTION( l.orphan( new M( -3 ) ) , UserException );
}
+ };
+
+#if 0
+ class UpgradableTest : public ThreadedTest<7> {
+ RWLock m;
+ public:
+ UpgradableTest() : m("utest") {}
+ private:
+ virtual void validate() { }
+ virtual void subthread(int x) {
+ Client::initThread("utest");
+
+ /* r = read lock
+ R = get a read lock and we expect it to be fast
+ w = write lock
+ */
+ // /-- verify upgrade can be done instantly while in a read lock already
+ // | /-- verify upgrade acquisition isn't greedy
+ // | | /-- verify writes aren't greedy while in upgradable
+ // v v v
+ const char *what = " RURuRwR";
+
+ sleepmillis(100*x);
+
+ log() << x << what[x] << " request" << endl;
+ switch( what[x] ) {
+ case 'w':
+ {
+ m.lock();
+ log() << x << " W got" << endl;
+ sleepmillis(100);
+ log() << x << " W unlock" << endl;
+ m.unlock();
+ }
+ break;
+ case 'u':
+ case 'U':
+ {
+ Timer t;
+ m.lockAsUpgradable();
+ log() << x << " U got" << endl;
+ if( what[x] == 'U' ) {
+ if( t.millis() > 20 ) {
+ DEV {
+ // a _DEBUG buildbot might be slow, try to avoid false positives
+ log() << "warning lock upgrade was slow " << t.millis() << endl;
+ }
+ else {
+ ASSERT( false );
+ }
+ }
+ }
+ sleepsecs(1);
+ log() << x << " U unlock" << endl;
+ m.unlockFromUpgradable();
+ }
+ break;
+ case 'r':
+ case 'R':
+ {
+ Timer t;
+ m.lock_shared();
+ log() << x << " R got " << endl;
+ if( what[x] == 'R' ) {
+ if( t.millis() > 15 ) {
+ log() << "warning: when in upgradable write locks are still greedy on this platform" << endl;
+ }
+ }
+ sleepmillis(200);
+ log() << x << " R unlock" << endl;
+ m.unlock_shared();
+ }
+ break;
+ default:
+ ASSERT(false);
+ }
+
+ cc().shutdown();
+ }
+ };
+#endif
+
+ class WriteLocksAreGreedy : public ThreadedTest<3> {
+ public:
+ WriteLocksAreGreedy() : m("gtest") {}
+ private:
+ RWLock m;
+ virtual void validate() { }
+ virtual void subthread(int x) {
+ Client::initThread("utest");
+ if( x == 1 ) {
+ cout << mongo::curTimeMillis64() % 10000 << " 1" << endl;
+ rwlock_shared lk(m);
+ sleepmillis(300);
+ cout << mongo::curTimeMillis64() % 10000 << " 1x" << endl;
+ }
+ if( x == 2 ) {
+ sleepmillis(100);
+ cout << mongo::curTimeMillis64() % 10000 << " 2" << endl;
+ rwlock lk(m, true);
+ //m._lock();
+ cout << mongo::curTimeMillis64() % 10000 << " 2x" << endl;
+ //m.unlock();
+ }
+ if( x == 3 ) {
+ sleepmillis(200);
+ Timer t;
+ cout << mongo::curTimeMillis64() % 10000 << " 3" << endl;
+ rwlock_shared lk(m);
+ cout << mongo::curTimeMillis64() % 10000 << " 3x" << endl;
+ cout << t.millis() << endl;
+ ASSERT( t.millis() > 50 );
+ }
+ cc().shutdown();
+ }
+ };
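
Editorial aside on what the new WriteLocksAreGreedy test checks: thread 1 holds a shared lock for ~300 ms, thread 2 queues for the exclusive lock at ~100 ms, and thread 3 requests another shared lock at ~200 ms; if pending writers are greedy, thread 3 must wait behind the queued writer, which is what ASSERT( t.millis() > 50 ) verifies. Below is a minimal standalone sketch of the same property, reusing the RWLock / rwlock / rwlock_shared wrappers and boost::bind style seen in the tests above; the helper names and greedyDemo function are hypothetical.

    static void holdShared( RWLock *m ) { rwlock_shared r( *m ); sleepmillis( 300 ); } // reader holds the lock ~0-300ms
    static void takeWrite( RWLock *m )  { rwlock w( *m , true ); }                     // writer queues behind that reader

    void greedyDemo() {
        RWLock m( "greedy-demo" );
        boost::thread r1( boost::bind( holdShared , &m ) );
        sleepmillis( 100 );
        boost::thread w( boost::bind( takeWrite , &m ) );   // an exclusive request is now pending
        sleepmillis( 100 );
        Timer t;
        {
            rwlock_shared r2( m );   // with greedy writers this second reader queues behind w, ~100ms
        }
        ASSERT( t.millis() > 50 );
        r1.join();
        w.join();
    }
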
+
+ class All : public Suite {
+ public:
+ All() : Suite( "threading" ) { }
void setupTests() {
+ add< WriteLocksAreGreedy >();
+ //add< UpgradableTest >();
+ add< List1Test >();
+ add< List1Test2 >();
+
add< IsAtomicUIntAtomic >();
add< MVarTest >();
add< ThreadPoolTest >();
add< LockTest >();
+
+ add< RWLockTest1 >();
+ //add< RWLockTest2 >(); // SERVER-2996
+ add< RWLockTest3 >();
+ add< RWLockTest4 >();
+
add< MongoMutexTest >();
}
} myall;
diff --git a/dbtests/updatetests.cpp b/dbtests/updatetests.cpp
index 0f95a32..c912bf4 100644
--- a/dbtests/updatetests.cpp
+++ b/dbtests/updatetests.cpp
@@ -18,13 +18,13 @@
*/
#include "pch.h"
-#include "../db/query.h"
+#include "../db/ops/query.h"
#include "../db/db.h"
#include "../db/instance.h"
#include "../db/json.h"
#include "../db/lasterror.h"
-#include "../db/update.h"
+#include "../db/ops/update.h"
#include "dbtests.h"
@@ -750,18 +750,19 @@ namespace UpdateTests {
virtual BSONObj after() { return BSONObj(); }
void dotest() {
- client().insert( ns() , BSON( "x" << 5 ) );
+ long long start = numeric_limits<int>::max() - 5;
+ long long max = numeric_limits<int>::max() + 5ll;
+
+ client().insert( ns() , BSON( "x" << (int)start ) );
ASSERT( findOne()["x"].type() == NumberInt );
- long long start = 5;
- long long max = numeric_limits<int>::max();
- max *= 32;
while ( start < max ) {
- update( BSON( "$inc" << BSON( "x" << 500000 ) ) );
- start += 500000;
+ update( BSON( "$inc" << BSON( "x" << 1 ) ) );
+ start += 1;
ASSERT_EQUALS( start , findOne()["x"].numberLong() ); // SERVER-2005
}
+ ASSERT( findOne()["x"].type() == NumberLong );
}
};
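
Editorial aside on the rewritten $inc overflow test: the 5ll suffix matters because numeric_limits<int>::max() + 5 would already overflow as a plain int before the assignment, so the literal is widened to long long first. The loop then walks x across the 32-bit boundary one $inc at a time, checking the running value via numberLong() (SERVER-2005) and finally that the stored type has been promoted from NumberInt to NumberLong. A hypothetical restatement of the boundary arithmetic:

    long long start = numeric_limits<int>::max() - 5;    // 2147483642, still representable as NumberInt
    long long max   = numeric_limits<int>::max() + 5ll;  // 2147483652, only representable as NumberLong
    // numeric_limits<int>::max() + 5 (without the ll) would be signed int overflow, i.e. undefined behaviour
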